/* Control flow functions for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "gimple-pretty-print.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table <locus_discrim_hasher> discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                             Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (basic_block_info->length () < (size_t) n_basic_blocks)
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}


/* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
   it and set loop->safelen to INT_MAX.  We assume that the annotation
   comes immediately before the condition.  */

static void
replace_loop_annotate ()
{
  struct loop *loop;
  loop_iterator li;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (li, loop, 0)
    {
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          gsi_prev_nondebug (&gsi);
          if (gsi_end_p (gsi))
            continue;
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;
          if ((annot_expr_kind) tree_low_cst (gimple_call_arg (stmt, 1), 0)
              != annot_expr_ivdep_kind)
            continue;
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
          loop->safelen = INT_MAX;
        }
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB (bb)
    {
      gsi = gsi_last_bb (bb);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        gsi_prev_nondebug (&gsi);
      if (gsi_end_p (gsi))
        continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        continue;
      if ((annot_expr_kind) tree_low_cst (gimple_call_arg (stmt, 1), 0)
          != annot_expr_ivdep_kind)
        continue;
      warning (0, "ignoring %<GCC ivdep%> annotation");
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
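
/* Illustrative note (an addition, not part of the original sources): the
   annotation stripped above originates from user code such as

       #pragma GCC ivdep
       for (i = 0; i < n; i++)
         a[i] = b[i] + c[i];

   The front end wraps the loop condition in an IFN_ANNOTATE internal call
   whose second argument is annot_expr_ivdep_kind.  Replacing the call by a
   plain copy of its first argument and setting loop->safelen to INT_MAX
   lets later passes assume there are no loop-carried dependences.  */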


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
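
/* Illustrative example (an addition, not from the original sources): a
   computed goto arises from the GNU "labels as values" extension, so its
   destination is an expression rather than a LABEL_DECL:

       static void *dispatch[] = { &&op_add, &&op_sub };
       goto *dispatch[opcode];      <- gimple_goto_dest is not a LABEL_DECL
     op_add: ...
     op_sub: ...

   A plain "goto label;" has a LABEL_DECL destination and is not treated
   as computed.  */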


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}
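
/* Sketch of the transformation above (illustrative, not from the original
   sources).  Given two blocks ending in computed gotos

       goto *p;                      goto *q;

   factoring rewrites each site to store its destination in the shared
   temporary and jump to the single dispatcher block, so only that block
   needs edges to every potential target:

       gotovar = p;                  gotovar = q;
       goto <factored_label>;        goto <factored_label>;

     <factored_label>:
       goto *gotovar;
*/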


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
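
/* Illustrative walk-through (an addition, not from the original sources):
   for a lowered sequence such as

       a = b + c;
       if (a > 0) goto L1; else goto L2;   <- stmt_ends_bb_p: terminator
     L1:                                   <- label: stmt_starts_bb_p
       x = 1;

   the loop above splits SEQ in place with gsi_split_seq_before, starting a
   new basic block at each label and immediately after each
   control-altering statement.  */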


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
                                  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            CASE_GIMPLE_OMP:
              fallthru = make_gimple_omp_edges (bb, &cur_region);
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
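
/* Illustrative example (an addition, not from the original sources): for

       if (p) x++; else y++;

   the blocks for "x++" and "y++" share one source line; successive calls
   above return the next counter for that LOCATION_LINE (1, then 2, ...),
   so a sample-based profiler can attribute counts to the right block even
   though both blocks carry the same line number.  */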

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple label_stmt = gsi_stmt (gsi);
          tree target;

          if (gimple_code (label_stmt) != GIMPLE_LABEL)
            break;

          target = gimple_label_label (label_stmt);

          /* Make an edge to every label block that has been marked as a
             potential target for a computed goto or a non-local goto.  */
          if ((FORCED_LABEL (target) && !for_call)
              || (DECL_NONLOCAL (target) && for_call))
            {
              make_edge (bb, target_bb, EDGE_ABNORMAL);
              break;
            }
        }
      if (!gsi_end_p (gsi)
          && is_gimple_debug (gsi_stmt (gsi)))
        gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
        {
          /* Make an edge to every setjmp-like call.  */
          gimple call_stmt = gsi_stmt (gsi);
          if (is_gimple_call (call_stmt)
              && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
            make_edge (bb, target_bb, EDGE_ABNORMAL);
        }
    }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                              Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
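
/* Worked example (an addition, not from the original sources): for

       switch (x)
         {
         case 1: case 2: case 3: foo (); break;
         default: break;
         }

   the three consecutive single-valued cases reach the same block and each
   CASE_LOW equals the previous high value plus one, so they are merged
   into the single range entry "case 1 ... 3:" and the label vector shrinks
   from four entries to two.  */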

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change something from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a call statement.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (dump_file, bb, 0, dump_flags);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label; if the argument is
         not a label then we cannot construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}

/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
   statement, determine which of the outgoing edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_computed_goto (basic_block bb, tree val)
{
  basic_block dest;
  edge e = NULL;

  dest = label_to_block (val);
  if (dest)
    {
      e = find_edge (bb, dest);
      gcc_assert (e != NULL);
    }

  return e;
}

/* Given a constant value VAL and the entry block BB to a COND_EXPR
   statement, determine which of the two edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_cond_expr (basic_block bb, tree val)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

  gcc_assert (TREE_CODE (val) == INTEGER_CST);
  return (integer_zerop (val) ? false_edge : true_edge);
}

/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
   statement, determine which edge will be taken out of the block.  Return
   NULL if any edge may be taken.  */

static edge
find_taken_edge_switch_expr (basic_block bb, tree val)
{
  basic_block dest_bb;
  edge e;
  gimple switch_stmt;
  tree taken_case;

  switch_stmt = last_stmt (bb);
  taken_case = find_case_label_for_value (switch_stmt, val);
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (bb, dest_bb);
  gcc_assert (e);
  return e;
}
1971
1972
1973 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
1974 We can make optimal use here of the fact that the case labels are
1975 sorted: We can do a binary search for a case matching VAL. */
1976
1977 static tree
1978 find_case_label_for_value (gimple switch_stmt, tree val)
1979 {
1980 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
1981 tree default_case = gimple_switch_default_label (switch_stmt);
1982
1983 for (low = 0, high = n; high - low > 1; )
1984 {
1985 size_t i = (high + low) / 2;
1986 tree t = gimple_switch_label (switch_stmt, i);
1987 int cmp;
1988
1989 /* Cache the result of comparing CASE_LOW and val. */
1990 cmp = tree_int_cst_compare (CASE_LOW (t), val);
1991
1992 if (cmp > 0)
1993 high = i;
1994 else
1995 low = i;
1996
1997 if (CASE_HIGH (t) == NULL)
1998 {
1999 		  /* A single-valued case label.  */
2000 if (cmp == 0)
2001 return t;
2002 }
2003 else
2004 {
2005 /* A case range. We can only handle integer ranges. */
2006 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2007 return t;
2008 }
2009 }
2010
2011 return default_case;
2012 }
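
/* A worked example of the binary search above (illustrative only): for
     switch (i) { case 1: ...; case 3 ... 5: ...; default: ...; }
   the label vector is [default, 1, 3...5].  For VAL == 4 the search
   narrows onto the range label and the CASE_HIGH test (4 <= 5) accepts
   it; for VAL == 2 no label matches and the default case is returned.  */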
2013
2014
2015 /* Dump a basic block on stderr. */
2016
2017 void
2018 gimple_debug_bb (basic_block bb)
2019 {
2020 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2021 }
2022
2023
2024 /* Dump basic block with index N on stderr. */
2025
2026 basic_block
2027 gimple_debug_bb_n (int n)
2028 {
2029 gimple_debug_bb (BASIC_BLOCK (n));
2030 return BASIC_BLOCK (n);
2031 }
2032
2033
2034 /* Dump the CFG on stderr.
2035
2036    FLAGS are the same as those used by the tree dumping functions
2037    (see TDF_* in dumpfile.h).  */
2038
2039 void
2040 gimple_debug_cfg (int flags)
2041 {
2042 gimple_dump_cfg (stderr, flags);
2043 }
2044
2045
2046 /* Dump the program showing basic block boundaries on the given FILE.
2047
2048    FLAGS are the same as those used by the tree dumping functions (see
2049    TDF_* in dumpfile.h).  */
2050
2051 void
2052 gimple_dump_cfg (FILE *file, int flags)
2053 {
2054 if (flags & TDF_DETAILS)
2055 {
2056 dump_function_header (file, current_function_decl, flags);
2057 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2058 n_basic_blocks, n_edges, last_basic_block);
2059
2060 brief_dump_cfg (file, flags | TDF_COMMENT);
2061 fprintf (file, "\n");
2062 }
2063
2064 if (flags & TDF_STATS)
2065 dump_cfg_stats (file);
2066
2067 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2068 }
2069
2070
2071 /* Dump CFG statistics on FILE. */
2072
2073 void
2074 dump_cfg_stats (FILE *file)
2075 {
2076 static long max_num_merged_labels = 0;
2077 unsigned long size, total = 0;
2078 long num_edges;
2079 basic_block bb;
2080 const char * const fmt_str = "%-30s%-13s%12s\n";
2081 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2082 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2083 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2084 const char *funcname = current_function_name ();
2085
2086 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2087
2088 fprintf (file, "---------------------------------------------------------\n");
2089 fprintf (file, fmt_str, "", " Number of ", "Memory");
2090 fprintf (file, fmt_str, "", " instances ", "used ");
2091 fprintf (file, "---------------------------------------------------------\n");
2092
2093 size = n_basic_blocks * sizeof (struct basic_block_def);
2094 total += size;
2095 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2096 SCALE (size), LABEL (size));
2097
2098 num_edges = 0;
2099 FOR_EACH_BB (bb)
2100 num_edges += EDGE_COUNT (bb->succs);
2101 size = num_edges * sizeof (struct edge_def);
2102 total += size;
2103 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2104
2105 fprintf (file, "---------------------------------------------------------\n");
2106 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2107 LABEL (total));
2108 fprintf (file, "---------------------------------------------------------\n");
2109 fprintf (file, "\n");
2110
2111 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2112 max_num_merged_labels = cfg_stats.num_merged_labels;
2113
2114 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2115 cfg_stats.num_merged_labels, max_num_merged_labels);
2116
2117 fprintf (file, "\n");
2118 }
2119
2120
2121 /* Dump CFG statistics on stderr. Keep extern so that it's always
2122 linked in the final executable. */
2123
2124 DEBUG_FUNCTION void
2125 debug_cfg_stats (void)
2126 {
2127 dump_cfg_stats (stderr);
2128 }
2129
2130 /*---------------------------------------------------------------------------
2131 Miscellaneous helpers
2132 ---------------------------------------------------------------------------*/
2133
2134 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2135 flow. Transfers of control flow associated with EH are excluded. */
2136
2137 static bool
2138 call_can_make_abnormal_goto (gimple t)
2139 {
2140   /* If the function has no non-local labels and does not call setjmp,
2141      then a call cannot make an abnormal transfer of control.  */
2142 if (!cfun->has_nonlocal_label
2143 && !cfun->calls_setjmp)
2144 return false;
2145
2146 /* Likewise if the call has no side effects. */
2147 if (!gimple_has_side_effects (t))
2148 return false;
2149
2150 /* Likewise if the called function is leaf. */
2151 if (gimple_call_flags (t) & ECF_LEAF)
2152 return false;
2153
2154 return true;
2155 }
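
/* A sketch of the situation guarded against above (hypothetical GNU C):

     void f (void)
     {
       __label__ out;
       void g (void) { goto out; }
       h (g);
     out:;
     }

   The call to h may transfer control to OUT via the non-local goto in
   the nested function, so the call must be treated as possibly making
   an abnormal jump out of its block.  */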
2156
2157
2158 /* Return true if T can make an abnormal transfer of control flow.
2159 Transfers of control flow associated with EH are excluded. */
2160
2161 bool
2162 stmt_can_make_abnormal_goto (gimple t)
2163 {
2164 if (computed_goto_p (t))
2165 return true;
2166 if (is_gimple_call (t))
2167 return call_can_make_abnormal_goto (t);
2168 return false;
2169 }
2170
2171
2172 /* Return true if T represents a stmt that always transfers control. */
2173
2174 bool
2175 is_ctrl_stmt (gimple t)
2176 {
2177 switch (gimple_code (t))
2178 {
2179 case GIMPLE_COND:
2180 case GIMPLE_SWITCH:
2181 case GIMPLE_GOTO:
2182 case GIMPLE_RETURN:
2183 case GIMPLE_RESX:
2184 return true;
2185 default:
2186 return false;
2187 }
2188 }
2189
2190
2191 /* Return true if T is a statement that may alter the flow of control
2192 (e.g., a call to a non-returning function). */
2193
2194 bool
2195 is_ctrl_altering_stmt (gimple t)
2196 {
2197 gcc_assert (t);
2198
2199 switch (gimple_code (t))
2200 {
2201 case GIMPLE_CALL:
2202 {
2203 int flags = gimple_call_flags (t);
2204
2205 /* A call alters control flow if it can make an abnormal goto. */
2206 if (call_can_make_abnormal_goto (t))
2207 return true;
2208
2209 /* A call also alters control flow if it does not return. */
2210 if (flags & ECF_NORETURN)
2211 return true;
2212
2213 /* TM ending statements have backedges out of the transaction.
2214 Return true so we split the basic block containing them.
2215 Note that the TM_BUILTIN test is merely an optimization. */
2216 if ((flags & ECF_TM_BUILTIN)
2217 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2218 return true;
2219
2220 	/* A BUILT_IN_RETURN call is the same as a return statement.  */
2221 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2222 return true;
2223 }
2224 break;
2225
2226 case GIMPLE_EH_DISPATCH:
2227 /* EH_DISPATCH branches to the individual catch handlers at
2228 this level of a try or allowed-exceptions region. It can
2229 fallthru to the next statement as well. */
2230 return true;
2231
2232 case GIMPLE_ASM:
2233 if (gimple_asm_nlabels (t) > 0)
2234 return true;
2235 break;
2236
2237 CASE_GIMPLE_OMP:
2238 /* OpenMP directives alter control flow. */
2239 return true;
2240
2241 case GIMPLE_TRANSACTION:
2242 /* A transaction start alters control flow. */
2243 return true;
2244
2245 default:
2246 break;
2247 }
2248
2249 /* If a statement can throw, it alters control flow. */
2250 return stmt_can_throw_internal (t);
2251 }
2252
2253
2254 /* Return true if T is a simple local goto. */
2255
2256 bool
2257 simple_goto_p (gimple t)
2258 {
2259 return (gimple_code (t) == GIMPLE_GOTO
2260 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2261 }
2262
2263
2264 /* Return true if STMT should start a new basic block. PREV_STMT is
2265 the statement preceding STMT. It is used when STMT is a label or a
2266 case label. Labels should only start a new basic block if their
2267    previous statement wasn't a label.  Otherwise, a sequence of labels
2268    would generate unnecessary basic blocks that only contain a single
2269 label. */
2270
2271 static inline bool
2272 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2273 {
2274 if (stmt == NULL)
2275 return false;
2276
2277 /* Labels start a new basic block only if the preceding statement
2278 wasn't a label of the same type. This prevents the creation of
2279 consecutive blocks that have nothing but a single label. */
2280 if (gimple_code (stmt) == GIMPLE_LABEL)
2281 {
2282 /* Nonlocal and computed GOTO targets always start a new block. */
2283 if (DECL_NONLOCAL (gimple_label_label (stmt))
2284 || FORCED_LABEL (gimple_label_label (stmt)))
2285 return true;
2286
2287 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2288 {
2289 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2290 return true;
2291
2292 cfg_stats.num_merged_labels++;
2293 return false;
2294 }
2295 else
2296 return true;
2297 }
2298 else if (gimple_code (stmt) == GIMPLE_CALL
2299 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2300 	   /* setjmp acts similarly to a nonlocal GOTO target and thus should
2301 	      start a new block.  */
2302 return true;
2303
2304 return false;
2305 }
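
/* For example (illustrative), in the sequence
     L1:
     L2:
       x = 1;
   L1 starts a new basic block but L2 does not; both labels are kept in
   one block and cfg_stats.num_merged_labels is incremented.  A
   FORCED_LABEL or DECL_NONLOCAL label in the same position would still
   start its own block.  */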
2306
2307
2308 /* Return true if T should end a basic block. */
2309
2310 bool
2311 stmt_ends_bb_p (gimple t)
2312 {
2313 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2314 }
2315
2316 /* Remove block annotations and other data structures. */
2317
2318 void
2319 delete_tree_cfg_annotations (void)
2320 {
2321 vec_free (label_to_block_map);
2322 }
2323
2324
2325 /* Return the first statement in basic block BB. */
2326
2327 gimple
2328 first_stmt (basic_block bb)
2329 {
2330 gimple_stmt_iterator i = gsi_start_bb (bb);
2331 gimple stmt = NULL;
2332
2333 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2334 {
2335 gsi_next (&i);
2336 stmt = NULL;
2337 }
2338 return stmt;
2339 }
2340
2341 /* Return the first non-label statement in basic block BB. */
2342
2343 static gimple
2344 first_non_label_stmt (basic_block bb)
2345 {
2346 gimple_stmt_iterator i = gsi_start_bb (bb);
2347 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2348 gsi_next (&i);
2349 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2350 }
2351
2352 /* Return the last statement in basic block BB. */
2353
2354 gimple
2355 last_stmt (basic_block bb)
2356 {
2357 gimple_stmt_iterator i = gsi_last_bb (bb);
2358 gimple stmt = NULL;
2359
2360 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2361 {
2362 gsi_prev (&i);
2363 stmt = NULL;
2364 }
2365 return stmt;
2366 }
2367
2368 /* Return the last statement of an otherwise empty block. Return NULL
2369 if the block is totally empty, or if it contains more than one
2370 statement. */
2371
2372 gimple
2373 last_and_only_stmt (basic_block bb)
2374 {
2375 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2376 gimple last, prev;
2377
2378 if (gsi_end_p (i))
2379 return NULL;
2380
2381 last = gsi_stmt (i);
2382 gsi_prev_nondebug (&i);
2383 if (gsi_end_p (i))
2384 return last;
2385
2386 /* Empty statements should no longer appear in the instruction stream.
2387 Everything that might have appeared before should be deleted by
2388 remove_useless_stmts, and the optimizers should just gsi_remove
2389 instead of smashing with build_empty_stmt.
2390
2391 Thus the only thing that should appear here in a block containing
2392 one executable statement is a label. */
2393 prev = gsi_stmt (i);
2394 if (gimple_code (prev) == GIMPLE_LABEL)
2395 return last;
2396 else
2397 return NULL;
2398 }
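
/* E.g. (illustrative): for a block containing only
     L1:
       x_1 = y_2;
   last_and_only_stmt returns the assignment; had the block contained a
   second executable statement, NULL would be returned instead.  */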
2399
2400 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2401
2402 static void
2403 reinstall_phi_args (edge new_edge, edge old_edge)
2404 {
2405 edge_var_map_vector *v;
2406 edge_var_map *vm;
2407 int i;
2408 gimple_stmt_iterator phis;
2409
2410 v = redirect_edge_var_map_vector (old_edge);
2411 if (!v)
2412 return;
2413
2414 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2415 v->iterate (i, &vm) && !gsi_end_p (phis);
2416 i++, gsi_next (&phis))
2417 {
2418 gimple phi = gsi_stmt (phis);
2419 tree result = redirect_edge_var_map_result (vm);
2420 tree arg = redirect_edge_var_map_def (vm);
2421
2422 gcc_assert (result == gimple_phi_result (phi));
2423
2424 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2425 }
2426
2427 redirect_edge_var_map_clear (old_edge);
2428 }
2429
2430 /* Returns the basic block after which the new basic block created
2431 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2432 near its "logical" location. This is of most help to humans looking
2433 at debugging dumps. */
2434
2435 static basic_block
2436 split_edge_bb_loc (edge edge_in)
2437 {
2438 basic_block dest = edge_in->dest;
2439 basic_block dest_prev = dest->prev_bb;
2440
2441 if (dest_prev)
2442 {
2443 edge e = find_edge (dest_prev, dest);
2444 if (e && !(e->flags & EDGE_COMPLEX))
2445 return edge_in->src;
2446 }
2447 return dest_prev;
2448 }
2449
2450 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2451 Abort on abnormal edges. */
2452
2453 static basic_block
2454 gimple_split_edge (edge edge_in)
2455 {
2456 basic_block new_bb, after_bb, dest;
2457 edge new_edge, e;
2458
2459 /* Abnormal edges cannot be split. */
2460 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2461
2462 dest = edge_in->dest;
2463
2464 after_bb = split_edge_bb_loc (edge_in);
2465
2466 new_bb = create_empty_bb (after_bb);
2467 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2468 new_bb->count = edge_in->count;
2469 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2470 new_edge->probability = REG_BR_PROB_BASE;
2471 new_edge->count = edge_in->count;
2472
2473 e = redirect_edge_and_branch (edge_in, new_bb);
2474 gcc_assert (e == edge_in);
2475 reinstall_phi_args (new_edge, e);
2476
2477 return new_bb;
2478 }
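
/* Sketch of the transformation above (illustrative): splitting the
   edge E: A -> B rewires the CFG as
     A -> NEW_BB -> B
   where NEW_BB is empty, inherits E's count and frequency, and falls
   through to B; PHI arguments queued on the redirected edge are moved
   onto the NEW_BB -> B edge by reinstall_phi_args.  */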
2479
2480
2481 /* Verify properties of the address expression T with base object BASE. */
2482
2483 static tree
2484 verify_address (tree t, tree base)
2485 {
2486 bool old_constant;
2487 bool old_side_effects;
2488 bool new_constant;
2489 bool new_side_effects;
2490
2491 old_constant = TREE_CONSTANT (t);
2492 old_side_effects = TREE_SIDE_EFFECTS (t);
2493
2494 recompute_tree_invariant_for_addr_expr (t);
2495 new_side_effects = TREE_SIDE_EFFECTS (t);
2496 new_constant = TREE_CONSTANT (t);
2497
2498 if (old_constant != new_constant)
2499 {
2500 error ("constant not recomputed when ADDR_EXPR changed");
2501 return t;
2502 }
2503 if (old_side_effects != new_side_effects)
2504 {
2505 error ("side effects not recomputed when ADDR_EXPR changed");
2506 return t;
2507 }
2508
2509 if (!(TREE_CODE (base) == VAR_DECL
2510 || TREE_CODE (base) == PARM_DECL
2511 || TREE_CODE (base) == RESULT_DECL))
2512 return NULL_TREE;
2513
2514 if (DECL_GIMPLE_REG_P (base))
2515 {
2516 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2517 return base;
2518 }
2519
2520 return NULL_TREE;
2521 }
2522
2523 /* Callback for walk_tree; check that all elements with address taken are
2524 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2525 inside a PHI node. */
2526
2527 static tree
2528 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2529 {
2530 tree t = *tp, x;
2531
2532 if (TYPE_P (t))
2533 *walk_subtrees = 0;
2534
2535 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2536 #define CHECK_OP(N, MSG) \
2537 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2538 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2539
2540 switch (TREE_CODE (t))
2541 {
2542 case SSA_NAME:
2543 if (SSA_NAME_IN_FREE_LIST (t))
2544 {
2545 error ("SSA name in freelist but still referenced");
2546 return *tp;
2547 }
2548 break;
2549
2550 case INDIRECT_REF:
2551 error ("INDIRECT_REF in gimple IL");
2552 return t;
2553
2554 case MEM_REF:
2555 x = TREE_OPERAND (t, 0);
2556 if (!POINTER_TYPE_P (TREE_TYPE (x))
2557 || !is_gimple_mem_ref_addr (x))
2558 {
2559 error ("invalid first operand of MEM_REF");
2560 return x;
2561 }
2562 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2563 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2564 {
2565 error ("invalid offset operand of MEM_REF");
2566 return TREE_OPERAND (t, 1);
2567 }
2568 if (TREE_CODE (x) == ADDR_EXPR
2569 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2570 return x;
2571 *walk_subtrees = 0;
2572 break;
2573
2574 case ASSERT_EXPR:
2575 x = fold (ASSERT_EXPR_COND (t));
2576 if (x == boolean_false_node)
2577 {
2578 error ("ASSERT_EXPR with an always-false condition");
2579 return *tp;
2580 }
2581 break;
2582
2583 case MODIFY_EXPR:
2584 error ("MODIFY_EXPR not expected while having tuples");
2585 return *tp;
2586
2587 case ADDR_EXPR:
2588 {
2589 tree tem;
2590
2591 gcc_assert (is_gimple_address (t));
2592
2593 /* Skip any references (they will be checked when we recurse down the
2594 tree) and ensure that any variable used as a prefix is marked
2595 addressable. */
2596 for (x = TREE_OPERAND (t, 0);
2597 handled_component_p (x);
2598 x = TREE_OPERAND (x, 0))
2599 ;
2600
2601 if ((tem = verify_address (t, x)))
2602 return tem;
2603
2604 if (!(TREE_CODE (x) == VAR_DECL
2605 || TREE_CODE (x) == PARM_DECL
2606 || TREE_CODE (x) == RESULT_DECL))
2607 return NULL;
2608
2609 if (!TREE_ADDRESSABLE (x))
2610 {
2611 error ("address taken, but ADDRESSABLE bit not set");
2612 return x;
2613 }
2614
2615 break;
2616 }
2617
2618 case COND_EXPR:
2619 x = COND_EXPR_COND (t);
2620 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2621 {
2622 error ("non-integral used in condition");
2623 return x;
2624 }
2625 if (!is_gimple_condexpr (x))
2626 {
2627 error ("invalid conditional operand");
2628 return x;
2629 }
2630 break;
2631
2632 case NON_LVALUE_EXPR:
2633 case TRUTH_NOT_EXPR:
2634 gcc_unreachable ();
2635
2636 CASE_CONVERT:
2637 case FIX_TRUNC_EXPR:
2638 case FLOAT_EXPR:
2639 case NEGATE_EXPR:
2640 case ABS_EXPR:
2641 case BIT_NOT_EXPR:
2642 CHECK_OP (0, "invalid operand to unary operator");
2643 break;
2644
2645 case REALPART_EXPR:
2646 case IMAGPART_EXPR:
2647 case BIT_FIELD_REF:
2648 if (!is_gimple_reg_type (TREE_TYPE (t)))
2649 {
2650 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2651 return t;
2652 }
2653
2654 if (TREE_CODE (t) == BIT_FIELD_REF)
2655 {
2656 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2657 || !host_integerp (TREE_OPERAND (t, 2), 1))
2658 {
2659 error ("invalid position or size operand to BIT_FIELD_REF");
2660 return t;
2661 }
2662 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2663 && (TYPE_PRECISION (TREE_TYPE (t))
2664 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2665 {
2666 error ("integral result type precision does not match "
2667 "field size of BIT_FIELD_REF");
2668 return t;
2669 }
2670 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2671 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2672 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2673 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2674 {
2675 error ("mode precision of non-integral result does not "
2676 "match field size of BIT_FIELD_REF");
2677 return t;
2678 }
2679 }
2680 t = TREE_OPERAND (t, 0);
2681
2682 /* Fall-through. */
2683 case COMPONENT_REF:
2684 case ARRAY_REF:
2685 case ARRAY_RANGE_REF:
2686 case VIEW_CONVERT_EXPR:
2687 /* We have a nest of references. Verify that each of the operands
2688 that determine where to reference is either a constant or a variable,
2689 verify that the base is valid, and then show we've already checked
2690 the subtrees. */
2691 while (handled_component_p (t))
2692 {
2693 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2694 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2695 else if (TREE_CODE (t) == ARRAY_REF
2696 || TREE_CODE (t) == ARRAY_RANGE_REF)
2697 {
2698 CHECK_OP (1, "invalid array index");
2699 if (TREE_OPERAND (t, 2))
2700 CHECK_OP (2, "invalid array lower bound");
2701 if (TREE_OPERAND (t, 3))
2702 CHECK_OP (3, "invalid array stride");
2703 }
2704 else if (TREE_CODE (t) == BIT_FIELD_REF
2705 || TREE_CODE (t) == REALPART_EXPR
2706 || TREE_CODE (t) == IMAGPART_EXPR)
2707 {
2708 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2709 "REALPART_EXPR");
2710 return t;
2711 }
2712
2713 t = TREE_OPERAND (t, 0);
2714 }
2715
2716 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2717 {
2718 error ("invalid reference prefix");
2719 return t;
2720 }
2721 *walk_subtrees = 0;
2722 break;
2723 case PLUS_EXPR:
2724 case MINUS_EXPR:
2725       /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
2726 	 arithmetic should be done using POINTER_PLUS_EXPR.  */
2727 if (POINTER_TYPE_P (TREE_TYPE (t)))
2728 {
2729 error ("invalid operand to plus/minus, type is a pointer");
2730 return t;
2731 }
2732 CHECK_OP (0, "invalid operand to binary operator");
2733 CHECK_OP (1, "invalid operand to binary operator");
2734 break;
2735
2736 case POINTER_PLUS_EXPR:
2737 /* Check to make sure the first operand is a pointer or reference type. */
2738 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2739 {
2740 error ("invalid operand to pointer plus, first operand is not a pointer");
2741 return t;
2742 }
2743 /* Check to make sure the second operand is a ptrofftype. */
2744 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2745 {
2746 error ("invalid operand to pointer plus, second operand is not an "
2747 "integer type of appropriate width");
2748 return t;
2749 }
2750 /* FALLTHROUGH */
2751 case LT_EXPR:
2752 case LE_EXPR:
2753 case GT_EXPR:
2754 case GE_EXPR:
2755 case EQ_EXPR:
2756 case NE_EXPR:
2757 case UNORDERED_EXPR:
2758 case ORDERED_EXPR:
2759 case UNLT_EXPR:
2760 case UNLE_EXPR:
2761 case UNGT_EXPR:
2762 case UNGE_EXPR:
2763 case UNEQ_EXPR:
2764 case LTGT_EXPR:
2765 case MULT_EXPR:
2766 case TRUNC_DIV_EXPR:
2767 case CEIL_DIV_EXPR:
2768 case FLOOR_DIV_EXPR:
2769 case ROUND_DIV_EXPR:
2770 case TRUNC_MOD_EXPR:
2771 case CEIL_MOD_EXPR:
2772 case FLOOR_MOD_EXPR:
2773 case ROUND_MOD_EXPR:
2774 case RDIV_EXPR:
2775 case EXACT_DIV_EXPR:
2776 case MIN_EXPR:
2777 case MAX_EXPR:
2778 case LSHIFT_EXPR:
2779 case RSHIFT_EXPR:
2780 case LROTATE_EXPR:
2781 case RROTATE_EXPR:
2782 case BIT_IOR_EXPR:
2783 case BIT_XOR_EXPR:
2784 case BIT_AND_EXPR:
2785 CHECK_OP (0, "invalid operand to binary operator");
2786 CHECK_OP (1, "invalid operand to binary operator");
2787 break;
2788
2789 case CONSTRUCTOR:
2790 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2791 *walk_subtrees = 0;
2792 break;
2793
2794 case CASE_LABEL_EXPR:
2795 if (CASE_CHAIN (t))
2796 {
2797 error ("invalid CASE_CHAIN");
2798 return t;
2799 }
2800 break;
2801
2802 default:
2803 break;
2804 }
2805 return NULL;
2806
2807 #undef CHECK_OP
2808 }
2809
2810
2811 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2812 Returns true if there is an error, otherwise false. */
2813
2814 static bool
2815 verify_types_in_gimple_min_lval (tree expr)
2816 {
2817 tree op;
2818
2819 if (is_gimple_id (expr))
2820 return false;
2821
2822 if (TREE_CODE (expr) != TARGET_MEM_REF
2823 && TREE_CODE (expr) != MEM_REF)
2824 {
2825 error ("invalid expression for min lvalue");
2826 return true;
2827 }
2828
2829 /* TARGET_MEM_REFs are strange beasts. */
2830 if (TREE_CODE (expr) == TARGET_MEM_REF)
2831 return false;
2832
2833 op = TREE_OPERAND (expr, 0);
2834 if (!is_gimple_val (op))
2835 {
2836 error ("invalid operand in indirect reference");
2837 debug_generic_stmt (op);
2838 return true;
2839 }
2840 /* Memory references now generally can involve a value conversion. */
2841
2842 return false;
2843 }
2844
2845 /* Verify if EXPR is a valid GIMPLE reference expression. If
2846 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2847 if there is an error, otherwise false. */
2848
2849 static bool
2850 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2851 {
2852 while (handled_component_p (expr))
2853 {
2854 tree op = TREE_OPERAND (expr, 0);
2855
2856 if (TREE_CODE (expr) == ARRAY_REF
2857 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2858 {
2859 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2860 || (TREE_OPERAND (expr, 2)
2861 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2862 || (TREE_OPERAND (expr, 3)
2863 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2864 {
2865 error ("invalid operands to array reference");
2866 debug_generic_stmt (expr);
2867 return true;
2868 }
2869 }
2870
2871 /* Verify if the reference array element types are compatible. */
2872 if (TREE_CODE (expr) == ARRAY_REF
2873 && !useless_type_conversion_p (TREE_TYPE (expr),
2874 TREE_TYPE (TREE_TYPE (op))))
2875 {
2876 error ("type mismatch in array reference");
2877 debug_generic_stmt (TREE_TYPE (expr));
2878 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2879 return true;
2880 }
2881 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2882 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2883 TREE_TYPE (TREE_TYPE (op))))
2884 {
2885 error ("type mismatch in array range reference");
2886 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2887 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2888 return true;
2889 }
2890
2891 if ((TREE_CODE (expr) == REALPART_EXPR
2892 || TREE_CODE (expr) == IMAGPART_EXPR)
2893 && !useless_type_conversion_p (TREE_TYPE (expr),
2894 TREE_TYPE (TREE_TYPE (op))))
2895 {
2896 error ("type mismatch in real/imagpart reference");
2897 debug_generic_stmt (TREE_TYPE (expr));
2898 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2899 return true;
2900 }
2901
2902 if (TREE_CODE (expr) == COMPONENT_REF
2903 && !useless_type_conversion_p (TREE_TYPE (expr),
2904 TREE_TYPE (TREE_OPERAND (expr, 1))))
2905 {
2906 error ("type mismatch in component reference");
2907 debug_generic_stmt (TREE_TYPE (expr));
2908 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2909 return true;
2910 }
2911
2912 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2913 {
2914 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2915 that their operand is not an SSA name or an invariant when
2916 	     requiring an lvalue (this usually means there is an SRA or IPA-SRA
2917 	     bug).  Otherwise there is nothing to verify; gross mismatches at
2918 	     most invoke undefined behavior.  */
2919 if (require_lvalue
2920 && (TREE_CODE (op) == SSA_NAME
2921 || is_gimple_min_invariant (op)))
2922 {
2923 error ("conversion of an SSA_NAME on the left hand side");
2924 debug_generic_stmt (expr);
2925 return true;
2926 }
2927 else if (TREE_CODE (op) == SSA_NAME
2928 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2929 {
2930 error ("conversion of register to a different size");
2931 debug_generic_stmt (expr);
2932 return true;
2933 }
2934 else if (!handled_component_p (op))
2935 return false;
2936 }
2937
2938 expr = op;
2939 }
2940
2941 if (TREE_CODE (expr) == MEM_REF)
2942 {
2943 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
2944 {
2945 error ("invalid address operand in MEM_REF");
2946 debug_generic_stmt (expr);
2947 return true;
2948 }
2949 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
2950 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
2951 {
2952 error ("invalid offset operand in MEM_REF");
2953 debug_generic_stmt (expr);
2954 return true;
2955 }
2956 }
2957 else if (TREE_CODE (expr) == TARGET_MEM_REF)
2958 {
2959 if (!TMR_BASE (expr)
2960 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
2961 {
2962 error ("invalid address operand in TARGET_MEM_REF");
2963 return true;
2964 }
2965 if (!TMR_OFFSET (expr)
2966 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
2967 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
2968 {
2969 error ("invalid offset operand in TARGET_MEM_REF");
2970 debug_generic_stmt (expr);
2971 return true;
2972 }
2973 }
2974
2975 return ((require_lvalue || !is_gimple_min_invariant (expr))
2976 && verify_types_in_gimple_min_lval (expr));
2977 }
2978
2979 /* Returns true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ) list
2980    of pointer-to types is trivially convertible to DEST, or if SRC_OBJ has
2981    no pointer-to types recorded at all.  */
2981
2982 static bool
2983 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2984 {
2985 tree src;
2986
2987 if (!TYPE_POINTER_TO (src_obj))
2988 return true;
2989
2990 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2991 if (useless_type_conversion_p (dest, src))
2992 return true;
2993
2994 return false;
2995 }
2996
2997 /* Return true if TYPE1 is a fixed-point type and if conversions to and
2998 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
2999
3000 static bool
3001 valid_fixed_convert_types_p (tree type1, tree type2)
3002 {
3003 return (FIXED_POINT_TYPE_P (type1)
3004 && (INTEGRAL_TYPE_P (type2)
3005 || SCALAR_FLOAT_TYPE_P (type2)
3006 || FIXED_POINT_TYPE_P (type2)));
3007 }
3008
3009 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3010 is a problem, otherwise false. */
3011
3012 static bool
3013 verify_gimple_call (gimple stmt)
3014 {
3015 tree fn = gimple_call_fn (stmt);
3016 tree fntype, fndecl;
3017 unsigned i;
3018
3019 if (gimple_call_internal_p (stmt))
3020 {
3021 if (fn)
3022 {
3023 error ("gimple call has two targets");
3024 debug_generic_stmt (fn);
3025 return true;
3026 }
3027 }
3028 else
3029 {
3030 if (!fn)
3031 {
3032 error ("gimple call has no target");
3033 return true;
3034 }
3035 }
3036
3037 if (fn && !is_gimple_call_addr (fn))
3038 {
3039 error ("invalid function in gimple call");
3040 debug_generic_stmt (fn);
3041 return true;
3042 }
3043
3044 if (fn
3045 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3046 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3047 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3048 {
3049 error ("non-function in gimple call");
3050 return true;
3051 }
3052
3053 fndecl = gimple_call_fndecl (stmt);
3054 if (fndecl
3055 && TREE_CODE (fndecl) == FUNCTION_DECL
3056 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3057 && !DECL_PURE_P (fndecl)
3058 && !TREE_READONLY (fndecl))
3059 {
3060 error ("invalid pure const state for function");
3061 return true;
3062 }
3063
3064 if (gimple_call_lhs (stmt)
3065 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3066 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3067 {
3068 error ("invalid LHS in gimple call");
3069 return true;
3070 }
3071
3072 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3073 {
3074 error ("LHS in noreturn call");
3075 return true;
3076 }
3077
3078 fntype = gimple_call_fntype (stmt);
3079 if (fntype
3080 && gimple_call_lhs (stmt)
3081 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3082 TREE_TYPE (fntype))
3083 /* ??? At least C++ misses conversions at assignments from
3084 void * call results.
3085 ??? Java is completely off. Especially with functions
3086 returning java.lang.Object.
3087 For now simply allow arbitrary pointer type conversions. */
3088 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3089 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3090 {
3091 error ("invalid conversion in gimple call");
3092 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3093 debug_generic_stmt (TREE_TYPE (fntype));
3094 return true;
3095 }
3096
3097 if (gimple_call_chain (stmt)
3098 && !is_gimple_val (gimple_call_chain (stmt)))
3099 {
3100 error ("invalid static chain in gimple call");
3101 debug_generic_stmt (gimple_call_chain (stmt));
3102 return true;
3103 }
3104
3105 /* If there is a static chain argument, this should not be an indirect
3106 call, and the decl should have DECL_STATIC_CHAIN set. */
3107 if (gimple_call_chain (stmt))
3108 {
3109 if (!gimple_call_fndecl (stmt))
3110 {
3111 error ("static chain in indirect gimple call");
3112 return true;
3113 }
3114 fn = TREE_OPERAND (fn, 0);
3115
3116 if (!DECL_STATIC_CHAIN (fn))
3117 {
3118 error ("static chain with function that doesn%'t use one");
3119 return true;
3120 }
3121 }
3122
3123 /* ??? The C frontend passes unpromoted arguments in case it
3124 didn't see a function declaration before the call. So for now
3125 leave the call arguments mostly unverified. Once we gimplify
3126 unit-at-a-time we have a chance to fix this. */
3127
3128 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3129 {
3130 tree arg = gimple_call_arg (stmt, i);
3131 if ((is_gimple_reg_type (TREE_TYPE (arg))
3132 && !is_gimple_val (arg))
3133 || (!is_gimple_reg_type (TREE_TYPE (arg))
3134 && !is_gimple_lvalue (arg)))
3135 {
3136 error ("invalid argument to gimple call");
3137 debug_generic_expr (arg);
3138 return true;
3139 }
3140 }
3141
3142 return false;
3143 }
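
/* An illustrative example of the static chain rule above (hypothetical
   GNU C): in

     void outer (void)
     {
       int i = 0;
       void inner (void) { i++; }
       inner ();
     }

   the gimple call to inner carries a static chain operand, the call is
   direct, and INNER has DECL_STATIC_CHAIN set; the same chain on an
   indirect call would be rejected.  */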
3144
3145 /* Verifies the gimple comparison with the result type TYPE and
3146 the operands OP0 and OP1. */
3147
3148 static bool
3149 verify_gimple_comparison (tree type, tree op0, tree op1)
3150 {
3151 tree op0_type = TREE_TYPE (op0);
3152 tree op1_type = TREE_TYPE (op1);
3153
3154 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3155 {
3156 error ("invalid operands in gimple comparison");
3157 return true;
3158 }
3159
3160   /* For comparisons we do not have the operation's type as the
3161      effective type the comparison is carried out in.  Instead
3162 we require that either the first operand is trivially
3163 convertible into the second, or the other way around.
3164 Because we special-case pointers to void we allow
3165 comparisons of pointers with the same mode as well. */
3166 if (!useless_type_conversion_p (op0_type, op1_type)
3167 && !useless_type_conversion_p (op1_type, op0_type)
3168 && (!POINTER_TYPE_P (op0_type)
3169 || !POINTER_TYPE_P (op1_type)
3170 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3171 {
3172 error ("mismatching comparison operand types");
3173 debug_generic_expr (op0_type);
3174 debug_generic_expr (op1_type);
3175 return true;
3176 }
3177
3178 /* The resulting type of a comparison may be an effective boolean type. */
3179 if (INTEGRAL_TYPE_P (type)
3180 && (TREE_CODE (type) == BOOLEAN_TYPE
3181 || TYPE_PRECISION (type) == 1))
3182 {
3183 if (TREE_CODE (op0_type) == VECTOR_TYPE
3184 || TREE_CODE (op1_type) == VECTOR_TYPE)
3185 {
3186 error ("vector comparison returning a boolean");
3187 debug_generic_expr (op0_type);
3188 debug_generic_expr (op1_type);
3189 return true;
3190 }
3191 }
3192 /* Or an integer vector type with the same size and element count
3193 as the comparison operand types. */
3194 else if (TREE_CODE (type) == VECTOR_TYPE
3195 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3196 {
3197 if (TREE_CODE (op0_type) != VECTOR_TYPE
3198 || TREE_CODE (op1_type) != VECTOR_TYPE)
3199 {
3200 error ("non-vector operands in vector comparison");
3201 debug_generic_expr (op0_type);
3202 debug_generic_expr (op1_type);
3203 return true;
3204 }
3205
3206 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3207 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3208 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3209 /* The result of a vector comparison is of signed
3210 integral type. */
3211 || TYPE_UNSIGNED (TREE_TYPE (type)))
3212 {
3213 error ("invalid vector comparison resulting type");
3214 debug_generic_expr (type);
3215 return true;
3216 }
3217 }
3218 else
3219 {
3220 error ("bogus comparison result type");
3221 debug_generic_expr (type);
3222 return true;
3223 }
3224
3225 return false;
3226 }
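
/* Two illustrative sketches of the rules above:
     _1 = a_2 < b_3;            scalar comparison with a boolean-typed
                                result;
     v4si_4 = v4sf_5 < v4sf_6;  vector comparison whose result is a
                                signed integer vector with the same
                                element count and element size as the
                                operands.
   A vector comparison yielding a plain boolean, or an unsigned or
   wrongly sized vector result, is diagnosed above.  */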
3227
3228 /* Verify a gimple assignment statement STMT with an unary rhs.
3229 Returns true if anything is wrong. */
3230
3231 static bool
3232 verify_gimple_assign_unary (gimple stmt)
3233 {
3234 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3235 tree lhs = gimple_assign_lhs (stmt);
3236 tree lhs_type = TREE_TYPE (lhs);
3237 tree rhs1 = gimple_assign_rhs1 (stmt);
3238 tree rhs1_type = TREE_TYPE (rhs1);
3239
3240 if (!is_gimple_reg (lhs))
3241 {
3242 error ("non-register as LHS of unary operation");
3243 return true;
3244 }
3245
3246 if (!is_gimple_val (rhs1))
3247 {
3248 error ("invalid operand in unary operation");
3249 return true;
3250 }
3251
3252 /* First handle conversions. */
3253 switch (rhs_code)
3254 {
3255 CASE_CONVERT:
3256 {
3257 /* Allow conversions from pointer type to integral type only if
3258 there is no sign or zero extension involved.
3259 	   For targets where the precision of ptrofftype doesn't match that
3260 of pointers we need to allow arbitrary conversions to ptrofftype. */
3261 if ((POINTER_TYPE_P (lhs_type)
3262 && INTEGRAL_TYPE_P (rhs1_type))
3263 || (POINTER_TYPE_P (rhs1_type)
3264 && INTEGRAL_TYPE_P (lhs_type)
3265 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3266 || ptrofftype_p (sizetype))))
3267 return false;
3268
3269 /* Allow conversion from integral to offset type and vice versa. */
3270 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3271 && INTEGRAL_TYPE_P (rhs1_type))
3272 || (INTEGRAL_TYPE_P (lhs_type)
3273 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3274 return false;
3275
3276 /* Otherwise assert we are converting between types of the
3277 same kind. */
3278 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3279 {
3280 error ("invalid types in nop conversion");
3281 debug_generic_expr (lhs_type);
3282 debug_generic_expr (rhs1_type);
3283 return true;
3284 }
3285
3286 return false;
3287 }
3288
3289 case ADDR_SPACE_CONVERT_EXPR:
3290 {
3291 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3292 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3293 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3294 {
3295 error ("invalid types in address space conversion");
3296 debug_generic_expr (lhs_type);
3297 debug_generic_expr (rhs1_type);
3298 return true;
3299 }
3300
3301 return false;
3302 }
3303
3304 case FIXED_CONVERT_EXPR:
3305 {
3306 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3307 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3308 {
3309 error ("invalid types in fixed-point conversion");
3310 debug_generic_expr (lhs_type);
3311 debug_generic_expr (rhs1_type);
3312 return true;
3313 }
3314
3315 return false;
3316 }
3317
3318 case FLOAT_EXPR:
3319 {
3320 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3321 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3322 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3323 {
3324 error ("invalid types in conversion to floating point");
3325 debug_generic_expr (lhs_type);
3326 debug_generic_expr (rhs1_type);
3327 return true;
3328 }
3329
3330 return false;
3331 }
3332
3333 case FIX_TRUNC_EXPR:
3334 {
3335 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3336 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3337 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3338 {
3339 error ("invalid types in conversion to integer");
3340 debug_generic_expr (lhs_type);
3341 debug_generic_expr (rhs1_type);
3342 return true;
3343 }
3344
3345 return false;
3346 }
3347
3348 case VEC_UNPACK_HI_EXPR:
3349 case VEC_UNPACK_LO_EXPR:
3350 case REDUC_MAX_EXPR:
3351 case REDUC_MIN_EXPR:
3352 case REDUC_PLUS_EXPR:
3353 case VEC_UNPACK_FLOAT_HI_EXPR:
3354 case VEC_UNPACK_FLOAT_LO_EXPR:
3355 /* FIXME. */
3356 return false;
3357
3358 case NEGATE_EXPR:
3359 case ABS_EXPR:
3360 case BIT_NOT_EXPR:
3361 case PAREN_EXPR:
3362 case NON_LVALUE_EXPR:
3363 case CONJ_EXPR:
3364 break;
3365
3366 default:
3367 gcc_unreachable ();
3368 }
3369
3370 /* For the remaining codes assert there is no conversion involved. */
3371 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3372 {
3373 error ("non-trivial conversion in unary operation");
3374 debug_generic_expr (lhs_type);
3375 debug_generic_expr (rhs1_type);
3376 return true;
3377 }
3378
3379 return false;
3380 }
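
/* Illustrative sketches of the conversion rules above (hypothetical
   SSA names, LP64 assumed for the pointer case):
     long _1 = (long) i_2;    nop conversion between integral types;
     long _3 = (long) p_4;    pointer-to-integral, accepted since no
                              sign or zero extension is involved;
     float _5 = (float) i_2;  handled as FLOAT_EXPR, not as a nop.  */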
3381
3382 /* Verify a gimple assignment statement STMT with a binary rhs.
3383 Returns true if anything is wrong. */
3384
3385 static bool
3386 verify_gimple_assign_binary (gimple stmt)
3387 {
3388 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3389 tree lhs = gimple_assign_lhs (stmt);
3390 tree lhs_type = TREE_TYPE (lhs);
3391 tree rhs1 = gimple_assign_rhs1 (stmt);
3392 tree rhs1_type = TREE_TYPE (rhs1);
3393 tree rhs2 = gimple_assign_rhs2 (stmt);
3394 tree rhs2_type = TREE_TYPE (rhs2);
3395
3396 if (!is_gimple_reg (lhs))
3397 {
3398 error ("non-register as LHS of binary operation");
3399 return true;
3400 }
3401
3402 if (!is_gimple_val (rhs1)
3403 || !is_gimple_val (rhs2))
3404 {
3405 error ("invalid operands in binary operation");
3406 return true;
3407 }
3408
3409 /* First handle operations that involve different types. */
3410 switch (rhs_code)
3411 {
3412 case COMPLEX_EXPR:
3413 {
3414 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3415 || !(INTEGRAL_TYPE_P (rhs1_type)
3416 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3417 || !(INTEGRAL_TYPE_P (rhs2_type)
3418 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3419 {
3420 error ("type mismatch in complex expression");
3421 debug_generic_expr (lhs_type);
3422 debug_generic_expr (rhs1_type);
3423 debug_generic_expr (rhs2_type);
3424 return true;
3425 }
3426
3427 return false;
3428 }
3429
3430 case LSHIFT_EXPR:
3431 case RSHIFT_EXPR:
3432 case LROTATE_EXPR:
3433 case RROTATE_EXPR:
3434 {
3435 /* Shifts and rotates are ok on integral types, fixed point
3436 types and integer vector types. */
3437 if ((!INTEGRAL_TYPE_P (rhs1_type)
3438 && !FIXED_POINT_TYPE_P (rhs1_type)
3439 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3440 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3441 || (!INTEGRAL_TYPE_P (rhs2_type)
3442 /* Vector shifts of vectors are also ok. */
3443 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3444 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3445 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3446 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3447 || !useless_type_conversion_p (lhs_type, rhs1_type))
3448 {
3449 error ("type mismatch in shift expression");
3450 debug_generic_expr (lhs_type);
3451 debug_generic_expr (rhs1_type);
3452 debug_generic_expr (rhs2_type);
3453 return true;
3454 }
3455
3456 return false;
3457 }
3458
3459 case VEC_LSHIFT_EXPR:
3460 case VEC_RSHIFT_EXPR:
3461 {
3462 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3463 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3464 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3465 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3466 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3467 || (!INTEGRAL_TYPE_P (rhs2_type)
3468 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3469 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3470 || !useless_type_conversion_p (lhs_type, rhs1_type))
3471 {
3472 error ("type mismatch in vector shift expression");
3473 debug_generic_expr (lhs_type);
3474 debug_generic_expr (rhs1_type);
3475 debug_generic_expr (rhs2_type);
3476 return true;
3477 }
3478 /* For shifting a vector of non-integral components we
3479 only allow shifting by a constant multiple of the element size. */
3480 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3481 && (TREE_CODE (rhs2) != INTEGER_CST
3482 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3483 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3484 {
3485 error ("non-element sized vector shift of floating point vector");
3486 return true;
3487 }
3488
3489 return false;
3490 }
3491
3492 case WIDEN_LSHIFT_EXPR:
3493 {
3494 if (!INTEGRAL_TYPE_P (lhs_type)
3495 || !INTEGRAL_TYPE_P (rhs1_type)
3496 || TREE_CODE (rhs2) != INTEGER_CST
3497 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3498 {
3499 error ("type mismatch in widening vector shift expression");
3500 debug_generic_expr (lhs_type);
3501 debug_generic_expr (rhs1_type);
3502 debug_generic_expr (rhs2_type);
3503 return true;
3504 }
3505
3506 return false;
3507 }
3508
3509 case VEC_WIDEN_LSHIFT_HI_EXPR:
3510 case VEC_WIDEN_LSHIFT_LO_EXPR:
3511 {
3512 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3513 || TREE_CODE (lhs_type) != VECTOR_TYPE
3514 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3515 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3516 || TREE_CODE (rhs2) != INTEGER_CST
3517 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3518 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3519 {
3520 error ("type mismatch in widening vector shift expression");
3521 debug_generic_expr (lhs_type);
3522 debug_generic_expr (rhs1_type);
3523 debug_generic_expr (rhs2_type);
3524 return true;
3525 }
3526
3527 return false;
3528 }
3529
3530 case PLUS_EXPR:
3531 case MINUS_EXPR:
3532 {
3533 tree lhs_etype = lhs_type;
3534 tree rhs1_etype = rhs1_type;
3535 tree rhs2_etype = rhs2_type;
3536 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3537 {
3538 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3539 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3540 {
3541 error ("invalid non-vector operands to vector valued plus");
3542 return true;
3543 }
3544 lhs_etype = TREE_TYPE (lhs_type);
3545 rhs1_etype = TREE_TYPE (rhs1_type);
3546 rhs2_etype = TREE_TYPE (rhs2_type);
3547 }
3548 if (POINTER_TYPE_P (lhs_etype)
3549 || POINTER_TYPE_P (rhs1_etype)
3550 || POINTER_TYPE_P (rhs2_etype))
3551 {
3552 error ("invalid (pointer) operands to plus/minus");
3553 return true;
3554 }
3555
3556 /* Continue with generic binary expression handling. */
3557 break;
3558 }
3559
3560 case POINTER_PLUS_EXPR:
3561 {
3562 if (!POINTER_TYPE_P (rhs1_type)
3563 || !useless_type_conversion_p (lhs_type, rhs1_type)
3564 || !ptrofftype_p (rhs2_type))
3565 {
3566 error ("type mismatch in pointer plus expression");
3567 debug_generic_stmt (lhs_type);
3568 debug_generic_stmt (rhs1_type);
3569 debug_generic_stmt (rhs2_type);
3570 return true;
3571 }
3572
3573 return false;
3574 }
3575
3576 case TRUTH_ANDIF_EXPR:
3577 case TRUTH_ORIF_EXPR:
3578 case TRUTH_AND_EXPR:
3579 case TRUTH_OR_EXPR:
3580 case TRUTH_XOR_EXPR:
3581
3582 gcc_unreachable ();
3583
3584 case LT_EXPR:
3585 case LE_EXPR:
3586 case GT_EXPR:
3587 case GE_EXPR:
3588 case EQ_EXPR:
3589 case NE_EXPR:
3590 case UNORDERED_EXPR:
3591 case ORDERED_EXPR:
3592 case UNLT_EXPR:
3593 case UNLE_EXPR:
3594 case UNGT_EXPR:
3595 case UNGE_EXPR:
3596 case UNEQ_EXPR:
3597 case LTGT_EXPR:
3598 /* Comparisons are also binary, but the result type is not
3599 connected to the operand types. */
3600 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3601
3602 case WIDEN_MULT_EXPR:
3603 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3604 return true;
3605 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3606 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3607
3608 case WIDEN_SUM_EXPR:
3609 case VEC_WIDEN_MULT_HI_EXPR:
3610 case VEC_WIDEN_MULT_LO_EXPR:
3611 case VEC_WIDEN_MULT_EVEN_EXPR:
3612 case VEC_WIDEN_MULT_ODD_EXPR:
3613 case VEC_PACK_TRUNC_EXPR:
3614 case VEC_PACK_SAT_EXPR:
3615 case VEC_PACK_FIX_TRUNC_EXPR:
3616 /* FIXME. */
3617 return false;
3618
3619 case MULT_EXPR:
3620 case MULT_HIGHPART_EXPR:
3621 case TRUNC_DIV_EXPR:
3622 case CEIL_DIV_EXPR:
3623 case FLOOR_DIV_EXPR:
3624 case ROUND_DIV_EXPR:
3625 case TRUNC_MOD_EXPR:
3626 case CEIL_MOD_EXPR:
3627 case FLOOR_MOD_EXPR:
3628 case ROUND_MOD_EXPR:
3629 case RDIV_EXPR:
3630 case EXACT_DIV_EXPR:
3631 case MIN_EXPR:
3632 case MAX_EXPR:
3633 case BIT_IOR_EXPR:
3634 case BIT_XOR_EXPR:
3635 case BIT_AND_EXPR:
3636 /* Continue with generic binary expression handling. */
3637 break;
3638
3639 default:
3640 gcc_unreachable ();
3641 }
3642
3643 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3644 || !useless_type_conversion_p (lhs_type, rhs2_type))
3645 {
3646 error ("type mismatch in binary expression");
3647 debug_generic_stmt (lhs_type);
3648 debug_generic_stmt (rhs1_type);
3649 debug_generic_stmt (rhs2_type);
3650 return true;
3651 }
3652
3653 return false;
3654 }
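
/* Illustrative sketch of a well-formed WIDEN_MULT_EXPR checked above:
   with 32-bit ints a_1 and b_2 and a 64-bit long result,
     c_3 = a_1 w* b_2;
   both operand precisions match (32) and twice that precision fits in
   the result (64), so the verifier accepts it.  */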
3655
3656 /* Verify a gimple assignment statement STMT with a ternary rhs.
3657 Returns true if anything is wrong. */
3658
3659 static bool
3660 verify_gimple_assign_ternary (gimple stmt)
3661 {
3662 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3663 tree lhs = gimple_assign_lhs (stmt);
3664 tree lhs_type = TREE_TYPE (lhs);
3665 tree rhs1 = gimple_assign_rhs1 (stmt);
3666 tree rhs1_type = TREE_TYPE (rhs1);
3667 tree rhs2 = gimple_assign_rhs2 (stmt);
3668 tree rhs2_type = TREE_TYPE (rhs2);
3669 tree rhs3 = gimple_assign_rhs3 (stmt);
3670 tree rhs3_type = TREE_TYPE (rhs3);
3671
3672 if (!is_gimple_reg (lhs))
3673 {
3674 error ("non-register as LHS of ternary operation");
3675 return true;
3676 }
3677
3678 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3679 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3680 || !is_gimple_val (rhs2)
3681 || !is_gimple_val (rhs3))
3682 {
3683 error ("invalid operands in ternary operation");
3684 return true;
3685 }
3686
3687 /* First handle operations that involve different types. */
3688 switch (rhs_code)
3689 {
3690 case WIDEN_MULT_PLUS_EXPR:
3691 case WIDEN_MULT_MINUS_EXPR:
3692 if ((!INTEGRAL_TYPE_P (rhs1_type)
3693 && !FIXED_POINT_TYPE_P (rhs1_type))
3694 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3695 || !useless_type_conversion_p (lhs_type, rhs3_type)
3696 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3697 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3698 {
3699 error ("type mismatch in widening multiply-accumulate expression");
3700 debug_generic_expr (lhs_type);
3701 debug_generic_expr (rhs1_type);
3702 debug_generic_expr (rhs2_type);
3703 debug_generic_expr (rhs3_type);
3704 return true;
3705 }
3706 break;
3707
3708 case FMA_EXPR:
3709 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3710 || !useless_type_conversion_p (lhs_type, rhs2_type)
3711 || !useless_type_conversion_p (lhs_type, rhs3_type))
3712 {
3713 error ("type mismatch in fused multiply-add expression");
3714 debug_generic_expr (lhs_type);
3715 debug_generic_expr (rhs1_type);
3716 debug_generic_expr (rhs2_type);
3717 debug_generic_expr (rhs3_type);
3718 return true;
3719 }
3720 break;
3721
3722 case COND_EXPR:
3723 case VEC_COND_EXPR:
3724 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3725 || !useless_type_conversion_p (lhs_type, rhs3_type))
3726 {
3727 error ("type mismatch in conditional expression");
3728 debug_generic_expr (lhs_type);
3729 debug_generic_expr (rhs2_type);
3730 debug_generic_expr (rhs3_type);
3731 return true;
3732 }
3733 break;
3734
3735 case VEC_PERM_EXPR:
3736 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3737 || !useless_type_conversion_p (lhs_type, rhs2_type))
3738 {
3739 error ("type mismatch in vector permute expression");
3740 debug_generic_expr (lhs_type);
3741 debug_generic_expr (rhs1_type);
3742 debug_generic_expr (rhs2_type);
3743 debug_generic_expr (rhs3_type);
3744 return true;
3745 }
3746
3747 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3748 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3749 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3750 {
3751 error ("vector types expected in vector permute expression");
3752 debug_generic_expr (lhs_type);
3753 debug_generic_expr (rhs1_type);
3754 debug_generic_expr (rhs2_type);
3755 debug_generic_expr (rhs3_type);
3756 return true;
3757 }
3758
3759 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3760 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3761 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3762 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3763 != TYPE_VECTOR_SUBPARTS (lhs_type))
3764 {
3765 error ("vectors with different element number found "
3766 "in vector permute expression");
3767 debug_generic_expr (lhs_type);
3768 debug_generic_expr (rhs1_type);
3769 debug_generic_expr (rhs2_type);
3770 debug_generic_expr (rhs3_type);
3771 return true;
3772 }
3773
3774 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3775 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3776 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3777 {
3778 error ("invalid mask type in vector permute expression");
3779 debug_generic_expr (lhs_type);
3780 debug_generic_expr (rhs1_type);
3781 debug_generic_expr (rhs2_type);
3782 debug_generic_expr (rhs3_type);
3783 return true;
3784 }
3785
3786 return false;
3787
3788 case DOT_PROD_EXPR:
3789 case REALIGN_LOAD_EXPR:
3790 /* FIXME. */
3791 return false;
3792
3793 default:
3794 gcc_unreachable ();
3795 }
3796 return false;
3797 }
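
/* Illustrative sketch of a VEC_PERM_EXPR accepted by the checks above:
     v4si_1 = VEC_PERM_EXPR <v4si_2, v4si_3, { 0, 5, 2, 7 }>;
   all four types are vectors with four subparts, and the mask is an
   integer vector whose element size matches that of the data
   vectors.  */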
3798
3799 /* Verify a gimple assignment statement STMT with a single rhs.
3800 Returns true if anything is wrong. */
3801
3802 static bool
3803 verify_gimple_assign_single (gimple stmt)
3804 {
3805 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3806 tree lhs = gimple_assign_lhs (stmt);
3807 tree lhs_type = TREE_TYPE (lhs);
3808 tree rhs1 = gimple_assign_rhs1 (stmt);
3809 tree rhs1_type = TREE_TYPE (rhs1);
3810 bool res = false;
3811
3812 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3813 {
3814 error ("non-trivial conversion at assignment");
3815 debug_generic_expr (lhs_type);
3816 debug_generic_expr (rhs1_type);
3817 return true;
3818 }
3819
3820 if (gimple_clobber_p (stmt)
3821 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3822 {
3823 error ("non-decl/MEM_REF LHS in clobber statement");
3824 debug_generic_expr (lhs);
3825 return true;
3826 }
3827
3828 if (handled_component_p (lhs))
3829 res |= verify_types_in_gimple_reference (lhs, true);
3830
3831 /* Special codes we cannot handle via their class. */
3832 switch (rhs_code)
3833 {
3834 case ADDR_EXPR:
3835 {
3836 tree op = TREE_OPERAND (rhs1, 0);
3837 if (!is_gimple_addressable (op))
3838 {
3839 error ("invalid operand in unary expression");
3840 return true;
3841 }
3842
3843 /* Technically there is no longer a need for matching types, but
3844 gimple hygiene asks for this check. In LTO we can end up
3845 combining incompatible units and thus end up with addresses
3846 of globals that change their type to a common one. */
3847 if (!in_lto_p
3848 && !types_compatible_p (TREE_TYPE (op),
3849 TREE_TYPE (TREE_TYPE (rhs1)))
3850 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3851 TREE_TYPE (op)))
3852 {
3853 error ("type mismatch in address expression");
3854 debug_generic_stmt (TREE_TYPE (rhs1));
3855 debug_generic_stmt (TREE_TYPE (op));
3856 return true;
3857 }
3858
3859 return verify_types_in_gimple_reference (op, true);
3860 }
3861
3862 /* tcc_reference */
3863 case INDIRECT_REF:
3864 error ("INDIRECT_REF in gimple IL");
3865 return true;
3866
3867 case COMPONENT_REF:
3868 case BIT_FIELD_REF:
3869 case ARRAY_REF:
3870 case ARRAY_RANGE_REF:
3871 case VIEW_CONVERT_EXPR:
3872 case REALPART_EXPR:
3873 case IMAGPART_EXPR:
3874 case TARGET_MEM_REF:
3875 case MEM_REF:
3876 if (!is_gimple_reg (lhs)
3877 && is_gimple_reg_type (TREE_TYPE (lhs)))
3878 {
3879 error ("invalid rhs for gimple memory store");
3880 debug_generic_stmt (lhs);
3881 debug_generic_stmt (rhs1);
3882 return true;
3883 }
3884 return res || verify_types_in_gimple_reference (rhs1, false);
3885
3886 /* tcc_constant */
3887 case SSA_NAME:
3888 case INTEGER_CST:
3889 case REAL_CST:
3890 case FIXED_CST:
3891 case COMPLEX_CST:
3892 case VECTOR_CST:
3893 case STRING_CST:
3894 return res;
3895
3896 /* tcc_declaration */
3897 case CONST_DECL:
3898 return res;
3899 case VAR_DECL:
3900 case PARM_DECL:
3901 if (!is_gimple_reg (lhs)
3902 && !is_gimple_reg (rhs1)
3903 && is_gimple_reg_type (TREE_TYPE (lhs)))
3904 {
3905 error ("invalid rhs for gimple memory store");
3906 debug_generic_stmt (lhs);
3907 debug_generic_stmt (rhs1);
3908 return true;
3909 }
3910 return res;
3911
3912 case CONSTRUCTOR:
3913 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3914 {
3915 unsigned int i;
3916 tree elt_i, elt_v, elt_t = NULL_TREE;
3917
3918 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3919 return res;
3920 	    /* For vector CONSTRUCTORs we require that either it is an empty
3921 	       CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3922 	       (then the element count must be correct to cover the whole
3923 	       outer vector and the index must be NULL on all elements), or
3924 	       it is a CONSTRUCTOR of scalar elements, where as an exception
3925 	       we allow a smaller number of elements (assuming zero filling)
3926 	       and consecutive indexes as compared to NULL indexes (such
3927 	       CONSTRUCTORs can appear in the IL from FEs).  */
3928 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3929 {
3930 if (elt_t == NULL_TREE)
3931 {
3932 elt_t = TREE_TYPE (elt_v);
3933 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3934 {
3936 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3937 TREE_TYPE (elt_t)))
3938 {
3939 error ("incorrect type of vector CONSTRUCTOR"
3940 " elements");
3941 debug_generic_stmt (rhs1);
3942 return true;
3943 }
3944 else if (CONSTRUCTOR_NELTS (rhs1)
3945 * TYPE_VECTOR_SUBPARTS (elt_t)
3946 != TYPE_VECTOR_SUBPARTS (rhs1_type))
3947 {
3948 error ("incorrect number of vector CONSTRUCTOR"
3949 " elements");
3950 debug_generic_stmt (rhs1);
3951 return true;
3952 }
3953 }
3954 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3955 elt_t))
3956 {
3957 error ("incorrect type of vector CONSTRUCTOR elements");
3958 debug_generic_stmt (rhs1);
3959 return true;
3960 }
3961 else if (CONSTRUCTOR_NELTS (rhs1)
3962 > TYPE_VECTOR_SUBPARTS (rhs1_type))
3963 {
3964 error ("incorrect number of vector CONSTRUCTOR elements");
3965 debug_generic_stmt (rhs1);
3966 return true;
3967 }
3968 }
3969 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
3970 {
3971 error ("incorrect type of vector CONSTRUCTOR elements");
3972 debug_generic_stmt (rhs1);
3973 return true;
3974 }
3975 if (elt_i != NULL_TREE
3976 && (TREE_CODE (elt_t) == VECTOR_TYPE
3977 || TREE_CODE (elt_i) != INTEGER_CST
3978 || compare_tree_int (elt_i, i) != 0))
3979 {
3980 error ("vector CONSTRUCTOR with non-NULL element index");
3981 debug_generic_stmt (rhs1);
3982 return true;
3983 }
3984 }
3985 }
3986 return res;
3987 case OBJ_TYPE_REF:
3988 case ASSERT_EXPR:
3989 case WITH_SIZE_EXPR:
3990 /* FIXME. */
3991 return res;
3992
3993 default:;
3994 }
3995
3996 return res;
3997 }
3998
3999 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4000 is a problem, otherwise false. */
4001
4002 static bool
4003 verify_gimple_assign (gimple stmt)
4004 {
4005 switch (gimple_assign_rhs_class (stmt))
4006 {
4007 case GIMPLE_SINGLE_RHS:
4008 return verify_gimple_assign_single (stmt);
4009
4010 case GIMPLE_UNARY_RHS:
4011 return verify_gimple_assign_unary (stmt);
4012
4013 case GIMPLE_BINARY_RHS:
4014 return verify_gimple_assign_binary (stmt);
4015
4016 case GIMPLE_TERNARY_RHS:
4017 return verify_gimple_assign_ternary (stmt);
4018
4019 default:
4020 gcc_unreachable ();
4021 }
4022 }
4023
4024 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4025 is a problem, otherwise false. */
4026
4027 static bool
4028 verify_gimple_return (gimple stmt)
4029 {
4030 tree op = gimple_return_retval (stmt);
4031 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4032
4033 /* We cannot test for present return values as we do not fix up missing
4034 return values from the original source. */
4035 if (op == NULL)
4036 return false;
4037
4038 if (!is_gimple_val (op)
4039 && TREE_CODE (op) != RESULT_DECL)
4040 {
4041 error ("invalid operand in return statement");
4042 debug_generic_stmt (op);
4043 return true;
4044 }
4045
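	/* For a by-reference return the operand is really a pointer;
	   taking TREE_TYPE once here makes the TREE_TYPE below yield
	   the pointed-to type, which is what has to match RESTYPE.  */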
4046 if ((TREE_CODE (op) == RESULT_DECL
4047 && DECL_BY_REFERENCE (op))
4048 || (TREE_CODE (op) == SSA_NAME
4049 && SSA_NAME_VAR (op)
4050 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4051 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4052 op = TREE_TYPE (op);
4053
4054 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4055 {
4056 error ("invalid conversion in return statement");
4057 debug_generic_stmt (restype);
4058 debug_generic_stmt (TREE_TYPE (op));
4059 return true;
4060 }
4061
4062 return false;
4063 }
4064
4065
4066 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4067 is a problem, otherwise false. */
4068
4069 static bool
4070 verify_gimple_goto (gimple stmt)
4071 {
4072 tree dest = gimple_goto_dest (stmt);
4073
4074 /* ??? We have two canonical forms of direct goto destinations, a
4075 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
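	  /* For example, a direct "goto lab;" may carry either the bare
	     LABEL_DECL "lab" or "&lab", while a computed "goto *p_1;"
	     carries a pointer-valued operand such as p_1.  */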
4076 if (TREE_CODE (dest) != LABEL_DECL
4077 && (!is_gimple_val (dest)
4078 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4079 {
4080 error ("goto destination is neither a label nor a pointer");
4081 return true;
4082 }
4083
4084 return false;
4085 }
4086
4087 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4088 is a problem, otherwise false. */
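
/* For illustration, a well-formed switch looks roughly like

     switch (i_1) <default: <L0>, case 0: <L1>, case 3 ... 5: <L2>>

   in the dumps: the default label comes first and has no CASE_LOW,
   every other label has a CASE_LOW, any range satisfies
   CASE_LOW < CASE_HIGH, all labels share one type no wider than the
   index type, and the labels are sorted and non-overlapping.  */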
4089
4090 static bool
4091 verify_gimple_switch (gimple stmt)
4092 {
4093 unsigned int i, n;
4094 tree elt, prev_upper_bound = NULL_TREE;
4095 tree index_type, elt_type = NULL_TREE;
4096
4097 if (!is_gimple_val (gimple_switch_index (stmt)))
4098 {
4099 error ("invalid operand to switch statement");
4100 debug_generic_stmt (gimple_switch_index (stmt));
4101 return true;
4102 }
4103
4104 index_type = TREE_TYPE (gimple_switch_index (stmt));
4105 if (! INTEGRAL_TYPE_P (index_type))
4106 {
4107 error ("non-integral type switch statement");
4108 debug_generic_expr (index_type);
4109 return true;
4110 }
4111
4112 elt = gimple_switch_label (stmt, 0);
4113 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4114 {
4115 error ("invalid default case label in switch statement");
4116 debug_generic_expr (elt);
4117 return true;
4118 }
4119
4120 n = gimple_switch_num_labels (stmt);
4121 for (i = 1; i < n; i++)
4122 {
4123 elt = gimple_switch_label (stmt, i);
4124
4125 if (! CASE_LOW (elt))
4126 {
4127 error ("invalid case label in switch statement");
4128 debug_generic_expr (elt);
4129 return true;
4130 }
4131 if (CASE_HIGH (elt)
4132 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4133 {
4134 error ("invalid case range in switch statement");
4135 debug_generic_expr (elt);
4136 return true;
4137 }
4138
4139 if (elt_type)
4140 {
4141 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4142 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4143 {
4144 error ("type mismatch for case label in switch statement");
4145 debug_generic_expr (elt);
4146 return true;
4147 }
4148 }
4149 else
4150 {
4151 elt_type = TREE_TYPE (CASE_LOW (elt));
4152 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4153 {
4154 error ("type precision mismatch in switch statement");
4155 return true;
4156 }
4157 }
4158
4159 if (prev_upper_bound)
4160 {
4161 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4162 {
4163 error ("case labels not sorted in switch statement");
4164 return true;
4165 }
4166 }
4167
4168 prev_upper_bound = CASE_HIGH (elt);
4169 if (! prev_upper_bound)
4170 prev_upper_bound = CASE_LOW (elt);
4171 }
4172
4173 return false;
4174 }
4175
4176 /* Verify a gimple debug statement STMT.
4177 Returns true if anything is wrong. */
4178
4179 static bool
4180 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4181 {
4182 /* There isn't much that could be wrong in a gimple debug stmt. A
4183 	     gimple debug bind stmt, for example, maps a tree that is usually
4184 	     a VAR_DECL or a PARM_DECL (but could also be some scalarized
4185 	     component or member of an aggregate type) to another tree that
4186 	     can be an arbitrary expression.  These stmts expand into debug
4187 insns, and are converted to debug notes by var-tracking.c. */
4188 return false;
4189 }
4190
4191 /* Verify a gimple label statement STMT.
4192 Returns true if anything is wrong. */
4193
4194 static bool
4195 verify_gimple_label (gimple stmt)
4196 {
4197 tree decl = gimple_label_label (stmt);
4198 int uid;
4199 bool err = false;
4200
4201 if (TREE_CODE (decl) != LABEL_DECL)
4202 return true;
4203 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4204 && DECL_CONTEXT (decl) != current_function_decl)
4205 {
4206 error ("label's context is not the current function decl");
4207 err |= true;
4208 }
4209
4210 uid = LABEL_DECL_UID (decl);
4211 if (cfun->cfg
4212 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4213 {
4214 error ("incorrect entry in label_to_block_map");
4215 err |= true;
4216 }
4217
4218 uid = EH_LANDING_PAD_NR (decl);
4219 if (uid)
4220 {
4221 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4222 if (decl != lp->post_landing_pad)
4223 {
4224 error ("incorrect setting of landing pad number");
4225 err |= true;
4226 }
4227 }
4228
4229 return err;
4230 }
4231
4232 /* Verify the GIMPLE statement STMT. Returns true if there is an
4233 error, otherwise false. */
4234
4235 static bool
4236 verify_gimple_stmt (gimple stmt)
4237 {
4238 switch (gimple_code (stmt))
4239 {
4240 case GIMPLE_ASSIGN:
4241 return verify_gimple_assign (stmt);
4242
4243 case GIMPLE_LABEL:
4244 return verify_gimple_label (stmt);
4245
4246 case GIMPLE_CALL:
4247 return verify_gimple_call (stmt);
4248
4249 case GIMPLE_COND:
4250 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4251 {
4252 error ("invalid comparison code in gimple cond");
4253 return true;
4254 }
4255 if (!(!gimple_cond_true_label (stmt)
4256 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4257 || !(!gimple_cond_false_label (stmt)
4258 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4259 {
4260 error ("invalid labels in gimple cond");
4261 return true;
4262 }
4263
4264 return verify_gimple_comparison (boolean_type_node,
4265 gimple_cond_lhs (stmt),
4266 gimple_cond_rhs (stmt));
4267
4268 case GIMPLE_GOTO:
4269 return verify_gimple_goto (stmt);
4270
4271 case GIMPLE_SWITCH:
4272 return verify_gimple_switch (stmt);
4273
4274 case GIMPLE_RETURN:
4275 return verify_gimple_return (stmt);
4276
4277 case GIMPLE_ASM:
4278 return false;
4279
4280 case GIMPLE_TRANSACTION:
4281 return verify_gimple_transaction (stmt);
4282
4283 /* Tuples that do not have tree operands. */
4284 case GIMPLE_NOP:
4285 case GIMPLE_PREDICT:
4286 case GIMPLE_RESX:
4287 case GIMPLE_EH_DISPATCH:
4288 case GIMPLE_EH_MUST_NOT_THROW:
4289 return false;
4290
4291 CASE_GIMPLE_OMP:
4292 /* OpenMP directives are validated by the FE and never operated
4293 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4294 non-gimple expressions when the main index variable has had
4295 its address taken. This does not affect the loop itself
4296 		 because the header of a GIMPLE_OMP_FOR is merely used to determine
4297 		 how to set up the parallel iteration.  */
4298 return false;
4299
4300 case GIMPLE_DEBUG:
4301 return verify_gimple_debug (stmt);
4302
4303 default:
4304 gcc_unreachable ();
4305 }
4306 }
4307
4308 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4309 and false otherwise. */
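
/* For example, in

     x_3 = PHI <x_1(2), x_2(3)>

   the result and all arguments must agree in virtual-ness, and a
   virtual PHI must be rooted at gimple_vop (cfun).  */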
4310
4311 static bool
4312 verify_gimple_phi (gimple phi)
4313 {
4314 bool err = false;
4315 unsigned i;
4316 tree phi_result = gimple_phi_result (phi);
4317 bool virtual_p;
4318
4319 if (!phi_result)
4320 {
4321 error ("invalid PHI result");
4322 return true;
4323 }
4324
4325 virtual_p = virtual_operand_p (phi_result);
4326 if (TREE_CODE (phi_result) != SSA_NAME
4327 || (virtual_p
4328 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4329 {
4330 error ("invalid PHI result");
4331 err = true;
4332 }
4333
4334 for (i = 0; i < gimple_phi_num_args (phi); i++)
4335 {
4336 tree t = gimple_phi_arg_def (phi, i);
4337
4338 if (!t)
4339 {
4340 error ("missing PHI def");
4341 err |= true;
4342 continue;
4343 }
4344 /* Addressable variables do have SSA_NAMEs but they
4345 are not considered gimple values. */
4346 else if ((TREE_CODE (t) == SSA_NAME
4347 && virtual_p != virtual_operand_p (t))
4348 || (virtual_p
4349 && (TREE_CODE (t) != SSA_NAME
4350 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4351 || (!virtual_p
4352 && !is_gimple_val (t)))
4353 {
4354 error ("invalid PHI argument");
4355 debug_generic_expr (t);
4356 err |= true;
4357 }
4358 #ifdef ENABLE_TYPES_CHECKING
4359 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4360 {
4361 error ("incompatible types in PHI argument %u", i);
4362 debug_generic_stmt (TREE_TYPE (phi_result));
4363 debug_generic_stmt (TREE_TYPE (t));
4364 err |= true;
4365 }
4366 #endif
4367 }
4368
4369 return err;
4370 }
4371
4372 /* Verify the GIMPLE statements inside the sequence STMTS. */
4373
4374 static bool
4375 verify_gimple_in_seq_2 (gimple_seq stmts)
4376 {
4377 gimple_stmt_iterator ittr;
4378 bool err = false;
4379
4380 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4381 {
4382 gimple stmt = gsi_stmt (ittr);
4383
4384 switch (gimple_code (stmt))
4385 {
4386 case GIMPLE_BIND:
4387 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4388 break;
4389
4390 case GIMPLE_TRY:
4391 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4392 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4393 break;
4394
4395 case GIMPLE_EH_FILTER:
4396 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4397 break;
4398
4399 case GIMPLE_EH_ELSE:
4400 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4401 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4402 break;
4403
4404 case GIMPLE_CATCH:
4405 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4406 break;
4407
4408 case GIMPLE_TRANSACTION:
4409 err |= verify_gimple_transaction (stmt);
4410 break;
4411
4412 default:
4413 {
4414 bool err2 = verify_gimple_stmt (stmt);
4415 if (err2)
4416 debug_gimple_stmt (stmt);
4417 err |= err2;
4418 }
4419 }
4420 }
4421
4422 return err;
4423 }
4424
4425 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4426 is a problem, otherwise false. */
4427
4428 static bool
4429 verify_gimple_transaction (gimple stmt)
4430 {
4431 tree lab = gimple_transaction_label (stmt);
4432 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4433 return true;
4434 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4435 }
4436
4437
4438 /* Verify the GIMPLE statements inside the statement list STMTS. */
4439
4440 DEBUG_FUNCTION void
4441 verify_gimple_in_seq (gimple_seq stmts)
4442 {
4443 timevar_push (TV_TREE_STMT_VERIFY);
4444 if (verify_gimple_in_seq_2 (stmts))
4445 internal_error ("verify_gimple failed");
4446 timevar_pop (TV_TREE_STMT_VERIFY);
4447 }
4448
4449 /* Return true when T can be shared.  */
4450
4451 static bool
4452 tree_node_can_be_shared (tree t)
4453 {
4454 if (IS_TYPE_OR_DECL_P (t)
4455 || is_gimple_min_invariant (t)
4456 || TREE_CODE (t) == SSA_NAME
4457 || t == error_mark_node
4458 || TREE_CODE (t) == IDENTIFIER_NODE)
4459 return true;
4460
4461 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4462 return true;
4463
4467 return false;
4468 }
4469
4470 /* Called via walk_tree. Verify tree sharing. */
4471
4472 static tree
4473 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4474 {
4475 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4476
4477 if (tree_node_can_be_shared (*tp))
4478 {
4479 *walk_subtrees = false;
4480 return NULL;
4481 }
4482
4483 if (pointer_set_insert (visited, *tp))
4484 return *tp;
4485
4486 return NULL;
4487 }
4488
4489 /* Called via walk_gimple_stmt. Verify tree sharing. */
4490
4491 static tree
4492 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4493 {
4494 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4495 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4496 }
4497
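
/* Set by verify_eh_throw_stmt_node when a statement recorded in the
   EH throw table no longer appears in the IL.  */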
4498 static bool eh_error_found;
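
/* Callback for htab_traverse over the EH throw statement table.  DATA
   is the set of statements seen while walking the CFG; complain about
   any table entry whose statement is no longer present in the IL.  */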
4499 static int
4500 verify_eh_throw_stmt_node (void **slot, void *data)
4501 {
4502 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4503 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4504
4505 if (!pointer_set_contains (visited, node->stmt))
4506 {
4507 error ("dead STMT in EH table");
4508 debug_gimple_stmt (node->stmt);
4509 eh_error_found = true;
4510 }
4511 return 1;
4512 }
4513
4514 /* Verify that the block of location LOC, if any, is in BLOCKS.  */
4515
4516 static bool
4517 verify_location (pointer_set_t *blocks, location_t loc)
4518 {
4519 tree block = LOCATION_BLOCK (loc);
4520 if (block != NULL_TREE
4521 && !pointer_set_contains (blocks, block))
4522 {
4523 error ("location references block not in block tree");
4524 return true;
4525 }
4526 if (block != NULL_TREE)
4527 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4528 return false;
4529 }
4530
4531 /* Called via walk_tree. Verify that expressions have no blocks. */
4532
4533 static tree
4534 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4535 {
4536 if (!EXPR_P (*tp))
4537 {
4538 *walk_subtrees = false;
4539 return NULL;
4540 }
4541
4542 location_t loc = EXPR_LOCATION (*tp);
4543 if (LOCATION_BLOCK (loc) != NULL)
4544 return *tp;
4545
4546 return NULL;
4547 }
4548
4549 /* Called via walk_tree. Verify locations of expressions. */
4550
4551 static tree
4552 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4553 {
4554 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4555
4556 if (TREE_CODE (*tp) == VAR_DECL
4557 && DECL_HAS_DEBUG_EXPR_P (*tp))
4558 {
4559 tree t = DECL_DEBUG_EXPR (*tp);
4560 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4561 if (addr)
4562 return addr;
4563 }
4564 if ((TREE_CODE (*tp) == VAR_DECL
4565 || TREE_CODE (*tp) == PARM_DECL
4566 || TREE_CODE (*tp) == RESULT_DECL)
4567 && DECL_HAS_VALUE_EXPR_P (*tp))
4568 {
4569 tree t = DECL_VALUE_EXPR (*tp);
4570 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4571 if (addr)
4572 return addr;
4573 }
4574
4575 if (!EXPR_P (*tp))
4576 {
4577 *walk_subtrees = false;
4578 return NULL;
4579 }
4580
4581 location_t loc = EXPR_LOCATION (*tp);
4582 if (verify_location (blocks, loc))
4583 return *tp;
4584
4585 return NULL;
4586 }
4587
4588 /* Called via walk_gimple_op. Verify locations of expressions. */
4589
4590 static tree
4591 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4592 {
4593 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4594 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4595 }
4596
4597 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4598
4599 static void
4600 collect_subblocks (pointer_set_t *blocks, tree block)
4601 {
4602 tree t;
4603 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4604 {
4605 pointer_set_insert (blocks, t);
4606 collect_subblocks (blocks, t);
4607 }
4608 }
4609
4610 /* Verify the GIMPLE statements in the CFG of FN. */
4611
4612 DEBUG_FUNCTION void
4613 verify_gimple_in_cfg (struct function *fn)
4614 {
4615 basic_block bb;
4616 bool err = false;
4617 struct pointer_set_t *visited, *visited_stmts, *blocks;
4618
4619 timevar_push (TV_TREE_STMT_VERIFY);
4620 visited = pointer_set_create ();
4621 visited_stmts = pointer_set_create ();
4622
4623 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4624 blocks = pointer_set_create ();
4625 if (DECL_INITIAL (fn->decl))
4626 {
4627 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4628 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4629 }
4630
4631 FOR_EACH_BB_FN (bb, fn)
4632 {
4633 gimple_stmt_iterator gsi;
4634
4635 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4636 {
4637 gimple phi = gsi_stmt (gsi);
4638 bool err2 = false;
4639 unsigned i;
4640
4641 pointer_set_insert (visited_stmts, phi);
4642
4643 if (gimple_bb (phi) != bb)
4644 {
4645 error ("gimple_bb (phi) is set to a wrong basic block");
4646 err2 = true;
4647 }
4648
4649 err2 |= verify_gimple_phi (phi);
4650
4651 /* Only PHI arguments have locations. */
4652 if (gimple_location (phi) != UNKNOWN_LOCATION)
4653 {
4654 error ("PHI node with location");
4655 err2 = true;
4656 }
4657
4658 for (i = 0; i < gimple_phi_num_args (phi); i++)
4659 {
4660 tree arg = gimple_phi_arg_def (phi, i);
4661 tree addr = walk_tree (&arg, verify_node_sharing_1,
4662 visited, NULL);
4663 if (addr)
4664 {
4665 error ("incorrect sharing of tree nodes");
4666 debug_generic_expr (addr);
4667 err2 |= true;
4668 }
4669 location_t loc = gimple_phi_arg_location (phi, i);
4670 if (virtual_operand_p (gimple_phi_result (phi))
4671 && loc != UNKNOWN_LOCATION)
4672 {
4673 error ("virtual PHI with argument locations");
4674 err2 = true;
4675 }
4676 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4677 if (addr)
4678 {
4679 debug_generic_expr (addr);
4680 err2 = true;
4681 }
4682 err2 |= verify_location (blocks, loc);
4683 }
4684
4685 if (err2)
4686 debug_gimple_stmt (phi);
4687 err |= err2;
4688 }
4689
4690 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4691 {
4692 gimple stmt = gsi_stmt (gsi);
4693 bool err2 = false;
4694 struct walk_stmt_info wi;
4695 tree addr;
4696 int lp_nr;
4697
4698 pointer_set_insert (visited_stmts, stmt);
4699
4700 if (gimple_bb (stmt) != bb)
4701 {
4702 error ("gimple_bb (stmt) is set to a wrong basic block");
4703 err2 = true;
4704 }
4705
4706 err2 |= verify_gimple_stmt (stmt);
4707 err2 |= verify_location (blocks, gimple_location (stmt));
4708
4709 memset (&wi, 0, sizeof (wi));
4710 wi.info = (void *) visited;
4711 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4712 if (addr)
4713 {
4714 error ("incorrect sharing of tree nodes");
4715 debug_generic_expr (addr);
4716 err2 |= true;
4717 }
4718
4719 memset (&wi, 0, sizeof (wi));
4720 wi.info = (void *) blocks;
4721 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4722 if (addr)
4723 {
4724 debug_generic_expr (addr);
4725 err2 |= true;
4726 }
4727
4728 /* ??? Instead of not checking these stmts at all the walker
4729 should know its context via wi. */
4730 if (!is_gimple_debug (stmt)
4731 && !is_gimple_omp (stmt))
4732 {
4733 memset (&wi, 0, sizeof (wi));
4734 addr = walk_gimple_op (stmt, verify_expr, &wi);
4735 if (addr)
4736 {
4737 debug_generic_expr (addr);
4738 inform (gimple_location (stmt), "in statement");
4739 err2 |= true;
4740 }
4741 }
4742
4743 /* If the statement is marked as part of an EH region, then it is
4744 	     expected that the statement could throw.  Verify that when an
4745 	     optimization simplifies a statement so that it provably cannot
4746 	     throw, the other data structures are updated to match.  */
4748 lp_nr = lookup_stmt_eh_lp (stmt);
4749 if (lp_nr != 0)
4750 {
4751 if (!stmt_could_throw_p (stmt))
4752 {
4753 error ("statement marked for throw, but doesn%'t");
4754 err2 |= true;
4755 }
4756 else if (lp_nr > 0
4757 && !gsi_one_before_end_p (gsi)
4758 && stmt_can_throw_internal (stmt))
4759 {
4760 error ("statement marked for throw in middle of block");
4761 err2 |= true;
4762 }
4763 }
4764
4765 if (err2)
4766 debug_gimple_stmt (stmt);
4767 err |= err2;
4768 }
4769 }
4770
4771 eh_error_found = false;
4772 if (get_eh_throw_stmt_table (cfun))
4773 htab_traverse (get_eh_throw_stmt_table (cfun),
4774 verify_eh_throw_stmt_node,
4775 visited_stmts);
4776
4777 if (err || eh_error_found)
4778 internal_error ("verify_gimple failed");
4779
4780 pointer_set_destroy (visited);
4781 pointer_set_destroy (visited_stmts);
4782 pointer_set_destroy (blocks);
4783 verify_histograms ();
4784 timevar_pop (TV_TREE_STMT_VERIFY);
4785 }
4786
4787
4788 /* Verifies that the flow information is OK. */
4789
4790 static int
4791 gimple_verify_flow_info (void)
4792 {
4793 int err = 0;
4794 basic_block bb;
4795 gimple_stmt_iterator gsi;
4796 gimple stmt;
4797 edge e;
4798 edge_iterator ei;
4799
4800 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4801 {
4802 error ("ENTRY_BLOCK has IL associated with it");
4803 err = 1;
4804 }
4805
4806 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4807 {
4808 error ("EXIT_BLOCK has IL associated with it");
4809 err = 1;
4810 }
4811
4812 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4813 if (e->flags & EDGE_FALLTHRU)
4814 {
4815 error ("fallthru to exit from bb %d", e->src->index);
4816 err = 1;
4817 }
4818
4819 FOR_EACH_BB (bb)
4820 {
4821 bool found_ctrl_stmt = false;
4822
4823 stmt = NULL;
4824
4825 /* Skip labels on the start of basic block. */
4826 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4827 {
4828 tree label;
4829 gimple prev_stmt = stmt;
4830
4831 stmt = gsi_stmt (gsi);
4832
4833 if (gimple_code (stmt) != GIMPLE_LABEL)
4834 break;
4835
4836 label = gimple_label_label (stmt);
4837 if (prev_stmt && DECL_NONLOCAL (label))
4838 {
4839 error ("nonlocal label ");
4840 print_generic_expr (stderr, label, 0);
4841 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4842 bb->index);
4843 err = 1;
4844 }
4845
4846 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4847 {
4848 error ("EH landing pad label ");
4849 print_generic_expr (stderr, label, 0);
4850 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4851 bb->index);
4852 err = 1;
4853 }
4854
4855 if (label_to_block (label) != bb)
4856 {
4857 error ("label ");
4858 print_generic_expr (stderr, label, 0);
4859 fprintf (stderr, " to block does not match in bb %d",
4860 bb->index);
4861 err = 1;
4862 }
4863
4864 if (decl_function_context (label) != current_function_decl)
4865 {
4866 error ("label ");
4867 print_generic_expr (stderr, label, 0);
4868 fprintf (stderr, " has incorrect context in bb %d",
4869 bb->index);
4870 err = 1;
4871 }
4872 }
4873
4874 /* Verify that body of basic block BB is free of control flow. */
4875 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4876 {
4877 gimple stmt = gsi_stmt (gsi);
4878
4879 if (found_ctrl_stmt)
4880 {
4881 error ("control flow in the middle of basic block %d",
4882 bb->index);
4883 err = 1;
4884 }
4885
4886 if (stmt_ends_bb_p (stmt))
4887 found_ctrl_stmt = true;
4888
4889 if (gimple_code (stmt) == GIMPLE_LABEL)
4890 {
4891 error ("label ");
4892 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4893 fprintf (stderr, " in the middle of basic block %d", bb->index);
4894 err = 1;
4895 }
4896 }
4897
4898 gsi = gsi_last_bb (bb);
4899 if (gsi_end_p (gsi))
4900 continue;
4901
4902 stmt = gsi_stmt (gsi);
4903
4904 if (gimple_code (stmt) == GIMPLE_LABEL)
4905 continue;
4906
4907 err |= verify_eh_edges (stmt);
4908
4909 if (is_ctrl_stmt (stmt))
4910 {
4911 FOR_EACH_EDGE (e, ei, bb->succs)
4912 if (e->flags & EDGE_FALLTHRU)
4913 {
4914 error ("fallthru edge after a control statement in bb %d",
4915 bb->index);
4916 err = 1;
4917 }
4918 }
4919
4920 if (gimple_code (stmt) != GIMPLE_COND)
4921 {
4922 	  /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
4923 	     after anything other than a GIMPLE_COND statement.  */
4924 FOR_EACH_EDGE (e, ei, bb->succs)
4925 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4926 {
4927 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4928 bb->index);
4929 err = 1;
4930 }
4931 }
4932
4933 switch (gimple_code (stmt))
4934 {
4935 case GIMPLE_COND:
4936 {
4937 edge true_edge;
4938 edge false_edge;
4939
4940 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4941
4942 if (!true_edge
4943 || !false_edge
4944 || !(true_edge->flags & EDGE_TRUE_VALUE)
4945 || !(false_edge->flags & EDGE_FALSE_VALUE)
4946 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4947 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4948 || EDGE_COUNT (bb->succs) >= 3)
4949 {
4950 error ("wrong outgoing edge flags at end of bb %d",
4951 bb->index);
4952 err = 1;
4953 }
4954 }
4955 break;
4956
4957 case GIMPLE_GOTO:
4958 if (simple_goto_p (stmt))
4959 {
4960 error ("explicit goto at end of bb %d", bb->index);
4961 err = 1;
4962 }
4963 else
4964 {
4965 /* FIXME. We should double check that the labels in the
4966 destination blocks have their address taken. */
4967 FOR_EACH_EDGE (e, ei, bb->succs)
4968 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4969 | EDGE_FALSE_VALUE))
4970 || !(e->flags & EDGE_ABNORMAL))
4971 {
4972 error ("wrong outgoing edge flags at end of bb %d",
4973 bb->index);
4974 err = 1;
4975 }
4976 }
4977 break;
4978
4979 case GIMPLE_CALL:
4980 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4981 break;
4982 /* ... fallthru ... */
4983 case GIMPLE_RETURN:
4984 if (!single_succ_p (bb)
4985 || (single_succ_edge (bb)->flags
4986 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4987 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4988 {
4989 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4990 err = 1;
4991 }
4992 if (single_succ (bb) != EXIT_BLOCK_PTR)
4993 {
4994 error ("return edge does not point to exit in bb %d",
4995 bb->index);
4996 err = 1;
4997 }
4998 break;
4999
5000 case GIMPLE_SWITCH:
5001 {
5002 tree prev;
5003 edge e;
5004 size_t i, n;
5005
5006 n = gimple_switch_num_labels (stmt);
5007
5008 /* Mark all the destination basic blocks. */
5009 for (i = 0; i < n; ++i)
5010 {
5011 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5012 basic_block label_bb = label_to_block (lab);
5013 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5014 label_bb->aux = (void *)1;
5015 }
5016
5017 /* Verify that the case labels are sorted. */
5018 prev = gimple_switch_label (stmt, 0);
5019 for (i = 1; i < n; ++i)
5020 {
5021 tree c = gimple_switch_label (stmt, i);
5022 if (!CASE_LOW (c))
5023 {
5024 error ("found default case not at the start of "
5025 "case vector");
5026 err = 1;
5027 continue;
5028 }
5029 if (CASE_LOW (prev)
5030 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5031 {
5032 error ("case labels not sorted: ");
5033 print_generic_expr (stderr, prev, 0);
5034 		    fprintf (stderr, " is greater than ");
5035 		    print_generic_expr (stderr, c, 0);
5036 		    fprintf (stderr, " but comes before it.\n");
5037 err = 1;
5038 }
5039 prev = c;
5040 }
5041 /* VRP will remove the default case if it can prove it will
5042 never be executed. So do not verify there always exists
5043 a default case here. */
5044
5045 FOR_EACH_EDGE (e, ei, bb->succs)
5046 {
5047 if (!e->dest->aux)
5048 {
5049 error ("extra outgoing edge %d->%d",
5050 bb->index, e->dest->index);
5051 err = 1;
5052 }
5053
5054 e->dest->aux = (void *)2;
5055 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5056 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5057 {
5058 error ("wrong outgoing edge flags at end of bb %d",
5059 bb->index);
5060 err = 1;
5061 }
5062 }
5063
5064 /* Check that we have all of them. */
5065 for (i = 0; i < n; ++i)
5066 {
5067 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5068 basic_block label_bb = label_to_block (lab);
5069
5070 if (label_bb->aux != (void *)2)
5071 {
5072 error ("missing edge %i->%i", bb->index, label_bb->index);
5073 err = 1;
5074 }
5075 }
5076
5077 FOR_EACH_EDGE (e, ei, bb->succs)
5078 e->dest->aux = (void *)0;
5079 }
5080 break;
5081
5082 case GIMPLE_EH_DISPATCH:
5083 err |= verify_eh_dispatch_edge (stmt);
5084 break;
5085
5086 default:
5087 break;
5088 }
5089 }
5090
5091 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5092 verify_dominators (CDI_DOMINATORS);
5093
5094 return err;
5095 }
5096
5097
5098 /* Updates phi nodes after creating a forwarder block joined
5099 by edge FALLTHRU. */
5100
5101 static void
5102 gimple_make_forwarder_block (edge fallthru)
5103 {
5104 edge e;
5105 edge_iterator ei;
5106 basic_block dummy, bb;
5107 tree var;
5108 gimple_stmt_iterator gsi;
5109
5110 dummy = fallthru->src;
5111 bb = fallthru->dest;
5112
5113 if (single_pred_p (bb))
5114 return;
5115
5116 /* If we redirected a branch we must create new PHI nodes at the
5117 start of BB. */
5118 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5119 {
5120 gimple phi, new_phi;
5121
5122 phi = gsi_stmt (gsi);
5123 var = gimple_phi_result (phi);
5124 new_phi = create_phi_node (var, bb);
5125 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5126 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5127 UNKNOWN_LOCATION);
5128 }
5129
5130 /* Add the arguments we have stored on edges. */
5131 FOR_EACH_EDGE (e, ei, bb->preds)
5132 {
5133 if (e == fallthru)
5134 continue;
5135
5136 flush_pending_stmts (e);
5137 }
5138 }
5139
5140
5141 /* Return a non-special label at the head of basic block BB.
5142 Create one if it doesn't exist. */
5143
5144 tree
5145 gimple_block_label (basic_block bb)
5146 {
5147 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5148 bool first = true;
5149 tree label;
5150 gimple stmt;
5151
5152 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5153 {
5154 stmt = gsi_stmt (i);
5155 if (gimple_code (stmt) != GIMPLE_LABEL)
5156 break;
5157 label = gimple_label_label (stmt);
5158 if (!DECL_NONLOCAL (label))
5159 {
5160 if (!first)
5161 gsi_move_before (&i, &s);
5162 return label;
5163 }
5164 }
5165
5166 label = create_artificial_label (UNKNOWN_LOCATION);
5167 stmt = gimple_build_label (label);
5168 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5169 return label;
5170 }
5171
5172
5173 /* Attempt to perform edge redirection by replacing a possibly complex
5174 jump instruction by a goto or by removing the jump completely.
5175 This can apply only if all edges now point to the same block. The
5176 parameters and return values are equivalent to
5177 redirect_edge_and_branch. */
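
/* For instance, once both outgoing edges of a GIMPLE_COND come to point
   at TARGET, the condition serves no purpose: it is removed and the
   surviving edge is turned into a fallthru.  */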
5178
5179 static edge
5180 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5181 {
5182 basic_block src = e->src;
5183 gimple_stmt_iterator i;
5184 gimple stmt;
5185
5186 /* We can replace or remove a complex jump only when we have exactly
5187 two edges. */
5188 if (EDGE_COUNT (src->succs) != 2
5189 /* Verify that all targets will be TARGET. Specifically, the
5190 edge that is not E must also go to TARGET. */
5191 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5192 return NULL;
5193
5194 i = gsi_last_bb (src);
5195 if (gsi_end_p (i))
5196 return NULL;
5197
5198 stmt = gsi_stmt (i);
5199
5200 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5201 {
5202 gsi_remove (&i, true);
5203 e = ssa_redirect_edge (e, target);
5204 e->flags = EDGE_FALLTHRU;
5205 return e;
5206 }
5207
5208 return NULL;
5209 }
5210
5211
5212 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5213 edge representing the redirected branch. */
5214
5215 static edge
5216 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5217 {
5218 basic_block bb = e->src;
5219 gimple_stmt_iterator gsi;
5220 edge ret;
5221 gimple stmt;
5222
5223 if (e->flags & EDGE_ABNORMAL)
5224 return NULL;
5225
5226 if (e->dest == dest)
5227 return NULL;
5228
5229 if (e->flags & EDGE_EH)
5230 return redirect_eh_edge (e, dest);
5231
5232 if (e->src != ENTRY_BLOCK_PTR)
5233 {
5234 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5235 if (ret)
5236 return ret;
5237 }
5238
5239 gsi = gsi_last_bb (bb);
5240 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5241
5242 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5243 {
5244 case GIMPLE_COND:
5245 /* For COND_EXPR, we only need to redirect the edge. */
5246 break;
5247
5248 case GIMPLE_GOTO:
5249 /* No non-abnormal edges should lead from a non-simple goto, and
5250 simple ones should be represented implicitly. */
5251 gcc_unreachable ();
5252
5253 case GIMPLE_SWITCH:
5254 {
5255 tree label = gimple_block_label (dest);
5256 tree cases = get_cases_for_edge (e, stmt);
5257
5258 /* If we have a list of cases associated with E, then use it
5259 as it's a lot faster than walking the entire case vector. */
5260 if (cases)
5261 {
5262 edge e2 = find_edge (e->src, dest);
5263 tree last, first;
5264
5265 first = cases;
5266 while (cases)
5267 {
5268 last = cases;
5269 CASE_LABEL (cases) = label;
5270 cases = CASE_CHAIN (cases);
5271 }
5272
5273 /* If there was already an edge in the CFG, then we need
5274 to move all the cases associated with E to E2. */
5275 if (e2)
5276 {
5277 tree cases2 = get_cases_for_edge (e2, stmt);
5278
5279 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5280 CASE_CHAIN (cases2) = first;
5281 }
5282 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5283 }
5284 else
5285 {
5286 size_t i, n = gimple_switch_num_labels (stmt);
5287
5288 for (i = 0; i < n; i++)
5289 {
5290 tree elt = gimple_switch_label (stmt, i);
5291 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5292 CASE_LABEL (elt) = label;
5293 }
5294 }
5295 }
5296 break;
5297
5298 case GIMPLE_ASM:
5299 {
5300 int i, n = gimple_asm_nlabels (stmt);
5301 tree label = NULL;
5302
5303 for (i = 0; i < n; ++i)
5304 {
5305 tree cons = gimple_asm_label_op (stmt, i);
5306 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5307 {
5308 if (!label)
5309 label = gimple_block_label (dest);
5310 TREE_VALUE (cons) = label;
5311 }
5312 }
5313
5314 /* If we didn't find any label matching the former edge in the
5315 asm labels, we must be redirecting the fallthrough
5316 edge. */
5317 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5318 }
5319 break;
5320
5321 case GIMPLE_RETURN:
5322 gsi_remove (&gsi, true);
5323 e->flags |= EDGE_FALLTHRU;
5324 break;
5325
5326 case GIMPLE_OMP_RETURN:
5327 case GIMPLE_OMP_CONTINUE:
5328 case GIMPLE_OMP_SECTIONS_SWITCH:
5329 case GIMPLE_OMP_FOR:
5330 /* The edges from OMP constructs can be simply redirected. */
5331 break;
5332
5333 case GIMPLE_EH_DISPATCH:
5334 if (!(e->flags & EDGE_FALLTHRU))
5335 redirect_eh_dispatch_edge (stmt, e, dest);
5336 break;
5337
5338 case GIMPLE_TRANSACTION:
5339 /* The ABORT edge has a stored label associated with it, otherwise
5340 the edges are simply redirectable. */
5341 if (e->flags == 0)
5342 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5343 break;
5344
5345 default:
5346 /* Otherwise it must be a fallthru edge, and we don't need to
5347 do anything besides redirecting it. */
5348 gcc_assert (e->flags & EDGE_FALLTHRU);
5349 break;
5350 }
5351
5352 /* Update/insert PHI nodes as necessary. */
5353
5354 /* Now update the edges in the CFG. */
5355 e = ssa_redirect_edge (e, dest);
5356
5357 return e;
5358 }
5359
5360 /* Returns true if it is possible to remove edge E by redirecting
5361 it to the destination of the other edge from E->src. */
5362
5363 static bool
5364 gimple_can_remove_branch_p (const_edge e)
5365 {
5366 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5367 return false;
5368
5369 return true;
5370 }
5371
5372 /* Simple wrapper, as we can always redirect fallthru edges. */
5373
5374 static basic_block
5375 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5376 {
5377 e = gimple_redirect_edge_and_branch (e, dest);
5378 gcc_assert (e);
5379
5380 return NULL;
5381 }
5382
5383
5384 /* Splits basic block BB after statement STMT (but at least after the
5385 labels). If STMT is NULL, BB is split just after the labels. */
5386
5387 static basic_block
5388 gimple_split_block (basic_block bb, void *stmt)
5389 {
5390 gimple_stmt_iterator gsi;
5391 gimple_stmt_iterator gsi_tgt;
5392 gimple act;
5393 gimple_seq list;
5394 basic_block new_bb;
5395 edge e;
5396 edge_iterator ei;
5397
5398 new_bb = create_empty_bb (bb);
5399
5400 /* Redirect the outgoing edges. */
5401 new_bb->succs = bb->succs;
5402 bb->succs = NULL;
5403 FOR_EACH_EDGE (e, ei, new_bb->succs)
5404 e->src = new_bb;
5405
5406 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5407 stmt = NULL;
5408
5409 /* Move everything from GSI to the new basic block. */
5410 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5411 {
5412 act = gsi_stmt (gsi);
5413 if (gimple_code (act) == GIMPLE_LABEL)
5414 continue;
5415
5416 if (!stmt)
5417 break;
5418
5419 if (stmt == act)
5420 {
5421 gsi_next (&gsi);
5422 break;
5423 }
5424 }
5425
5426 if (gsi_end_p (gsi))
5427 return new_bb;
5428
5429 	  /* Split the statement list; avoid re-creating containers as this
5430 brings ugly quadratic memory consumption in the inliner.
5431 (We are still quadratic since we need to update stmt BB pointers,
5432 sadly.) */
5433 gsi_split_seq_before (&gsi, &list);
5434 set_bb_seq (new_bb, list);
5435 for (gsi_tgt = gsi_start (list);
5436 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5437 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5438
5439 return new_bb;
5440 }
5441
5442
5443 /* Moves basic block BB after block AFTER. */
5444
5445 static bool
5446 gimple_move_block_after (basic_block bb, basic_block after)
5447 {
5448 if (bb->prev_bb == after)
5449 return true;
5450
5451 unlink_block (bb);
5452 link_block (bb, after);
5453
5454 return true;
5455 }
5456
5457
5458 /* Return TRUE if block BB has no executable statements, otherwise return
5459 FALSE. */
5460
5461 static bool
5462 gimple_empty_block_p (basic_block bb)
5463 {
5464 /* BB must have no executable statements. */
5465 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5466 if (phi_nodes (bb))
5467 return false;
5468 if (gsi_end_p (gsi))
5469 return true;
5470 if (is_gimple_debug (gsi_stmt (gsi)))
5471 gsi_next_nondebug (&gsi);
5472 return gsi_end_p (gsi);
5473 }
5474
5475
5476 /* Split a basic block if it ends with a conditional branch and if the
5477 other part of the block is not empty. */
5478
5479 static basic_block
5480 gimple_split_block_before_cond_jump (basic_block bb)
5481 {
5482 gimple last, split_point;
5483 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5484 if (gsi_end_p (gsi))
5485 return NULL;
5486 last = gsi_stmt (gsi);
5487 if (gimple_code (last) != GIMPLE_COND
5488 && gimple_code (last) != GIMPLE_SWITCH)
5489 return NULL;
5490 gsi_prev_nondebug (&gsi);
5491 split_point = gsi_stmt (gsi);
5492 return split_block (bb, split_point)->dest;
5493 }
5494
5495
5496 /* Return true if basic_block can be duplicated. */
5497
5498 static bool
5499 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5500 {
5501 return true;
5502 }
5503
5504 /* Create a duplicate of the basic block BB. NOTE: This does not
5505 preserve SSA form. */
5506
5507 static basic_block
5508 gimple_duplicate_bb (basic_block bb)
5509 {
5510 basic_block new_bb;
5511 gimple_stmt_iterator gsi, gsi_tgt;
5512 gimple_seq phis = phi_nodes (bb);
5513 gimple phi, stmt, copy;
5514
5515 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5516
5517 /* Copy the PHI nodes. We ignore PHI node arguments here because
5518 	     the incoming edges have not been set up yet.  */
5519 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5520 {
5521 phi = gsi_stmt (gsi);
5522 copy = create_phi_node (NULL_TREE, new_bb);
5523 create_new_def_for (gimple_phi_result (phi), copy,
5524 gimple_phi_result_ptr (copy));
5525 gimple_set_uid (copy, gimple_uid (phi));
5526 }
5527
5528 gsi_tgt = gsi_start_bb (new_bb);
5529 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5530 {
5531 def_operand_p def_p;
5532 ssa_op_iter op_iter;
5533 tree lhs;
5534
5535 stmt = gsi_stmt (gsi);
5536 if (gimple_code (stmt) == GIMPLE_LABEL)
5537 continue;
5538
5539 /* Don't duplicate label debug stmts. */
5540 if (gimple_debug_bind_p (stmt)
5541 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5542 == LABEL_DECL)
5543 continue;
5544
5545 /* Create a new copy of STMT and duplicate STMT's virtual
5546 operands. */
5547 copy = gimple_copy (stmt);
5548 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5549
5550 maybe_duplicate_eh_stmt (copy, stmt);
5551 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5552
5553 /* When copying around a stmt writing into a local non-user
5554 aggregate, make sure it won't share stack slot with other
5555 vars. */
5556 lhs = gimple_get_lhs (stmt);
5557 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5558 {
5559 tree base = get_base_address (lhs);
5560 if (base
5561 && (TREE_CODE (base) == VAR_DECL
5562 || TREE_CODE (base) == RESULT_DECL)
5563 && DECL_IGNORED_P (base)
5564 && !TREE_STATIC (base)
5565 && !DECL_EXTERNAL (base)
5566 && (TREE_CODE (base) != VAR_DECL
5567 || !DECL_HAS_VALUE_EXPR_P (base)))
5568 DECL_NONSHAREABLE (base) = 1;
5569 }
5570
5571 /* Create new names for all the definitions created by COPY and
5572 add replacement mappings for each new name. */
5573 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5574 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5575 }
5576
5577 return new_bb;
5578 }
5579
5580 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5581
5582 static void
5583 add_phi_args_after_copy_edge (edge e_copy)
5584 {
5585 basic_block bb, bb_copy = e_copy->src, dest;
5586 edge e;
5587 edge_iterator ei;
5588 gimple phi, phi_copy;
5589 tree def;
5590 gimple_stmt_iterator psi, psi_copy;
5591
5592 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5593 return;
5594
5595 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5596
5597 if (e_copy->dest->flags & BB_DUPLICATED)
5598 dest = get_bb_original (e_copy->dest);
5599 else
5600 dest = e_copy->dest;
5601
5602 e = find_edge (bb, dest);
5603 if (!e)
5604 {
5605 /* During loop unrolling the target of the latch edge is copied.
5606 	     In this case we are not looking for the edge to DEST, but for
5607 	     the edge to the duplicated block whose original was DEST.  */
5608 FOR_EACH_EDGE (e, ei, bb->succs)
5609 {
5610 if ((e->dest->flags & BB_DUPLICATED)
5611 && get_bb_original (e->dest) == dest)
5612 break;
5613 }
5614
5615 gcc_assert (e != NULL);
5616 }
5617
5618 for (psi = gsi_start_phis (e->dest),
5619 psi_copy = gsi_start_phis (e_copy->dest);
5620 !gsi_end_p (psi);
5621 gsi_next (&psi), gsi_next (&psi_copy))
5622 {
5623 phi = gsi_stmt (psi);
5624 phi_copy = gsi_stmt (psi_copy);
5625 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5626 add_phi_arg (phi_copy, def, e_copy,
5627 gimple_phi_arg_location_from_edge (phi, e));
5628 }
5629 }
5630
5631
5632 /* Basic block BB_COPY was created by code duplication. Add phi node
5633 arguments for edges going out of BB_COPY. The blocks that were
5634 duplicated have BB_DUPLICATED set. */
5635
5636 void
5637 add_phi_args_after_copy_bb (basic_block bb_copy)
5638 {
5639 edge e_copy;
5640 edge_iterator ei;
5641
5642 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5643 {
5644 add_phi_args_after_copy_edge (e_copy);
5645 }
5646 }
5647
5648 /* Blocks in REGION_COPY array of length N_REGION were created by
5649 duplication of basic blocks. Add phi node arguments for edges
5650 going from these blocks. If E_COPY is not NULL, also add
5651    phi node arguments for its destination.  */
5652
5653 void
5654 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5655 edge e_copy)
5656 {
5657 unsigned i;
5658
5659 for (i = 0; i < n_region; i++)
5660 region_copy[i]->flags |= BB_DUPLICATED;
5661
5662 for (i = 0; i < n_region; i++)
5663 add_phi_args_after_copy_bb (region_copy[i]);
5664 if (e_copy)
5665 add_phi_args_after_copy_edge (e_copy);
5666
5667 for (i = 0; i < n_region; i++)
5668 region_copy[i]->flags &= ~BB_DUPLICATED;
5669 }
5670
5671 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5672 important exit edge EXIT. By important we mean that no SSA name defined
5673 inside region is live over the other exit edges of the region. All entry
5674 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5675 to the duplicate of the region. Dominance and loop information is
5676 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5677 UPDATE_DOMINANCE is false then we assume that the caller will update the
5678 dominance information after calling this function. The new basic
5679 blocks are stored to REGION_COPY in the same order as they had in REGION,
5680 provided that REGION_COPY is not NULL.
5681 The function returns false if it is unable to copy the region,
5682 true otherwise. */
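
/* For the primary use, loop header copying, this roughly transforms

     while (cond)
       body;

   into

     if (cond)
       do
	 body;
       while (cond);

   by duplicating the header in front of the loop and redirecting the
   entry edge to the copy.  */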
5683
5684 bool
5685 gimple_duplicate_sese_region (edge entry, edge exit,
5686 basic_block *region, unsigned n_region,
5687 basic_block *region_copy,
5688 bool update_dominance)
5689 {
5690 unsigned i;
5691 bool free_region_copy = false, copying_header = false;
5692 struct loop *loop = entry->dest->loop_father;
5693 edge exit_copy;
5694 vec<basic_block> doms;
5695 edge redirected;
5696 int total_freq = 0, entry_freq = 0;
5697 gcov_type total_count = 0, entry_count = 0;
5698
5699 if (!can_copy_bbs_p (region, n_region))
5700 return false;
5701
5702 /* Some sanity checking. Note that we do not check for all possible
5703 	     misuses of the function.  I.e. if you ask to copy something weird,
5704 it will work, but the state of structures probably will not be
5705 correct. */
5706 for (i = 0; i < n_region; i++)
5707 {
5708 /* We do not handle subloops, i.e. all the blocks must belong to the
5709 same loop. */
5710 if (region[i]->loop_father != loop)
5711 return false;
5712
5713 if (region[i] != entry->dest
5714 && region[i] == loop->header)
5715 return false;
5716 }
5717
5718 set_loop_copy (loop, loop);
5719
5720 /* In case the function is used for loop header copying (which is the primary
5721 use), ensure that EXIT and its copy will be new latch and entry edges. */
5722 if (loop->header == entry->dest)
5723 {
5724 copying_header = true;
5725 set_loop_copy (loop, loop_outer (loop));
5726
5727 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5728 return false;
5729
5730 for (i = 0; i < n_region; i++)
5731 if (region[i] != exit->src
5732 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5733 return false;
5734 }
5735
5736 if (!region_copy)
5737 {
5738 region_copy = XNEWVEC (basic_block, n_region);
5739 free_region_copy = true;
5740 }
5741
5742 initialize_original_copy_tables ();
5743
5744 /* Record blocks outside the region that are dominated by something
5745 inside. */
5746 if (update_dominance)
5747 {
5748 doms.create (0);
5749 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5750 }
5751
5752 if (entry->dest->count)
5753 {
5754 total_count = entry->dest->count;
5755 entry_count = entry->count;
5756 /* Fix up corner cases, to avoid division by zero or creation of negative
5757 frequencies. */
5758 if (entry_count > total_count)
5759 entry_count = total_count;
5760 }
5761 else
5762 {
5763 total_freq = entry->dest->frequency;
5764 entry_freq = EDGE_FREQUENCY (entry);
5765 /* Fix up corner cases, to avoid division by zero or creation of negative
5766 frequencies. */
5767 if (total_freq == 0)
5768 total_freq = 1;
5769 else if (entry_freq > total_freq)
5770 entry_freq = total_freq;
5771 }
5772
5773 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5774 split_edge_bb_loc (entry), update_dominance);
5775 if (total_count)
5776 {
5777 scale_bbs_frequencies_gcov_type (region, n_region,
5778 total_count - entry_count,
5779 total_count);
5780 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5781 total_count);
5782 }
5783 else
5784 {
5785 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5786 total_freq);
5787 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5788 }
5789
5790 if (copying_header)
5791 {
5792 loop->header = exit->dest;
5793 loop->latch = exit->src;
5794 }
5795
5796 /* Redirect the entry and add the phi node arguments. */
5797 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5798 gcc_assert (redirected != NULL);
5799 flush_pending_stmts (entry);
5800
5801 /* Concerning updating of dominators: We must recount dominators
5802 for entry block and its copy. Anything that is outside of the
5803 region, but was dominated by something inside needs recounting as
5804 well. */
5805 if (update_dominance)
5806 {
5807 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5808 doms.safe_push (get_bb_original (entry->dest));
5809 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5810 doms.release ();
5811 }
5812
5813 /* Add the other PHI node arguments. */
5814 add_phi_args_after_copy (region_copy, n_region, NULL);
5815
5816 if (free_region_copy)
5817 free (region_copy);
5818
5819 free_original_copy_tables ();
5820 return true;
5821 }
5822
5823 /* Checks if BB is part of the region defined by N_REGION BBS. */
5824 static bool
5825 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5826 {
5827 unsigned int n;
5828
5829 for (n = 0; n < n_region; n++)
5830 {
5831 if (bb == bbs[n])
5832 return true;
5833 }
5834 return false;
5835 }
5836
5837 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5838    are stored to REGION_COPY in the same order in which they appear
5839 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5840 the region, EXIT an exit from it. The condition guarding EXIT
5841 is moved to ENTRY. Returns true if duplication succeeds, false
5842 otherwise.
5843
5844 For example,
5845
5846 some_code;
5847 if (cond)
5848 A;
5849 else
5850 B;
5851
5852 is transformed to
5853
5854 if (cond)
5855 {
5856 some_code;
5857 A;
5858 }
5859 else
5860 {
5861 some_code;
5862 B;
5863 }
5864 */
5865
5866 bool
5867 gimple_duplicate_sese_tail (edge entry, edge exit,
5868 			    basic_block *region, unsigned n_region,
5869 			    basic_block *region_copy)
5870 {
5871 unsigned i;
5872 bool free_region_copy = false;
5873 struct loop *loop = exit->dest->loop_father;
5874 struct loop *orig_loop = entry->dest->loop_father;
5875 basic_block switch_bb, entry_bb, nentry_bb;
5876 vec<basic_block> doms;
5877 int total_freq = 0, exit_freq = 0;
5878 gcov_type total_count = 0, exit_count = 0;
5879 edge exits[2], nexits[2], e;
5880 gimple_stmt_iterator gsi;
5881 gimple cond_stmt;
5882 edge sorig, snew;
5883 basic_block exit_bb;
5884 gimple_stmt_iterator psi;
5885 gimple phi;
5886 tree def;
5887 struct loop *target, *aloop, *cloop;
5888
5889 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5890 exits[0] = exit;
5891 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5892
5893 if (!can_copy_bbs_p (region, n_region))
5894 return false;
5895
5896 initialize_original_copy_tables ();
5897 set_loop_copy (orig_loop, loop);
5898
5899   target = loop;
5900 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5901 {
5902 if (bb_part_of_region_p (aloop->header, region, n_region))
5903 {
5904 cloop = duplicate_loop (aloop, target);
5905 duplicate_subloops (aloop, cloop);
5906 }
5907 }
5908
5909 if (!region_copy)
5910 {
5911 region_copy = XNEWVEC (basic_block, n_region);
5912 free_region_copy = true;
5913 }
5914
5915 gcc_assert (!need_ssa_update_p (cfun));
5916
5917 /* Record blocks outside the region that are dominated by something
5918 inside. */
5919 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5920
5921 if (exit->src->count)
5922 {
5923 total_count = exit->src->count;
5924 exit_count = exit->count;
5925 /* Fix up corner cases, to avoid division by zero or creation of negative
5926 frequencies. */
5927 if (exit_count > total_count)
5928 exit_count = total_count;
5929 }
5930 else
5931 {
5932 total_freq = exit->src->frequency;
5933 exit_freq = EDGE_FREQUENCY (exit);
5934 /* Fix up corner cases, to avoid division by zero or creation of negative
5935 frequencies. */
5936 if (total_freq == 0)
5937 total_freq = 1;
5938 if (exit_freq > total_freq)
5939 exit_freq = total_freq;
5940 }
5941
5942 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5943 split_edge_bb_loc (exit), true);
5944 if (total_count)
5945 {
5946 scale_bbs_frequencies_gcov_type (region, n_region,
5947 total_count - exit_count,
5948 total_count);
5949 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5950 total_count);
5951 }
5952 else
5953 {
5954 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5955 total_freq);
5956 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5957 }
5958
5959 /* Create the switch block, and put the exit condition to it. */
5960 entry_bb = entry->dest;
5961 nentry_bb = get_bb_copy (entry_bb);
5962 if (!last_stmt (entry->src)
5963 || !stmt_ends_bb_p (last_stmt (entry->src)))
5964 switch_bb = entry->src;
5965 else
5966 switch_bb = split_edge (entry);
5967 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5968
5969 gsi = gsi_last_bb (switch_bb);
5970 cond_stmt = last_stmt (exit->src);
5971 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5972 cond_stmt = gimple_copy (cond_stmt);
5973
5974 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5975
5976 sorig = single_succ_edge (switch_bb);
5977 sorig->flags = exits[1]->flags;
5978 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5979
5980 /* Register the new edge from SWITCH_BB in loop exit lists. */
5981 rescan_loop_exit (snew, true, false);
5982
5983 /* Add the PHI node arguments. */
5984 add_phi_args_after_copy (region_copy, n_region, snew);
5985
5986 /* Get rid of now superfluous conditions and associated edges (and phi node
5987 arguments). */
5988 exit_bb = exit->dest;
5989
5990 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5991 PENDING_STMT (e) = NULL;
5992
5993 /* The latch of ORIG_LOOP was copied, and so was the backedge
5994 to the original header. We redirect this backedge to EXIT_BB. */
5995 for (i = 0; i < n_region; i++)
5996 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5997 {
5998 gcc_assert (single_succ_edge (region_copy[i]));
5999 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6000 PENDING_STMT (e) = NULL;
6001 for (psi = gsi_start_phis (exit_bb);
6002 !gsi_end_p (psi);
6003 gsi_next (&psi))
6004 {
6005 phi = gsi_stmt (psi);
6006 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6007 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6008 }
6009 }
6010 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6011 PENDING_STMT (e) = NULL;
6012
6013 /* Anything that is outside of the region, but was dominated by something
6014 inside needs to update dominance info. */
6015 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6016 doms.release ();
6017 /* Update the SSA web. */
6018 update_ssa (TODO_update_ssa);
6019
6020 if (free_region_copy)
6021 free (region_copy);
6022
6023 free_original_copy_tables ();
6024 return true;
6025 }
6026
6027 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6028    descending when the traversal reaches EXIT; EXIT itself is added.
6029    This function silently assumes that ENTRY strictly dominates EXIT.  */
6030
6031 void
6032 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6033 vec<basic_block> *bbs_p)
6034 {
6035 basic_block son;
6036
6037 for (son = first_dom_son (CDI_DOMINATORS, entry);
6038 son;
6039 son = next_dom_son (CDI_DOMINATORS, son))
6040 {
6041 bbs_p->safe_push (son);
6042 if (son != exit)
6043 gather_blocks_in_sese_region (son, exit, bbs_p);
6044 }
6045 }
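
/* For example (an illustrative sketch; move_sese_region_to_fn below is a
   real caller), collecting the blocks of the SESE region (ENTRY, EXIT)
   looks like:

     vec<basic_block> bbs = vNULL;
     bbs.safe_push (entry);
     gather_blocks_in_sese_region (entry, exit, &bbs);
     ... use BBS ...
     bbs.release ();

   ENTRY itself must be pushed by hand; the traversal only pushes the
   blocks it dominates.  */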
6046
6047 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6048 The duplicates are recorded in VARS_MAP. */
6049
6050 static void
6051 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6052 tree to_context)
6053 {
6054 tree t = *tp, new_t;
6055 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6056 void **loc;
6057
6058 if (DECL_CONTEXT (t) == to_context)
6059 return;
6060
6061 loc = pointer_map_contains (vars_map, t);
6062
6063 if (!loc)
6064 {
6065 loc = pointer_map_insert (vars_map, t);
6066
6067 if (SSA_VAR_P (t))
6068 {
6069 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6070 add_local_decl (f, new_t);
6071 }
6072 else
6073 {
6074 gcc_assert (TREE_CODE (t) == CONST_DECL);
6075 new_t = copy_node (t);
6076 }
6077 DECL_CONTEXT (new_t) = to_context;
6078
6079 *loc = new_t;
6080 }
6081 else
6082 new_t = (tree) *loc;
6083
6084 *tp = new_t;
6085 }
6086
6087
6088 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6089 VARS_MAP maps old ssa names and var_decls to the new ones. */
6090
6091 static tree
6092 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6093 tree to_context)
6094 {
6095 void **loc;
6096 tree new_name;
6097
6098 gcc_assert (!virtual_operand_p (name));
6099
6100 loc = pointer_map_contains (vars_map, name);
6101
6102 if (!loc)
6103 {
6104 tree decl = SSA_NAME_VAR (name);
6105 if (decl)
6106 {
6107 replace_by_duplicate_decl (&decl, vars_map, to_context);
6108 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6109 decl, SSA_NAME_DEF_STMT (name));
6110 if (SSA_NAME_IS_DEFAULT_DEF (name))
6111 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6112 decl, new_name);
6113 }
6114 else
6115 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6116 name, SSA_NAME_DEF_STMT (name));
6117
6118 loc = pointer_map_insert (vars_map, name);
6119 *loc = new_name;
6120 }
6121 else
6122 new_name = (tree) *loc;
6123
6124 return new_name;
6125 }
6126
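/* Bookkeeping passed via WI->INFO to the move_stmt_op / move_stmt_r
   callbacks below: the BLOCK and function contexts being moved between,
   plus the remapping tables for variables, labels and EH regions.  */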
6127 struct move_stmt_d
6128 {
6129 tree orig_block;
6130 tree new_block;
6131 tree from_context;
6132 tree to_context;
6133 struct pointer_map_t *vars_map;
6134 htab_t new_label_map;
6135 struct pointer_map_t *eh_map;
6136 bool remap_decls_p;
6137 };
6138
6139 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6140 contained in *TP whose block was ORIG_BLOCK previously, and change the
6141 DECL_CONTEXT of every local variable referenced in *TP. */
6142
6143 static tree
6144 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6145 {
6146 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6147 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6148 tree t = *tp;
6149
6150 if (EXPR_P (t))
6151 {
6152 tree block = TREE_BLOCK (t);
6153 if (block == p->orig_block
6154 || (p->orig_block == NULL_TREE
6155 && block != NULL_TREE))
6156 TREE_SET_BLOCK (t, p->new_block);
6157 #ifdef ENABLE_CHECKING
6158 else if (block != NULL_TREE)
6159 {
6160 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6161 block = BLOCK_SUPERCONTEXT (block);
6162 gcc_assert (block == p->orig_block);
6163 }
6164 #endif
6165 }
6166 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6167 {
6168 if (TREE_CODE (t) == SSA_NAME)
6169 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6170 else if (TREE_CODE (t) == LABEL_DECL)
6171 {
6172 if (p->new_label_map)
6173 {
6174 struct tree_map in, *out;
6175 in.base.from = t;
6176 out = (struct tree_map *)
6177 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6178 if (out)
6179 *tp = t = out->to;
6180 }
6181
6182 DECL_CONTEXT (t) = p->to_context;
6183 }
6184 else if (p->remap_decls_p)
6185 {
6186 /* Replace T with its duplicate. T should no longer appear in the
6187 parent function, so this looks wasteful; however, it may appear
6188 in referenced_vars, and more importantly, as virtual operands of
6189 statements, and in alias lists of other variables. It would be
6190 quite difficult to expunge it from all those places. ??? It might
6191 suffice to do this for addressable variables. */
6192 if ((TREE_CODE (t) == VAR_DECL
6193 && !is_global_var (t))
6194 || TREE_CODE (t) == CONST_DECL)
6195 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6196 }
6197 *walk_subtrees = 0;
6198 }
6199 else if (TYPE_P (t))
6200 *walk_subtrees = 0;
6201
6202 return NULL_TREE;
6203 }
6204
6205 /* Helper for move_stmt_r. Given an EH region number for the source
6206 function, map that to the duplicate EH region number in the dest. */
6207
6208 static int
6209 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6210 {
6211 eh_region old_r, new_r;
6212 void **slot;
6213
6214 old_r = get_eh_region_from_number (old_nr);
6215 slot = pointer_map_contains (p->eh_map, old_r);
6216 new_r = (eh_region) *slot;
6217
6218 return new_r->index;
6219 }
6220
6221 /* Similar, but operate on INTEGER_CSTs. */
6222
6223 static tree
6224 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6225 {
6226 int old_nr, new_nr;
6227
6228 old_nr = tree_low_cst (old_t_nr, 0);
6229 new_nr = move_stmt_eh_region_nr (old_nr, p);
6230
6231 return build_int_cst (integer_type_node, new_nr);
6232 }
6233
6234 /* Like move_stmt_op, but for gimple statements.
6235
6236 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6237 contained in the current statement in *GSI_P and change the
6238 DECL_CONTEXT of every local variable referenced in the current
6239 statement. */
6240
6241 static tree
6242 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6243 struct walk_stmt_info *wi)
6244 {
6245 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6246 gimple stmt = gsi_stmt (*gsi_p);
6247 tree block = gimple_block (stmt);
6248
6249 if (block == p->orig_block
6250 || (p->orig_block == NULL_TREE
6251 && block != NULL_TREE))
6252 gimple_set_block (stmt, p->new_block);
6253
6254 switch (gimple_code (stmt))
6255 {
6256 case GIMPLE_CALL:
6257 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6258 {
6259 tree r, fndecl = gimple_call_fndecl (stmt);
6260 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6261 switch (DECL_FUNCTION_CODE (fndecl))
6262 {
6263 case BUILT_IN_EH_COPY_VALUES:
6264 r = gimple_call_arg (stmt, 1);
6265 r = move_stmt_eh_region_tree_nr (r, p);
6266 gimple_call_set_arg (stmt, 1, r);
6267 /* FALLTHRU */
6268
6269 case BUILT_IN_EH_POINTER:
6270 case BUILT_IN_EH_FILTER:
6271 r = gimple_call_arg (stmt, 0);
6272 r = move_stmt_eh_region_tree_nr (r, p);
6273 gimple_call_set_arg (stmt, 0, r);
6274 break;
6275
6276 default:
6277 break;
6278 }
6279 }
6280 break;
6281
6282 case GIMPLE_RESX:
6283 {
6284 int r = gimple_resx_region (stmt);
6285 r = move_stmt_eh_region_nr (r, p);
6286 gimple_resx_set_region (stmt, r);
6287 }
6288 break;
6289
6290 case GIMPLE_EH_DISPATCH:
6291 {
6292 int r = gimple_eh_dispatch_region (stmt);
6293 r = move_stmt_eh_region_nr (r, p);
6294 gimple_eh_dispatch_set_region (stmt, r);
6295 }
6296 break;
6297
6298 case GIMPLE_OMP_RETURN:
6299 case GIMPLE_OMP_CONTINUE:
6300 break;
6301 default:
6302 if (is_gimple_omp (stmt))
6303 {
6304 /* Do not remap variables inside OMP directives. Variables
6305 referenced in clauses and directive header belong to the
6306 parent function and should not be moved into the child
6307 function. */
6308 bool save_remap_decls_p = p->remap_decls_p;
6309 p->remap_decls_p = false;
6310 *handled_ops_p = true;
6311
6312 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6313 move_stmt_op, wi);
6314
6315 p->remap_decls_p = save_remap_decls_p;
6316 }
6317 break;
6318 }
6319
6320 return NULL_TREE;
6321 }
6322
6323 /* Move basic block BB from function CFUN to function DEST_FN. The
6324 block is moved out of the original linked list and placed after
6325 block AFTER in the new list. Also, the block is removed from the
6326 original array of blocks and placed in DEST_FN's array of blocks.
6327 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6328 updated to reflect the moved edges.
6329
6330 The local variables are remapped to new instances, VARS_MAP is used
6331 to record the mapping. */
6332
6333 static void
6334 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6335 basic_block after, bool update_edge_count_p,
6336 struct move_stmt_d *d)
6337 {
6338 struct control_flow_graph *cfg;
6339 edge_iterator ei;
6340 edge e;
6341 gimple_stmt_iterator si;
6342 unsigned old_len, new_len;
6343
6344 /* Remove BB from dominance structures. */
6345 delete_from_dominance_info (CDI_DOMINATORS, bb);
6346
6347 /* Move BB from its current loop to the copy in the new function. */
6348 if (current_loops)
6349 {
6350 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6351 if (new_loop)
6352 bb->loop_father = new_loop;
6353 }
6354
6355 /* Link BB to the new linked list. */
6356 move_block_after (bb, after);
6357
6358 /* Update the edge count in the corresponding flowgraphs. */
6359 if (update_edge_count_p)
6360 FOR_EACH_EDGE (e, ei, bb->succs)
6361 {
6362 cfun->cfg->x_n_edges--;
6363 dest_cfun->cfg->x_n_edges++;
6364 }
6365
6366 /* Remove BB from the original basic block array. */
6367 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6368 cfun->cfg->x_n_basic_blocks--;
6369
6370 /* Grow DEST_CFUN's basic block array if needed. */
6371 cfg = dest_cfun->cfg;
6372 cfg->x_n_basic_blocks++;
6373 if (bb->index >= cfg->x_last_basic_block)
6374 cfg->x_last_basic_block = bb->index + 1;
6375
6376 old_len = vec_safe_length (cfg->x_basic_block_info);
6377 if ((unsigned) cfg->x_last_basic_block >= old_len)
6378 {
6379 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6380 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6381 }
6382
6383 (*cfg->x_basic_block_info)[bb->index] = bb;
6384
6385 /* Remap the variables in phi nodes. */
6386 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6387 {
6388 gimple phi = gsi_stmt (si);
6389 use_operand_p use;
6390 tree op = PHI_RESULT (phi);
6391 ssa_op_iter oi;
6392 unsigned i;
6393
6394 if (virtual_operand_p (op))
6395 {
6396 /* Remove the phi nodes for virtual operands (alias analysis will be
6397 run for the new function, anyway). */
6398 remove_phi_node (&si, true);
6399 continue;
6400 }
6401
6402 SET_PHI_RESULT (phi,
6403 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6404 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6405 {
6406 op = USE_FROM_PTR (use);
6407 if (TREE_CODE (op) == SSA_NAME)
6408 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6409 }
6410
6411 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6412 {
6413 location_t locus = gimple_phi_arg_location (phi, i);
6414 tree block = LOCATION_BLOCK (locus);
6415
6416 if (locus == UNKNOWN_LOCATION)
6417 continue;
6418 if (d->orig_block == NULL_TREE || block == d->orig_block)
6419 {
6420 if (d->new_block == NULL_TREE)
6421 locus = LOCATION_LOCUS (locus);
6422 else
6423 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6424 gimple_phi_arg_set_location (phi, i, locus);
6425 }
6426 }
6427
6428 gsi_next (&si);
6429 }
6430
6431 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6432 {
6433 gimple stmt = gsi_stmt (si);
6434 struct walk_stmt_info wi;
6435
6436 memset (&wi, 0, sizeof (wi));
6437 wi.info = d;
6438 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6439
6440 if (gimple_code (stmt) == GIMPLE_LABEL)
6441 {
6442 tree label = gimple_label_label (stmt);
6443 int uid = LABEL_DECL_UID (label);
6444
6445 gcc_assert (uid > -1);
6446
6447 old_len = vec_safe_length (cfg->x_label_to_block_map);
6448 if (old_len <= (unsigned) uid)
6449 {
6450 new_len = 3 * uid / 2 + 1;
6451 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6452 }
6453
6454 (*cfg->x_label_to_block_map)[uid] = bb;
6455 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6456
6457 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6458
6459 if (uid >= dest_cfun->cfg->last_label_uid)
6460 dest_cfun->cfg->last_label_uid = uid + 1;
6461 }
6462
6463 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6464 remove_stmt_from_eh_lp_fn (cfun, stmt);
6465
6466 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6467 gimple_remove_stmt_histograms (cfun, stmt);
6468
6469 /* We cannot leave any operands allocated from the operand caches of
6470 the current function. */
6471 free_stmt_operands (stmt);
6472 push_cfun (dest_cfun);
6473 update_stmt (stmt);
6474 pop_cfun ();
6475 }
6476
6477 FOR_EACH_EDGE (e, ei, bb->succs)
6478 if (e->goto_locus != UNKNOWN_LOCATION)
6479 {
6480 tree block = LOCATION_BLOCK (e->goto_locus);
6481 if (d->orig_block == NULL_TREE
6482 || block == d->orig_block)
6483 e->goto_locus = d->new_block ?
6484 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6485 LOCATION_LOCUS (e->goto_locus);
6486 }
6487 }
6488
6489 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6490 the outermost EH region. Use REGION as the incoming base EH region. */
6491
6492 static eh_region
6493 find_outermost_region_in_block (struct function *src_cfun,
6494 basic_block bb, eh_region region)
6495 {
6496 gimple_stmt_iterator si;
6497
6498 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6499 {
6500 gimple stmt = gsi_stmt (si);
6501 eh_region stmt_region;
6502 int lp_nr;
6503
6504 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6505 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6506 if (stmt_region)
6507 {
6508 if (region == NULL)
6509 region = stmt_region;
6510 else if (stmt_region != region)
6511 {
6512 region = eh_region_outermost (src_cfun, stmt_region, region);
6513 gcc_assert (region != NULL);
6514 }
6515 }
6516 }
6517
6518 return region;
6519 }
6520
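/* Callback for duplicate_eh_regions; DATA is the label map hash table.
   Create an artificial label duplicating label DECL, record the mapping
   in the table, and return the new label.  */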
6521 static tree
6522 new_label_mapper (tree decl, void *data)
6523 {
6524 htab_t hash = (htab_t) data;
6525 struct tree_map *m;
6526 void **slot;
6527
6528 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6529
6530 m = XNEW (struct tree_map);
6531 m->hash = DECL_UID (decl);
6532 m->base.from = decl;
6533 m->to = create_artificial_label (UNKNOWN_LOCATION);
6534 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6535 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6536 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6537
6538 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6539 gcc_assert (*slot == NULL);
6540
6541 *slot = m;
6542
6543 return m->to;
6544 }
6545
6546 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6547 subblocks. */
6548
6549 static void
6550 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6551 tree to_context)
6552 {
6553 tree *tp, t;
6554
6555 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6556 {
6557 t = *tp;
6558 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6559 continue;
6560 replace_by_duplicate_decl (&t, vars_map, to_context);
6561 if (t != *tp)
6562 {
6563 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6564 {
6565 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6566 DECL_HAS_VALUE_EXPR_P (t) = 1;
6567 }
6568 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6569 *tp = t;
6570 }
6571 }
6572
6573 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6574 replace_block_vars_by_duplicates (block, vars_map, to_context);
6575 }
6576
6577 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6578 from FN1 to FN2. */
6579
6580 static void
6581 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6582 struct loop *loop)
6583 {
6584 /* Discard it from the old loop array. */
6585 (*get_loops (fn1))[loop->num] = NULL;
6586
6587 /* Place it in the new loop array, assigning it a new number. */
6588 loop->num = number_of_loops (fn2);
6589 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6590
6591 /* Recurse to children. */
6592 for (loop = loop->inner; loop; loop = loop->next)
6593 fixup_loop_arrays_after_move (fn1, fn2, loop);
6594 }
6595
6596 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6597 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6598 single basic block in the original CFG and the new basic block is
6599 returned. DEST_CFUN must not have a CFG yet.
6600
6601 Note that the region need not be a pure SESE region. Blocks inside
6602 the region may contain calls to abort/exit. The only restriction
6603 is that ENTRY_BB should be the only entry point and it must
6604 dominate EXIT_BB.
6605
6606 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6607 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6608 to the new function.
6609
6610 All local variables referenced in the region are assumed to be in
6611 the corresponding BLOCK_VARS and unexpanded variable lists
6612 associated with DEST_CFUN. */
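
/* (For example, the OpenMP expansion code in omp-low.c uses this to
   outline the body of a parallel region into its own child function.)  */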
6613
6614 basic_block
6615 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6616 basic_block exit_bb, tree orig_block)
6617 {
6618 vec<basic_block> bbs, dom_bbs;
6619 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6620 basic_block after, bb, *entry_pred, *exit_succ, abb;
6621 struct function *saved_cfun = cfun;
6622 int *entry_flag, *exit_flag;
6623 unsigned *entry_prob, *exit_prob;
6624 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6625 edge e;
6626 edge_iterator ei;
6627 htab_t new_label_map;
6628 struct pointer_map_t *vars_map, *eh_map;
6629 struct loop *loop = entry_bb->loop_father;
6630 struct loop *loop0 = get_loop (saved_cfun, 0);
6631 struct move_stmt_d d;
6632
6633 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6634 region. */
6635 gcc_assert (entry_bb != exit_bb
6636 && (!exit_bb
6637 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6638
6639 /* Collect all the blocks in the region.  Manually add ENTRY_BB
6640 because it won't be added by gather_blocks_in_sese_region. */
6641 bbs.create (0);
6642 bbs.safe_push (entry_bb);
6643 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6644
6645 /* The blocks that used to be dominated by something in BBS will now be
6646 dominated by the new block. */
6647 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6648 bbs.address (),
6649 bbs.length ());
6650
6651 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6652 the predecessor edges to ENTRY_BB and the successor edges to
6653 EXIT_BB so that we can re-attach them to the new basic block that
6654 will replace the region. */
6655 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6656 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6657 entry_flag = XNEWVEC (int, num_entry_edges);
6658 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6659 i = 0;
6660 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6661 {
6662 entry_prob[i] = e->probability;
6663 entry_flag[i] = e->flags;
6664 entry_pred[i++] = e->src;
6665 remove_edge (e);
6666 }
6667
6668 if (exit_bb)
6669 {
6670 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6671 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6672 exit_flag = XNEWVEC (int, num_exit_edges);
6673 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6674 i = 0;
6675 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6676 {
6677 exit_prob[i] = e->probability;
6678 exit_flag[i] = e->flags;
6679 exit_succ[i++] = e->dest;
6680 remove_edge (e);
6681 }
6682 }
6683 else
6684 {
6685 num_exit_edges = 0;
6686 exit_succ = NULL;
6687 exit_flag = NULL;
6688 exit_prob = NULL;
6689 }
6690
6691 /* Switch context to the child function to initialize DEST_FN's CFG. */
6692 gcc_assert (dest_cfun->cfg == NULL);
6693 push_cfun (dest_cfun);
6694
6695 init_empty_tree_cfg ();
6696
6697 /* Initialize EH information for the new function. */
6698 eh_map = NULL;
6699 new_label_map = NULL;
6700 if (saved_cfun->eh)
6701 {
6702 eh_region region = NULL;
6703
6704 FOR_EACH_VEC_ELT (bbs, i, bb)
6705 region = find_outermost_region_in_block (saved_cfun, bb, region);
6706
6707 init_eh_for_function ();
6708 if (region != NULL)
6709 {
6710 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6711 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6712 new_label_mapper, new_label_map);
6713 }
6714 }
6715
6716 /* Initialize an empty loop tree. */
6717 struct loops *loops = ggc_alloc_cleared_loops ();
6718 init_loops_structure (dest_cfun, loops, 1);
6719 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6720 set_loops_for_fn (dest_cfun, loops);
6721
6722 /* Move the outlined loop tree part. */
6723 num_nodes = bbs.length ();
6724 FOR_EACH_VEC_ELT (bbs, i, bb)
6725 {
6726 if (bb->loop_father->header == bb)
6727 {
6728 struct loop *this_loop = bb->loop_father;
6729 struct loop *outer = loop_outer (this_loop);
6730 if (outer == loop
6731 /* If the SESE region contains some bbs ending with
6732 a noreturn call, those are considered to belong
6733 to the outermost loop in saved_cfun, rather than
6734 the entry_bb's loop_father. */
6735 || outer == loop0)
6736 {
6737 if (outer != loop)
6738 num_nodes -= this_loop->num_nodes;
6739 flow_loop_tree_node_remove (bb->loop_father);
6740 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6741 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6742 }
6743 }
6744 else if (bb->loop_father == loop0 && loop0 != loop)
6745 num_nodes--;
6746
6747 /* Remove loop exits from the outlined region. */
6748 if (loops_for_fn (saved_cfun)->exits)
6749 FOR_EACH_EDGE (e, ei, bb->succs)
6750 {
6751 void **slot = htab_find_slot_with_hash
6752 (loops_for_fn (saved_cfun)->exits, e,
6753 htab_hash_pointer (e), NO_INSERT);
6754 if (slot)
6755 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6756 }
6757 }
6758
6759
6760 /* Adjust the number of blocks in the tree root of the outlined part. */
6761 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6762
6763 /* Setup a mapping to be used by move_block_to_fn. */
6764 loop->aux = current_loops->tree_root;
6765 loop0->aux = current_loops->tree_root;
6766
6767 pop_cfun ();
6768
6769 /* Move blocks from BBS into DEST_CFUN. */
6770 gcc_assert (bbs.length () >= 2);
6771 after = dest_cfun->cfg->x_entry_block_ptr;
6772 vars_map = pointer_map_create ();
6773
6774 memset (&d, 0, sizeof (d));
6775 d.orig_block = orig_block;
6776 d.new_block = DECL_INITIAL (dest_cfun->decl);
6777 d.from_context = cfun->decl;
6778 d.to_context = dest_cfun->decl;
6779 d.vars_map = vars_map;
6780 d.new_label_map = new_label_map;
6781 d.eh_map = eh_map;
6782 d.remap_decls_p = true;
6783
6784 FOR_EACH_VEC_ELT (bbs, i, bb)
6785 {
6786 /* No need to update edge counts on the last block. It has
6787 already been updated earlier when we detached the region from
6788 the original CFG. */
6789 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6790 after = bb;
6791 }
6792
6793 loop->aux = NULL;
6794 loop0->aux = NULL;
6795 /* Loop sizes are no longer correct, fix them up. */
6796 loop->num_nodes -= num_nodes;
6797 for (struct loop *outer = loop_outer (loop);
6798 outer; outer = loop_outer (outer))
6799 outer->num_nodes -= num_nodes;
6800 loop0->num_nodes -= bbs.length () - num_nodes;
6801
6802 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6803 {
6804 struct loop *aloop;
6805 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6806 if (aloop != NULL)
6807 {
6808 if (aloop->simduid)
6809 {
6810 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6811 d.to_context);
6812 dest_cfun->has_simduid_loops = true;
6813 }
6814 if (aloop->force_vect)
6815 dest_cfun->has_force_vect_loops = true;
6816 }
6817 }
6818
6819 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6820 if (orig_block)
6821 {
6822 tree block;
6823 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6824 == NULL_TREE);
6825 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6826 = BLOCK_SUBBLOCKS (orig_block);
6827 for (block = BLOCK_SUBBLOCKS (orig_block);
6828 block; block = BLOCK_CHAIN (block))
6829 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6830 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6831 }
6832
6833 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6834 vars_map, dest_cfun->decl);
6835
6836 if (new_label_map)
6837 htab_delete (new_label_map);
6838 if (eh_map)
6839 pointer_map_destroy (eh_map);
6840 pointer_map_destroy (vars_map);
6841
6842 /* Rewire the entry and exit blocks. The successor to the entry
6843 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6844 the child function.  Similarly, EXIT_BB turns into the predecessor
6845 of DEST_FN's EXIT_BLOCK_PTR.  We
6846 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6847 various CFG manipulation functions get to the right CFG.
6848
6849 FIXME, this is silly. The CFG ought to become a parameter to
6850 these helpers. */
6851 push_cfun (dest_cfun);
6852 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6853 if (exit_bb)
6854 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6855 pop_cfun ();
6856
6857 /* Back in the original function, the SESE region has disappeared,
6858 create a new basic block in its place. */
6859 bb = create_empty_bb (entry_pred[0]);
6860 if (current_loops)
6861 add_bb_to_loop (bb, loop);
6862 for (i = 0; i < num_entry_edges; i++)
6863 {
6864 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6865 e->probability = entry_prob[i];
6866 }
6867
6868 for (i = 0; i < num_exit_edges; i++)
6869 {
6870 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6871 e->probability = exit_prob[i];
6872 }
6873
6874 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6875 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6876 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6877 dom_bbs.release ();
6878
6879 if (exit_bb)
6880 {
6881 free (exit_prob);
6882 free (exit_flag);
6883 free (exit_succ);
6884 }
6885 free (entry_prob);
6886 free (entry_flag);
6887 free (entry_pred);
6888 bbs.release ();
6889
6890 return bb;
6891 }
6892
6893
6894 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
6895 dumpfile.h).  */
6896
6897 void
6898 dump_function_to_file (tree fndecl, FILE *file, int flags)
6899 {
6900 tree arg, var, old_current_fndecl = current_function_decl;
6901 struct function *dsf;
6902 bool ignore_topmost_bind = false, any_var = false;
6903 basic_block bb;
6904 tree chain;
6905 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6906 && decl_is_tm_clone (fndecl));
6907 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6908
6909 current_function_decl = fndecl;
6910 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6911
6912 arg = DECL_ARGUMENTS (fndecl);
6913 while (arg)
6914 {
6915 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6916 fprintf (file, " ");
6917 print_generic_expr (file, arg, dump_flags);
6918 if (flags & TDF_VERBOSE)
6919 print_node (file, "", arg, 4);
6920 if (DECL_CHAIN (arg))
6921 fprintf (file, ", ");
6922 arg = DECL_CHAIN (arg);
6923 }
6924 fprintf (file, ")\n");
6925
6926 if (flags & TDF_VERBOSE)
6927 print_node (file, "", fndecl, 2);
6928
6929 dsf = DECL_STRUCT_FUNCTION (fndecl);
6930 if (dsf && (flags & TDF_EH))
6931 dump_eh_tree (file, dsf);
6932
6933 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
6934 {
6935 dump_node (fndecl, TDF_SLIM | flags, file);
6936 current_function_decl = old_current_fndecl;
6937 return;
6938 }
6939
6940 /* When GIMPLE is lowered, the variables are no longer available in
6941 BIND_EXPRs, so display them separately. */
6942 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
6943 {
6944 unsigned ix;
6945 ignore_topmost_bind = true;
6946
6947 fprintf (file, "{\n");
6948 if (!vec_safe_is_empty (fun->local_decls))
6949 FOR_EACH_LOCAL_DECL (fun, ix, var)
6950 {
6951 print_generic_decl (file, var, flags);
6952 if (flags & TDF_VERBOSE)
6953 print_node (file, "", var, 4);
6954 fprintf (file, "\n");
6955
6956 any_var = true;
6957 }
6958 if (gimple_in_ssa_p (cfun))
6959 for (ix = 1; ix < num_ssa_names; ++ix)
6960 {
6961 tree name = ssa_name (ix);
6962 if (name && !SSA_NAME_VAR (name))
6963 {
6964 fprintf (file, " ");
6965 print_generic_expr (file, TREE_TYPE (name), flags);
6966 fprintf (file, " ");
6967 print_generic_expr (file, name, flags);
6968 fprintf (file, ";\n");
6969
6970 any_var = true;
6971 }
6972 }
6973 }
6974
6975 if (fun && fun->decl == fndecl
6976 && fun->cfg
6977 && basic_block_info_for_function (fun))
6978 {
6979 /* If the CFG has been built, emit a CFG-based dump. */
6980 if (!ignore_topmost_bind)
6981 fprintf (file, "{\n");
6982
6983 if (any_var && n_basic_blocks_for_function (fun))
6984 fprintf (file, "\n");
6985
6986 FOR_EACH_BB_FN (bb, fun)
6987 dump_bb (file, bb, 2, flags | TDF_COMMENT);
6988
6989 fprintf (file, "}\n");
6990 }
6991 else if (DECL_SAVED_TREE (fndecl) == NULL)
6992 {
6993 /* The function is now in GIMPLE form but the CFG has not been
6994 built yet. Emit the single sequence of GIMPLE statements
6995 that make up its body. */
6996 gimple_seq body = gimple_body (fndecl);
6997
6998 if (gimple_seq_first_stmt (body)
6999 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7000 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7001 print_gimple_seq (file, body, 0, flags);
7002 else
7003 {
7004 if (!ignore_topmost_bind)
7005 fprintf (file, "{\n");
7006
7007 if (any_var)
7008 fprintf (file, "\n");
7009
7010 print_gimple_seq (file, body, 2, flags);
7011 fprintf (file, "}\n");
7012 }
7013 }
7014 else
7015 {
7016 int indent;
7017
7018 /* Make a tree based dump. */
7019 chain = DECL_SAVED_TREE (fndecl);
7020 if (chain && TREE_CODE (chain) == BIND_EXPR)
7021 {
7022 if (ignore_topmost_bind)
7023 {
7024 chain = BIND_EXPR_BODY (chain);
7025 indent = 2;
7026 }
7027 else
7028 indent = 0;
7029 }
7030 else
7031 {
7032 if (!ignore_topmost_bind)
7033 fprintf (file, "{\n");
7034 indent = 2;
7035 }
7036
7037 if (any_var)
7038 fprintf (file, "\n");
7039
7040 print_generic_stmt_indented (file, chain, flags, indent);
7041 if (ignore_topmost_bind)
7042 fprintf (file, "}\n");
7043 }
7044
7045 if (flags & TDF_ENUMERATE_LOCALS)
7046 dump_enumerated_decls (file, flags);
7047 fprintf (file, "\n\n");
7048
7049 current_function_decl = old_current_fndecl;
7050 }
7051
7052 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7053
7054 DEBUG_FUNCTION void
7055 debug_function (tree fn, int flags)
7056 {
7057 dump_function_to_file (fn, stderr, flags);
7058 }
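
/* Typical use is from the debugger, e.g. (illustrative):

     (gdb) call debug_function (current_function_decl, 0)  */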
7059
7060
7061 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7062
7063 static void
7064 print_pred_bbs (FILE *file, basic_block bb)
7065 {
7066 edge e;
7067 edge_iterator ei;
7068
7069 FOR_EACH_EDGE (e, ei, bb->preds)
7070 fprintf (file, "bb_%d ", e->src->index);
7071 }
7072
7073
7074 /* Print on FILE the indexes for the successors of basic_block BB. */
7075
7076 static void
7077 print_succ_bbs (FILE *file, basic_block bb)
7078 {
7079 edge e;
7080 edge_iterator ei;
7081
7082 FOR_EACH_EDGE (e, ei, bb->succs)
7083 fprintf (file, "bb_%d ", e->dest->index);
7084 }
7085
7086 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
7087
7088 void
7089 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7090 {
7091 char *s_indent = (char *) alloca ((size_t) indent + 1);
7092 memset ((void *) s_indent, ' ', (size_t) indent);
7093 s_indent[indent] = '\0';
7094
7095 /* Print basic_block's header. */
7096 if (verbosity >= 2)
7097 {
7098 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7099 print_pred_bbs (file, bb);
7100 fprintf (file, "}, succs = {");
7101 print_succ_bbs (file, bb);
7102 fprintf (file, "})\n");
7103 }
7104
7105 /* Print basic_block's body. */
7106 if (verbosity >= 3)
7107 {
7108 fprintf (file, "%s {\n", s_indent);
7109 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7110 fprintf (file, "%s }\n", s_indent);
7111 }
7112 }
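
/* With VERBOSITY >= 2 the header printed above looks like this
   (illustrative block numbers):

     bb_5 (preds = {bb_3 bb_4 }, succs = {bb_6 })  */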
7113
7114 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7115
7116 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on the
7117 VERBOSITY level, this outputs the contents of the loop, or just its
7118 structure. */
7119
7120 static void
7121 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7122 {
7123 char *s_indent;
7124 basic_block bb;
7125
7126 if (loop == NULL)
7127 return;
7128
7129 s_indent = (char *) alloca ((size_t) indent + 1);
7130 memset ((void *) s_indent, ' ', (size_t) indent);
7131 s_indent[indent] = '\0';
7132
7133 /* Print loop's header. */
7134 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7135 if (loop->header)
7136 fprintf (file, "header = %d", loop->header->index);
7137 else
7138 {
7139 fprintf (file, "deleted)\n");
7140 return;
7141 }
7142 if (loop->latch)
7143 fprintf (file, ", latch = %d", loop->latch->index);
7144 else
7145 fprintf (file, ", multiple latches");
7146 fprintf (file, ", niter = ");
7147 print_generic_expr (file, loop->nb_iterations, 0);
7148
7149 if (loop->any_upper_bound)
7150 {
7151 fprintf (file, ", upper_bound = ");
7152 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7153 }
7154
7155 if (loop->any_estimate)
7156 {
7157 fprintf (file, ", estimate = ");
7158 dump_double_int (file, loop->nb_iterations_estimate, true);
7159 }
7160 fprintf (file, ")\n");
7161
7162 /* Print loop's body. */
7163 if (verbosity >= 1)
7164 {
7165 fprintf (file, "%s{\n", s_indent);
7166 FOR_EACH_BB (bb)
7167 if (bb->loop_father == loop)
7168 print_loops_bb (file, bb, indent, verbosity);
7169
7170 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7171 fprintf (file, "%s}\n", s_indent);
7172 }
7173 }
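
/* An illustrative header line printed by print_loop (made-up values):

     loop_1 (header = 3, latch = 6, niter = 99, upper_bound = 99, estimate = 99)  */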
7174
7175 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7176 spaces.  Depending on the VERBOSITY level, this outputs the contents
7177 of the loop, or just its structure. */
7178
7179 static void
7180 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7181 int verbosity)
7182 {
7183 if (loop == NULL)
7184 return;
7185
7186 print_loop (file, loop, indent, verbosity);
7187 print_loop_and_siblings (file, loop->next, indent, verbosity);
7188 }
7189
7190 /* Pretty print on FILE the structure of all the loops of the current
7191 function, starting at the CFG entry point, following VERBOSITY. */
7192
7193 void
7194 print_loops (FILE *file, int verbosity)
7195 {
7196 basic_block bb;
7197
7198 bb = ENTRY_BLOCK_PTR;
7199 if (bb && bb->loop_father)
7200 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7201 }
7202
7203 /* Dump a loop. */
7204
7205 DEBUG_FUNCTION void
7206 debug (struct loop &ref)
7207 {
7208 print_loop (stderr, &ref, 0, /*verbosity*/0);
7209 }
7210
7211 DEBUG_FUNCTION void
7212 debug (struct loop *ptr)
7213 {
7214 if (ptr)
7215 debug (*ptr);
7216 else
7217 fprintf (stderr, "<nil>\n");
7218 }
7219
7220 /* Dump a loop verbosely. */
7221
7222 DEBUG_FUNCTION void
7223 debug_verbose (struct loop &ref)
7224 {
7225 print_loop (stderr, &ref, 0, /*verbosity*/3);
7226 }
7227
7228 DEBUG_FUNCTION void
7229 debug_verbose (struct loop *ptr)
7230 {
7231 if (ptr)
7232 debug_verbose (*ptr);
7233 else
7234 fprintf (stderr, "<nil>\n");
7235 }
7236
7237
7238 /* Dump the loop structure at tree level, at some VERBOSITY level. */
7239
7240 DEBUG_FUNCTION void
7241 debug_loops (int verbosity)
7242 {
7243 print_loops (stderr, verbosity);
7244 }
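
/* E.g., from the debugger (illustrative): (gdb) call debug_loops (3)  */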
7245
7246 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7247
7248 DEBUG_FUNCTION void
7249 debug_loop (struct loop *loop, int verbosity)
7250 {
7251 print_loop (stderr, loop, 0, verbosity);
7252 }
7253
7254 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7255 level. */
7256
7257 DEBUG_FUNCTION void
7258 debug_loop_num (unsigned num, int verbosity)
7259 {
7260 debug_loop (get_loop (cfun, num), verbosity);
7261 }
7262
7263 /* Return true if BB ends with a call, possibly followed by some
7264 instructions that must stay with the call.  Return false
7265 otherwise.  */
7266
7267 static bool
7268 gimple_block_ends_with_call_p (basic_block bb)
7269 {
7270 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7271 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7272 }
7273
7274
7275 /* Return true if BB ends with a conditional branch.  Return false
7276 otherwise. */
7277
7278 static bool
7279 gimple_block_ends_with_condjump_p (const_basic_block bb)
7280 {
7281 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7282 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7283 }
7284
7285
7286 /* Return true if we need to add fake edge to exit at statement T.
7287 Helper function for gimple_flow_call_edges_add. */
7288
7289 static bool
7290 need_fake_edge_p (gimple t)
7291 {
7292 tree fndecl = NULL_TREE;
7293 int call_flags = 0;
7294
7295 /* NORETURN and LONGJMP calls already have an edge to exit.
7296 CONST and PURE calls do not need one.
7297 We don't currently check for CONST and PURE here, although
7298 it would be a good idea, because those attributes are
7299 figured out from the RTL in mark_constant_function, and
7300 the counter incrementation code from -fprofile-arcs
7301 leads to different results from -fbranch-probabilities. */
7302 if (is_gimple_call (t))
7303 {
7304 fndecl = gimple_call_fndecl (t);
7305 call_flags = gimple_call_flags (t);
7306 }
7307
7308 if (is_gimple_call (t)
7309 && fndecl
7310 && DECL_BUILT_IN (fndecl)
7311 && (call_flags & ECF_NOTHROW)
7312 && !(call_flags & ECF_RETURNS_TWICE)
7313 /* fork() doesn't really return twice, but the effect of
7314 wrapping it in __gcov_fork() which calls __gcov_flush()
7315 and clears the counters before forking has the same
7316 effect as returning twice. Force a fake edge. */
7317 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7318 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7319 return false;
7320
7321 if (is_gimple_call (t))
7322 {
7323 edge_iterator ei;
7324 edge e;
7325 basic_block bb;
7326
7327 if (!(call_flags & ECF_NORETURN))
7328 return true;
7329
7330 bb = gimple_bb (t);
7331 FOR_EACH_EDGE (e, ei, bb->succs)
7332 if ((e->flags & EDGE_FAKE) == 0)
7333 return true;
7334 }
7335
7336 if (gimple_code (t) == GIMPLE_ASM
7337 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7338 return true;
7339
7340 return false;
7341 }
7342
7343
7344 /* Add fake edges to the function exit for any non-constant and
7345 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
7346 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7347 or in the whole CFG if BLOCKS is zero.  Return the number of blocks
7348 that were split.
7349
7350 The goal is to expose cases in which entering a basic block does
7351 not imply that all subsequent instructions must be executed. */
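
/* For instance (an illustrative block, not from this file):

     tmp_1 = foo ();    <-- foo may call exit () and never return
     x_2 = tmp_1 + 1;

   After this pass the call has a fake edge to EXIT (its block is split
   first if the call is not the last statement), so entering the block no
   longer implies that the addition executes.  */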
7352
7353 static int
7354 gimple_flow_call_edges_add (sbitmap blocks)
7355 {
7356 int i;
7357 int blocks_split = 0;
7358 int last_bb = last_basic_block;
7359 bool check_last_block = false;
7360
7361 if (n_basic_blocks == NUM_FIXED_BLOCKS)
7362 return 0;
7363
7364 if (! blocks)
7365 check_last_block = true;
7366 else
7367 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7368
7369 /* In the last basic block, before epilogue generation, there will be
7370 a fallthru edge to EXIT. Special care is required if the last insn
7371 of the last basic block is a call because make_edge folds duplicate
7372 edges, which would result in the fallthru edge also being marked
7373 fake, which would result in the fallthru edge being removed by
7374 remove_fake_edges, which would result in an invalid CFG.
7375
7376 Moreover, we can't elide the outgoing fake edge, since the block
7377 profiler needs to take this into account in order to solve the minimal
7378 spanning tree in the case that the call doesn't return.
7379
7380 Handle this by adding a dummy instruction in a new last basic block. */
7381 if (check_last_block)
7382 {
7383 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7384 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7385 gimple t = NULL;
7386
7387 if (!gsi_end_p (gsi))
7388 t = gsi_stmt (gsi);
7389
7390 if (t && need_fake_edge_p (t))
7391 {
7392 edge e;
7393
7394 e = find_edge (bb, EXIT_BLOCK_PTR);
7395 if (e)
7396 {
7397 gsi_insert_on_edge (e, gimple_build_nop ());
7398 gsi_commit_edge_inserts ();
7399 }
7400 }
7401 }
7402
7403 /* Now add fake edges to the function exit for any non-constant
7404 calls, since there is no way to determine whether they will
7405 return or not...  */
7406 for (i = 0; i < last_bb; i++)
7407 {
7408 basic_block bb = BASIC_BLOCK (i);
7409 gimple_stmt_iterator gsi;
7410 gimple stmt, last_stmt;
7411
7412 if (!bb)
7413 continue;
7414
7415 if (blocks && !bitmap_bit_p (blocks, i))
7416 continue;
7417
7418 gsi = gsi_last_nondebug_bb (bb);
7419 if (!gsi_end_p (gsi))
7420 {
7421 last_stmt = gsi_stmt (gsi);
7422 do
7423 {
7424 stmt = gsi_stmt (gsi);
7425 if (need_fake_edge_p (stmt))
7426 {
7427 edge e;
7428
7429 /* The handling above of the final block before the
7430 epilogue should be enough to verify that there is
7431 no edge to the exit block in CFG already.
7432 Calling make_edge in such case would cause us to
7433 mark that edge as fake and remove it later. */
7434 #ifdef ENABLE_CHECKING
7435 if (stmt == last_stmt)
7436 {
7437 e = find_edge (bb, EXIT_BLOCK_PTR);
7438 gcc_assert (e == NULL);
7439 }
7440 #endif
7441
7442 /* Note that the following may create a new basic block
7443 and renumber the existing basic blocks. */
7444 if (stmt != last_stmt)
7445 {
7446 e = split_block (bb, stmt);
7447 if (e)
7448 blocks_split++;
7449 }
7450 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7451 }
7452 gsi_prev (&gsi);
7453 }
7454 while (!gsi_end_p (gsi));
7455 }
7456 }
7457
7458 if (blocks_split)
7459 verify_flow_info ();
7460
7461 return blocks_split;
7462 }
7463
7464 /* Removes edge E and all the blocks dominated by it, and updates dominance
7465 information. The IL in E->src needs to be updated separately.
7466 If dominance info is not available, only the edge E is removed.  */
7467
7468 void
7469 remove_edge_and_dominated_blocks (edge e)
7470 {
7471 vec<basic_block> bbs_to_remove = vNULL;
7472 vec<basic_block> bbs_to_fix_dom = vNULL;
7473 bitmap df, df_idom;
7474 edge f;
7475 edge_iterator ei;
7476 bool none_removed = false;
7477 unsigned i;
7478 basic_block bb, dbb;
7479 bitmap_iterator bi;
7480
7481 if (!dom_info_available_p (CDI_DOMINATORS))
7482 {
7483 remove_edge (e);
7484 return;
7485 }
7486
7487 /* No updating is needed for edges to exit. */
7488 if (e->dest == EXIT_BLOCK_PTR)
7489 {
7490 if (cfgcleanup_altered_bbs)
7491 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7492 remove_edge (e);
7493 return;
7494 }
7495
7496 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7497 that is not dominated by E->dest, then this set is empty. Otherwise,
7498 all the basic blocks dominated by E->dest are removed.
7499
7500 Also, to DF_IDOM we store the immediate dominators of the blocks in
7501 the dominance frontier of E (i.e., of the successors of the
7502 removed blocks, if there are any, and of E->dest otherwise). */
7503 FOR_EACH_EDGE (f, ei, e->dest->preds)
7504 {
7505 if (f == e)
7506 continue;
7507
7508 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7509 {
7510 none_removed = true;
7511 break;
7512 }
7513 }
7514
7515 df = BITMAP_ALLOC (NULL);
7516 df_idom = BITMAP_ALLOC (NULL);
7517
7518 if (none_removed)
7519 bitmap_set_bit (df_idom,
7520 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7521 else
7522 {
7523 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7524 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7525 {
7526 FOR_EACH_EDGE (f, ei, bb->succs)
7527 {
7528 if (f->dest != EXIT_BLOCK_PTR)
7529 bitmap_set_bit (df, f->dest->index);
7530 }
7531 }
7532 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7533 bitmap_clear_bit (df, bb->index);
7534
7535 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7536 {
7537 bb = BASIC_BLOCK (i);
7538 bitmap_set_bit (df_idom,
7539 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7540 }
7541 }
7542
7543 if (cfgcleanup_altered_bbs)
7544 {
7545 /* Record the set of the altered basic blocks. */
7546 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7547 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7548 }
7549
7550 /* Remove E and the cancelled blocks. */
7551 if (none_removed)
7552 remove_edge (e);
7553 else
7554 {
7555 /* Walk backwards so as to get a chance to substitute all
7556 released DEFs into debug stmts. See
7557 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7558 details. */
7559 for (i = bbs_to_remove.length (); i-- > 0; )
7560 delete_basic_block (bbs_to_remove[i]);
7561 }
7562
7563 /* Update the dominance information. The immediate dominator may change only
7564 for blocks whose immediate dominator belongs to DF_IDOM:
7565
7566 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7567 removal.  Let Z be an arbitrary block such that idom(Z) = Y and
7568 Z dominates X after the removal. Before removal, there exists a path P
7569 from Y to X that avoids Z. Let F be the last edge on P that is
7570 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7571 dominates W, and because of P, Z does not dominate W), and W belongs to
7572 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7573 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7574 {
7575 bb = BASIC_BLOCK (i);
7576 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7577 dbb;
7578 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7579 bbs_to_fix_dom.safe_push (dbb);
7580 }
7581
7582 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7583
7584 BITMAP_FREE (df);
7585 BITMAP_FREE (df_idom);
7586 bbs_to_remove.release ();
7587 bbs_to_fix_dom.release ();
7588 }
7589
7590 /* Purge dead EH edges from basic block BB. */
7591
7592 bool
7593 gimple_purge_dead_eh_edges (basic_block bb)
7594 {
7595 bool changed = false;
7596 edge e;
7597 edge_iterator ei;
7598 gimple stmt = last_stmt (bb);
7599
7600 if (stmt && stmt_can_throw_internal (stmt))
7601 return false;
7602
7603 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7604 {
7605 if (e->flags & EDGE_EH)
7606 {
7607 remove_edge_and_dominated_blocks (e);
7608 changed = true;
7609 }
7610 else
7611 ei_next (&ei);
7612 }
7613
7614 return changed;
7615 }
7616
7617 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7618
7619 bool
7620 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7621 {
7622 bool changed = false;
7623 unsigned i;
7624 bitmap_iterator bi;
7625
7626 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7627 {
7628 basic_block bb = BASIC_BLOCK (i);
7629
7630 /* Earlier gimple_purge_dead_eh_edges could have removed
7631 this basic block already. */
7632 gcc_assert (bb || changed);
7633 if (bb != NULL)
7634 changed |= gimple_purge_dead_eh_edges (bb);
7635 }
7636
7637 return changed;
7638 }
7639
7640 /* Purge dead abnormal call edges from basic block BB. */
7641
7642 bool
7643 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7644 {
7645 bool changed = false;
7646 edge e;
7647 edge_iterator ei;
7648 gimple stmt = last_stmt (bb);
7649
7650 if (!cfun->has_nonlocal_label
7651 && !cfun->calls_setjmp)
7652 return false;
7653
7654 if (stmt && stmt_can_make_abnormal_goto (stmt))
7655 return false;
7656
7657 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7658 {
7659 if (e->flags & EDGE_ABNORMAL)
7660 {
7661 if (e->flags & EDGE_FALLTHRU)
7662 e->flags &= ~EDGE_ABNORMAL;
7663 else
7664 remove_edge_and_dominated_blocks (e);
7665 changed = true;
7666 }
7667 else
7668 ei_next (&ei);
7669 }
7670
7671 return changed;
7672 }
7673
7674 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7675
7676 bool
7677 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7678 {
7679 bool changed = false;
7680 unsigned i;
7681 bitmap_iterator bi;
7682
7683 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7684 {
7685 basic_block bb = BASIC_BLOCK (i);
7686
7687 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7688 this basic block already. */
7689 gcc_assert (bb || changed);
7690 if (bb != NULL)
7691 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7692 }
7693
7694 return changed;
7695 }
7696
7697 /* This function is called whenever a new edge is created or
7698 redirected. */
7699
7700 static void
7701 gimple_execute_on_growing_pred (edge e)
7702 {
7703 basic_block bb = e->dest;
7704
7705 if (!gimple_seq_empty_p (phi_nodes (bb)))
7706 reserve_phi_args_for_new_edge (bb);
7707 }
7708
7709 /* This function is called immediately before edge E is removed from
7710 the edge vector E->dest->preds. */
7711
7712 static void
7713 gimple_execute_on_shrinking_pred (edge e)
7714 {
7715 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7716 remove_phi_args (e);
7717 }
7718
7719 /*---------------------------------------------------------------------------
7720 Helper functions for Loop versioning
7721 ---------------------------------------------------------------------------*/
7722
7723 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
7724 of 'first'.  Both of them are dominated by 'new_head' basic block.  When
7725 'new_head' was created by splitting 'second's incoming edge, that edge
7726 received phi arguments from split_edge ().  Later, an additional edge 'e'
7727 was created to connect 'new_head' and 'first'.  This routine now adds, on
7728 this additional edge 'e', the phi args that the new_head-to-second edge
7729 received as part of the edge splitting.  */
7730
7731 static void
7732 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7733 basic_block new_head, edge e)
7734 {
7735 gimple phi1, phi2;
7736 gimple_stmt_iterator psi1, psi2;
7737 tree def;
7738 edge e2 = find_edge (new_head, second);
7739
7740 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7741 edge, we should always have an edge from NEW_HEAD to SECOND. */
7742 gcc_assert (e2 != NULL);
7743
7744 /* Browse all 'second' basic block phi nodes and add phi args to
7745 edge 'e' for 'first' head. PHI args are always in correct order. */
7746
7747 for (psi2 = gsi_start_phis (second),
7748 psi1 = gsi_start_phis (first);
7749 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7750 gsi_next (&psi2), gsi_next (&psi1))
7751 {
7752 phi1 = gsi_stmt (psi1);
7753 phi2 = gsi_stmt (psi2);
7754 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7755 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7756 }
7757 }
7758
7759
7760 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7761 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7762 the destination of the ELSE part. */
7763
7764 static void
7765 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7766 basic_block second_head ATTRIBUTE_UNUSED,
7767 basic_block cond_bb, void *cond_e)
7768 {
7769 gimple_stmt_iterator gsi;
7770 gimple new_cond_expr;
7771 tree cond_expr = (tree) cond_e;
7772 edge e0;
7773
7774 /* Build the new conditional expr.  */
7775 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7776 NULL_TREE, NULL_TREE);
7777
7778 /* Add new cond in cond_bb. */
7779 gsi = gsi_last_bb (cond_bb);
7780 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7781
7782 /* Adjust edges appropriately to connect new head with first head
7783 as well as second head. */
7784 e0 = single_succ_edge (cond_bb);
7785 e0->flags &= ~EDGE_FALLTHRU;
7786 e0->flags |= EDGE_FALSE_VALUE;
7787 }
7788
7789
7790 /* Do book-keeping of basic block BB for the profile consistency checker.
7791 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
7792 do post-pass accounting.  Store the counts in RECORD. */
7793 static void
7794 gimple_account_profile_record (basic_block bb, int after_pass,
7795 struct profile_record *record)
7796 {
7797 gimple_stmt_iterator i;
7798 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7799 {
7800 record->size[after_pass]
7801 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7802 if (profile_status == PROFILE_READ)
7803 record->time[after_pass]
7804 += estimate_num_insns (gsi_stmt (i),
7805 &eni_time_weights) * bb->count;
7806 else if (profile_status == PROFILE_GUESSED)
7807 record->time[after_pass]
7808 += estimate_num_insns (gsi_stmt (i),
7809 &eni_time_weights) * bb->frequency;
7810 }
7811 }
7812
7813 struct cfg_hooks gimple_cfg_hooks = {
7814 "gimple",
7815 gimple_verify_flow_info,
7816 gimple_dump_bb, /* dump_bb */
7817 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7818 create_bb, /* create_basic_block */
7819 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7820 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7821 gimple_can_remove_branch_p, /* can_remove_branch_p */
7822 remove_bb, /* delete_basic_block */
7823 gimple_split_block, /* split_block */
7824 gimple_move_block_after, /* move_block_after */
7825 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7826 gimple_merge_blocks, /* merge_blocks */
7827 gimple_predict_edge, /* predict_edge */
7828 gimple_predicted_by_p, /* predicted_by_p */
7829 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7830 gimple_duplicate_bb, /* duplicate_block */
7831 gimple_split_edge, /* split_edge */
7832 gimple_make_forwarder_block, /* make_forwarder_block */
7833 NULL, /* tidy_fallthru_edge */
7834 NULL, /* force_nonfallthru */
7835 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7836 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7837 gimple_flow_call_edges_add, /* flow_call_edges_add */
7838 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7839 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7840 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7841 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7842 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7843 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7844 flush_pending_stmts, /* flush_pending_stmts */
7845 gimple_empty_block_p, /* block_empty_p */
7846 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7847 gimple_account_profile_record,
7848 };
7849
7850
7851 /* Split all critical edges.  (An edge is critical when its source has multiple successors and its destination has multiple predecessors.)  */
7852
7853 static unsigned int
7854 split_critical_edges (void)
7855 {
7856 basic_block bb;
7857 edge e;
7858 edge_iterator ei;
7859
7860 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7861 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7862 mappings around the calls to split_edge. */
7863 start_recording_case_labels ();
7864 FOR_ALL_BB (bb)
7865 {
7866 FOR_EACH_EDGE (e, ei, bb->succs)
7867 {
7868 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7869 split_edge (e);
7870 /* PRE inserts statements to edges and expects that
7871 since split_critical_edges was done beforehand, committing edge
7872 insertions will not split more edges. In addition to critical
7873 edges we must split edges that have multiple successors and
7874 end by control flow statements, such as RESX.
7875 Go ahead and split them too. This matches the logic in
7876 gimple_find_edge_insert_loc. */
7877 else if ((!single_pred_p (e->dest)
7878 || !gimple_seq_empty_p (phi_nodes (e->dest))
7879 || e->dest == EXIT_BLOCK_PTR)
7880 && e->src != ENTRY_BLOCK_PTR
7881 && !(e->flags & EDGE_ABNORMAL))
7882 {
7883 gimple_stmt_iterator gsi;
7884
7885 gsi = gsi_last_bb (e->src);
7886 if (!gsi_end_p (gsi)
7887 && stmt_ends_bb_p (gsi_stmt (gsi))
7888 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7889 && !gimple_call_builtin_p (gsi_stmt (gsi),
7890 BUILT_IN_RETURN)))
7891 split_edge (e);
7892 }
7893 }
7894 }
7895 end_recording_case_labels ();
7896 return 0;
7897 }

namespace {

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_flow, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
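
/* Like the other make_pass_* factories in this file, this is invoked by
   the pass manager when it builds the pipeline described in passes.def;
   "crited" is scheduled there ahead of passes that require
   PROP_no_crit_edges, such as PRE.  */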


/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
		 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}
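
/* Example (illustrative only): to emit T = A + B as a gimple_val before
   the statement at GSI, a caller might write

     tree t = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);

   The folded result is gimplified, and any statements it needs are
   inserted immediately before GSI.  */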


\f
/* Emit return warnings.  */
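
/* Two warnings can come from here (examples illustrative only):

     __attribute__ ((noreturn)) void fatal (void) { }

   draws "'noreturn' function does return", while, under -Wreturn-type,

     int sign (int x) { if (x > 0) return 1; }

   draws "control reaches end of non-void function" because a "return;"
   path reaches the exit block without a value.  */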

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  Note that
     TREE_THIS_VOLATILE on a FUNCTION_DECL means "noreturn".  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  gimple last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type,
			  "control reaches end of non-void function");
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}


/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
				     edge *true_edge,
				     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
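
/* Typical use (illustrative only), for a block known to end in a
   GIMPLE_COND:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   Exactly one successor edge carries EDGE_TRUE_VALUE, so inspecting the
   first edge suffices to classify both.  */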

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_warn_function_return (); }

}; // class pass_warn_function_return

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}

/* Walk a gimplified function and warn about calls whose return value is
   ignored when the callee is declared with attribute warn_unused_result.
   This is done before inlining, so we don't have to worry about that.  */
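
/* For instance (illustrative only), given

     __attribute__ ((warn_unused_result)) int reserve (int n);
     void f (void) { reserve (4); }

   the call in f is diagnosed under -Wunused-result because its value is
   discarded.  */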

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (g));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (g));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}

static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}

static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_unused_result (); }
  unsigned int execute () { return run_warn_unused_result (); }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions, and inlining might have
   changed some properties, such as marking functions nothrow, pure,
   const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if
   necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because between inlining and this fixup verify_flow_info would fail.  */
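
/* Profile counts are also rescaled here so that the body is consistent
   with the (possibly updated) entry count recorded on the cgraph node:
   each count is mapped to count * node->count / old_entry_count, via
   GCOV_COMPUTE_SCALE and apply_scale below.  */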

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
			    ENTRY_BLOCK_PTR->count);

  ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
				       count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call, end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute () { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */
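
/* The walkers below mark the objects an edge refers to so the garbage
   collector keeps them live.  The BLOCK is not a direct field of
   edge_def: it is encoded inside goto_locus, so it is extracted with
   LOCATION_BLOCK and marked explicitly.  */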

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}