/* Control flow functions for trees.
   Copyright (C) 2001-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
static void lower_phi_internal_fn ();

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  lower_phi_internal_fn ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}
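
/* For example (illustrative), a C loop annotated with the GNU extension

     #pragma GCC ivdep
     for (i = 0; i < n; i++)
       a[i] = b[i] + c[i];

   reaches this point with an IFN_ANNOTATE call carrying
   annot_expr_ivdep_kind just before the loop condition; the code above
   deletes the call and records the annotation as loop->safelen = INT_MAX.  */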

/* Lower internal PHI function from GIMPLE FE.  */

static void
lower_phi_internal_fn ()
{
  basic_block bb, pred = NULL;
  gimple_stmt_iterator gsi;
  tree lhs;
  gphi *phi_node;
  gimple *stmt;

  /* After edge creation, handle __PHI function from GIMPLE FE.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
        {
          stmt = gsi_stmt (gsi);
          if (! gimple_call_internal_p (stmt, IFN_PHI))
            break;

          lhs = gimple_call_lhs (stmt);
          phi_node = create_phi_node (lhs, bb);

          /* Add arguments to the PHI node.  */
          for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
            {
              tree arg = gimple_call_arg (stmt, i);
              if (TREE_CODE (arg) == LABEL_DECL)
                pred = label_to_block (arg);
              else
                {
                  edge e = find_edge (pred, bb);
                  add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
                }
            }

          gsi_remove (&gsi, true);
        }
    }
}
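
/* In GIMPLE FE input a PHI is spelled as a call; an illustrative sketch:

     x_1 = __PHI (lab1: a_2, lab2: b_3);

   Each LABEL_DECL argument names the predecessor block for the value
   that follows it, so the loop above turns the call into a real gphi
   with one argument per incoming edge.  */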

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
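
/* For instance (illustrative), with GCC's labels-as-values extension

     void *p = &&out;
     goto *p;           <-- GIMPLE_GOTO whose destination is a variable
   out:
     return;

   the goto's destination is not a LABEL_DECL, so computed_goto_p returns
   true; a plain "goto out;" would have a LABEL_DECL destination.  */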

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL)
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
          && !is_gimple_debug (stmt)
          && !gimple_clobber_p (stmt))
        return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
         as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
          && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
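
/* E.g. (illustrative) a noreturn call such as

     abort ();
     x = 1;        <-- unreachable; must start a new basic block

   gets GF_CALL_CTRL_ALTERING set here, so make_blocks_1 below will end
   the current block right after the call.  */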


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple *s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
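
/* A sketch of the LHS rewrite above (illustrative): for a returns-twice
   call such as

     x = setjmp (env);

   the statement becomes

     tmp = setjmp (env);   <-- block ends here, abnormal edge out
     x = tmp;              <-- starts the fallthru block

   so X keeps its old value on the abnormal path and the SSA names for
   X and TMP do not get overlapping abnormal life-ranges.  */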

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block containing
   an IFN_ABNORMAL_DISPATCHER internal call, return the dispatcher's
   basic block; otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple *g = gsi_stmt (gsi);
        if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
          return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB if COMPUTED_GOTO
   is false; otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple *factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple *factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple *last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple *assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                  1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
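
/* Factoring sketch (illustrative): two computed gotos

     goto *p_1;            goto *q_2;

   are rewritten so that both fall through to one dispatcher block:

     gotovar = p_1;        gotovar = q_2;
     goto <dispatcher>;    goto <dispatcher>;

     <dispatcher>:
     goto *gotovar;        <-- single computed goto; abnormal edges
                               to every FORCED_LABEL block

   which keeps the number of abnormal edges linear instead of
   quadratic in the number of computed gotos and target labels.  */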

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, 2 when it ends with a statement that might return
   to this function via a nonlocal goto, otherwise 0.  Updates
   *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
        ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
        edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
        e->goto_locus = gimple_location (last);
        fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (stmt_can_make_abnormal_goto (last))
        ret = 2;

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
        {
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
          fallthru = false;
        }
      /* Some calls are known not to return.  */
      else
        fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
         control-altering.  */
      if (is_ctrl_altering_stmt (last))
        make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
        tree label1 = gimple_transaction_label_norm (txn);
        tree label2 = gimple_transaction_label_uninst (txn);

        if (label1)
          make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
        if (label2)
          make_edge (bb, label_to_block (label2),
                     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

        tree label3 = gimple_transaction_label_over (txn);
        if (gimple_transaction_subcode (txn)
            & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
          make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

        fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
        ab_edge_goto.safe_push (bb);
      else if (mer == 2)
        ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
        bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple *call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}
/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs
   as needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      int freq = 0;
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->count.initialized_p ())
            cnt += e->count;
          else
            all = false;
          freq += EDGE_FREQUENCY (e);
        }
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
        bb->count = cnt;
      bb->frequency = freq;
      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = bb->count.apply_probability (e->probability);

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple *first = first_non_label_stmt (e->dest);
          gimple *last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}
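
/* For example (illustrative), when several blocks come from one line

     if (p) x = 1; else x = 2;

   the condition and both arms share a single locus; giving the blocks
   distinct discriminators lets a sample-based profiler attribute
   samples to the right block despite the shared line number.  */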

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple *stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (as_a <gswitch *> (stmt));
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
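
/* E.g. (illustrative) an asm goto statement such as

     asm goto ("jc %l0" : : : : error);

   lists "error" as a label operand, so the loop above adds an edge from
   this block to the block holding that label, in addition to the
   ordinary fallthru edge added by make_edges_bb.  */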

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            gcond *cond_stmt = as_a <gcond *> (stmt);
            label = gimple_cond_true_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_true_label (cond_stmt, new_label);
              }

            label = gimple_cond_false_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_false_label (cond_stmt, new_label);
              }
          }
          break;

        case GIMPLE_SWITCH:
          {
            gswitch *switch_stmt = as_a <gswitch *> (stmt);
            size_t i, n = gimple_switch_num_labels (switch_stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (switch_stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            gasm *asm_stmt = as_a <gasm *> (stmt);
            int i, n = gimple_asm_nlabels (asm_stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (asm_stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              ggoto *goto_stmt = as_a <ggoto *> (stmt);
              label = gimple_goto_dest (goto_stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (goto_stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *txn = as_a <gtransaction *> (stmt);

            label = gimple_transaction_label_norm (txn);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label_norm (txn, new_label);
              }

            label = gimple_transaction_label_uninst (txn);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label_uninst (txn, new_label);
              }

            label = gimple_transaction_label_over (txn);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label_over (txn, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}
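
/* For instance (illustrative), if a block starts with

     user_lab:
     <D.1234>:          <-- artificial duplicate
     x = 1;

   user_lab is chosen as the leading label, every reference to D.1234 is
   redirected to user_lab, and the now-unreferenced artificial label is
   deleted in step 3.  */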

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
         whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
        {
          i++;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (next_index < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, next_index);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          wide_int bhp1 = wi::add (base_high, 1);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && wi::eq_p (CASE_LOW (merge_case), bhp1))
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              next_index++;
            }
          else
            break;
        }

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
          && gimple_seq_unreachable_p (bb_seq (base_bb)))
        {
          edge base_edge = find_edge (gimple_bb (stmt), base_bb);
          if (base_edge != NULL)
            remove_edge_and_dominated_blocks (base_edge);
          i = next_index;
          continue;
        }

      if (new_size < i)
        gimple_switch_set_label (stmt, new_size,
                                 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}
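
/* Merging sketch (illustrative):

     switch (x)                 switch (x)
       {                          {
       case 1: goto L;            case 1 ... 3: goto L;
       case 2: goto L;    =>      default: goto D;
       case 3: goto L;            }
       default: goto D;
       }

   The three consecutive cases share a destination, so they collapse
   into the single range 1..3 and the label vector shrinks.  */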
1765
1766 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1767 and scan the sorted vector of cases. Combine the ones jumping to the
1768 same label. */
1769
1770 bool
1771 group_case_labels (void)
1772 {
1773 basic_block bb;
1774 bool changed = false;
1775
1776 FOR_EACH_BB_FN (bb, cfun)
1777 {
1778 gimple *stmt = last_stmt (bb);
1779 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1780 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1781 }
1782
1783 return changed;
1784 }
1785
1786 /* Checks whether we can merge block B into block A. */
1787
1788 static bool
1789 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1790 {
1791 gimple *stmt;
1792
1793 if (!single_succ_p (a))
1794 return false;
1795
1796 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1797 return false;
1798
1799 if (single_succ (a) != b)
1800 return false;
1801
1802 if (!single_pred_p (b))
1803 return false;
1804
1805 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1806 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1807 return false;
1808
1809 /* If A ends by a statement causing exceptions or something similar, we
1810 cannot merge the blocks. */
1811 stmt = last_stmt (a);
1812 if (stmt && stmt_ends_bb_p (stmt))
1813 return false;
1814
1815 /* Do not allow a block with only a non-local label to be merged. */
1816 if (stmt)
1817 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1818 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1819 return false;
1820
1821 /* Examine the labels at the beginning of B. */
1822 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1823 gsi_next (&gsi))
1824 {
1825 tree lab;
1826 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1827 if (!label_stmt)
1828 break;
1829 lab = gimple_label_label (label_stmt);
1830
1831 /* Do not remove user forced labels or for -O0 any user labels. */
1832 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1833 return false;
1834 }
1835
1836 /* Protect simple loop latches. We only want to avoid merging
1837 the latch with the loop header or with a block in another
1838 loop in this case. */
1839 if (current_loops
1840 && b->loop_father->latch == b
1841 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1842 && (b->loop_father->header == a
1843 || b->loop_father != a->loop_father))
1844 return false;
1845
1846 /* It must be possible to eliminate all phi nodes in B. If ssa form
1847 is not up-to-date and a name-mapping is registered, we cannot eliminate
1848 any phis. Symbols marked for renaming are never a problem though. */
1849 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1850 gsi_next (&gsi))
1851 {
1852 gphi *phi = gsi.phi ();
1853 /* Technically only new names matter. */
1854 if (name_registered_for_update_p (PHI_RESULT (phi)))
1855 return false;
1856 }
1857
1858 /* When not optimizing, don't merge if we'd lose goto_locus. */
1859 if (!optimize
1860 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1861 {
1862 location_t goto_locus = single_succ_edge (a)->goto_locus;
1863 gimple_stmt_iterator prev, next;
1864 prev = gsi_last_nondebug_bb (a);
1865 next = gsi_after_labels (b);
1866 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1867 gsi_next_nondebug (&next);
1868 if ((gsi_end_p (prev)
1869 || gimple_location (gsi_stmt (prev)) != goto_locus)
1870 && (gsi_end_p (next)
1871 || gimple_location (gsi_stmt (next)) != goto_locus))
1872 return false;
1873 }
1874
1875 return true;
1876 }
1877
1878 /* Replaces all uses of NAME by VAL. */
1879
1880 void
1881 replace_uses_by (tree name, tree val)
1882 {
1883 imm_use_iterator imm_iter;
1884 use_operand_p use;
1885 gimple *stmt;
1886 edge e;
1887
1888 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1889 {
1890 /* Mark the block if we change the last stmt in it. */
1891 if (cfgcleanup_altered_bbs
1892 && stmt_ends_bb_p (stmt))
1893 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1894
1895 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1896 {
1897 replace_exp (use, val);
1898
1899 if (gimple_code (stmt) == GIMPLE_PHI)
1900 {
1901 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1902 PHI_ARG_INDEX_FROM_USE (use));
1903 if (e->flags & EDGE_ABNORMAL
1904 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1905 {
1906 /* This can only occur for virtual operands, since
1907 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1908 would prevent replacement. */
1909 gcc_checking_assert (virtual_operand_p (name));
1910 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1911 }
1912 }
1913 }
1914
1915 if (gimple_code (stmt) != GIMPLE_PHI)
1916 {
1917 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1918 gimple *orig_stmt = stmt;
1919 size_t i;
1920
1921 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1922 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1923 only change sth from non-invariant to invariant, and only
1924 when propagating constants. */
1925 if (is_gimple_min_invariant (val))
1926 for (i = 0; i < gimple_num_ops (stmt); i++)
1927 {
1928 tree op = gimple_op (stmt, i);
1929 /* Operands may be empty here. For example, the labels
1930 of a GIMPLE_COND are nulled out following the creation
1931 of the corresponding CFG edges. */
1932 if (op && TREE_CODE (op) == ADDR_EXPR)
1933 recompute_tree_invariant_for_addr_expr (op);
1934 }
1935
1936 if (fold_stmt (&gsi))
1937 stmt = gsi_stmt (gsi);
1938
1939 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1940 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1941
1942 update_stmt (stmt);
1943 }
1944 }
1945
1946 gcc_checking_assert (has_zero_uses (name));
1947
1948 /* Also update the trees stored in loop structures. */
1949 if (current_loops)
1950 {
1951 struct loop *loop;
1952
1953 FOR_EACH_LOOP (loop, 0)
1954 {
1955 substitute_in_loop_info (loop, name, val);
1956 }
1957 }
1958 }
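/* Usage sketch (illustrative, not part of the original file): a pass
   that has proved SSA name NAME to be zero can rewrite all uses at
   once:

       replace_uses_by (name, build_zero_cst (TREE_TYPE (name)));

   Every using statement is folded and updated, dead EH edges are
   purged where folding exposed them, and loop number-of-iterations
   data mentioning NAME is substituted as well.  */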
1959
1960 /* Merge block B into block A. */
1961
1962 static void
1963 gimple_merge_blocks (basic_block a, basic_block b)
1964 {
1965 gimple_stmt_iterator last, gsi;
1966 gphi_iterator psi;
1967
1968 if (dump_file)
1969 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1970
1971 /* Remove all single-valued PHI nodes from block B of the form
1972 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1973 gsi = gsi_last_bb (a);
1974 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1975 {
1976 gimple *phi = gsi_stmt (psi);
1977 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1978 gimple *copy;
1979 bool may_replace_uses = (virtual_operand_p (def)
1980 || may_propagate_copy (def, use));
1981
1982 /* In case we maintain loop closed ssa form, do not propagate arguments
1983 of loop exit phi nodes. */
1984 if (current_loops
1985 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1986 && !virtual_operand_p (def)
1987 && TREE_CODE (use) == SSA_NAME
1988 && a->loop_father != b->loop_father)
1989 may_replace_uses = false;
1990
1991 if (!may_replace_uses)
1992 {
1993 gcc_assert (!virtual_operand_p (def));
1994
1995 /* Note that just emitting the copies is fine -- there is no problem
1996 with ordering of phi nodes. This is because A is the single
1997 predecessor of B, therefore results of the phi nodes cannot
1998 appear as arguments of the phi nodes. */
1999 copy = gimple_build_assign (def, use);
2000 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2001 remove_phi_node (&psi, false);
2002 }
2003 else
2004 {
2005 /* If we deal with a PHI for virtual operands, we can simply
2006 propagate these without fussing with folding or updating
2007 the stmt. */
2008 if (virtual_operand_p (def))
2009 {
2010 imm_use_iterator iter;
2011 use_operand_p use_p;
2012 gimple *stmt;
2013
2014 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2015 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2016 SET_USE (use_p, use);
2017
2018 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2019 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2020 }
2021 else
2022 replace_uses_by (def, use);
2023
2024 remove_phi_node (&psi, true);
2025 }
2026 }
2027
2028 /* Ensure that B follows A. */
2029 move_block_after (b, a);
2030
2031 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2032 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2033
2034 /* Remove labels from B and set gimple_bb to A for other statements. */
2035 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2036 {
2037 gimple *stmt = gsi_stmt (gsi);
2038 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2039 {
2040 tree label = gimple_label_label (label_stmt);
2041 int lp_nr;
2042
2043 gsi_remove (&gsi, false);
2044
2045 /* Now that we can thread computed gotos, we might have
2046 a situation where we have a forced label in block B.
2047 However, the label at the start of block B might still be
2048 used in other ways (think about the runtime checking for
2049 Fortran assigned gotos). So we cannot just delete the
2050 label. Instead we move the label to the start of block A. */
2051 if (FORCED_LABEL (label))
2052 {
2053 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2054 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2055 }
2056 /* Other user labels are kept around in the form of a debug stmt. */
2057 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
2058 {
2059 gimple *dbg = gimple_build_debug_bind (label,
2060 integer_zero_node,
2061 stmt);
2062 gimple_debug_bind_reset_value (dbg);
2063 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2064 }
2065
2066 lp_nr = EH_LANDING_PAD_NR (label);
2067 if (lp_nr)
2068 {
2069 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2070 lp->post_landing_pad = NULL;
2071 }
2072 }
2073 else
2074 {
2075 gimple_set_bb (stmt, a);
2076 gsi_next (&gsi);
2077 }
2078 }
2079
2080 /* When merging two BBs, if their counts are different, the larger count
2081 is selected as the new bb count. This is to handle inconsistent
2082 profiles. */
2083 if (a->loop_father == b->loop_father)
2084 {
2085 a->count = a->count.merge (b->count);
2086 a->frequency = MAX (a->frequency, b->frequency);
2087 }
2088
2089 /* Merge the sequences. */
2090 last = gsi_last_bb (a);
2091 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2092 set_bb_seq (b, NULL);
2093
2094 if (cfgcleanup_altered_bbs)
2095 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2096 }
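/* Illustrative GIMPLE example (constructed, not from the source):
   merging B into its single predecessor A turns the degenerate PHI
   in B into a straight propagation:

       <bb A>:                        <bb A>:
         y_2 = f ();                    y_2 = f ();
       <bb B>:               ==>        ... y_2 used directly ...
         x_1 = PHI <y_2(A)>
         ... x_1 ...

   When may_propagate_copy forbids replacing the uses, an explicit
   copy x_1 = y_2 is appended to A instead.  */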
2097
2098
2099 /* Return the one of the two successors of BB that is not reachable
2100 by a complex edge, if there is one. Otherwise, return BB. We use
2101 this in optimizations that use post-dominators for their heuristics,
2102 to catch the cases in C++ where function calls are involved. */
2103
2104 basic_block
2105 single_noncomplex_succ (basic_block bb)
2106 {
2107 edge e0, e1;
2108 if (EDGE_COUNT (bb->succs) != 2)
2109 return bb;
2110
2111 e0 = EDGE_SUCC (bb, 0);
2112 e1 = EDGE_SUCC (bb, 1);
2113 if (e0->flags & EDGE_COMPLEX)
2114 return e1->dest;
2115 if (e1->flags & EDGE_COMPLEX)
2116 return e0->dest;
2117
2118 return bb;
2119 }
2120
2121 /* CALL is a GIMPLE_CALL. Set current_function_calls_* flags. */
2122
2123 void
2124 notice_special_calls (gcall *call)
2125 {
2126 int flags = gimple_call_flags (call);
2127
2128 if (flags & ECF_MAY_BE_ALLOCA)
2129 cfun->calls_alloca = true;
2130 if (flags & ECF_RETURNS_TWICE)
2131 cfun->calls_setjmp = true;
2132 }
2133
2134
2135 /* Clear flags set by notice_special_calls. Used by dead code removal
2136 to update the flags. */
2137
2138 void
2139 clear_special_calls (void)
2140 {
2141 cfun->calls_alloca = false;
2142 cfun->calls_setjmp = false;
2143 }
2144
2145 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2146
2147 static void
2148 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2149 {
2150 /* Since this block is no longer reachable, we can just delete all
2151 of its PHI nodes. */
2152 remove_phi_nodes (bb);
2153
2154 /* Remove edges to BB's successors. */
2155 while (EDGE_COUNT (bb->succs) > 0)
2156 remove_edge (EDGE_SUCC (bb, 0));
2157 }
2158
2159
2160 /* Remove statements of basic block BB. */
2161
2162 static void
2163 remove_bb (basic_block bb)
2164 {
2165 gimple_stmt_iterator i;
2166
2167 if (dump_file)
2168 {
2169 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2170 if (dump_flags & TDF_DETAILS)
2171 {
2172 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2173 fprintf (dump_file, "\n");
2174 }
2175 }
2176
2177 if (current_loops)
2178 {
2179 struct loop *loop = bb->loop_father;
2180
2181 /* If a loop gets removed, clean up the information associated
2182 with it. */
2183 if (loop->latch == bb
2184 || loop->header == bb)
2185 free_numbers_of_iterations_estimates (loop);
2186 }
2187
2188 /* Remove all the instructions in the block. */
2189 if (bb_seq (bb) != NULL)
2190 {
2191 /* Walk backwards so as to get a chance to substitute all
2192 released DEFs into debug stmts. See
2193 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2194 details. */
2195 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2196 {
2197 gimple *stmt = gsi_stmt (i);
2198 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2199 if (label_stmt
2200 && (FORCED_LABEL (gimple_label_label (label_stmt))
2201 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2202 {
2203 basic_block new_bb;
2204 gimple_stmt_iterator new_gsi;
2205
2206 /* A non-reachable non-local label may still be referenced.
2207 But it no longer needs to carry the extra semantics of
2208 non-locality. */
2209 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2210 {
2211 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2212 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2213 }
2214
2215 new_bb = bb->prev_bb;
2216 new_gsi = gsi_start_bb (new_bb);
2217 gsi_remove (&i, false);
2218 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2219 }
2220 else
2221 {
2222 /* Release SSA definitions. */
2223 release_defs (stmt);
2224 gsi_remove (&i, true);
2225 }
2226
2227 if (gsi_end_p (i))
2228 i = gsi_last_bb (bb);
2229 else
2230 gsi_prev (&i);
2231 }
2232 }
2233
2234 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2235 bb->il.gimple.seq = NULL;
2236 bb->il.gimple.phi_nodes = NULL;
2237 }
2238
2239
2240 /* Given a basic block BB ending with a control statement (GIMPLE_COND,
2241 GIMPLE_SWITCH or a computed goto), and a predicate value VAL, return
2242 the edge that will be taken out of the block.
2243 If VAL does not match a unique edge, NULL is returned. */
2243
2244 edge
2245 find_taken_edge (basic_block bb, tree val)
2246 {
2247 gimple *stmt;
2248
2249 stmt = last_stmt (bb);
2250
2251 gcc_assert (is_ctrl_stmt (stmt));
2252
2253 if (gimple_code (stmt) == GIMPLE_COND)
2254 return find_taken_edge_cond_expr (bb, val);
2255
2256 if (gimple_code (stmt) == GIMPLE_SWITCH)
2257 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2258
2259 if (computed_goto_p (stmt))
2260 {
2261 /* Only optimize if the argument is a label; if the argument is
2262 not a label then we cannot construct a proper CFG.
2263
2264 It may be the case that we only need to allow the LABEL_REF to
2265 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2266 appear inside a LABEL_EXPR just to be safe. */
2267 if (val
2268 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2269 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2270 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2271 return NULL;
2272 }
2273
2274 gcc_unreachable ();
2275 }
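/* Hedged usage sketch (the surrounding pass code is hypothetical): a
   constant propagator that has folded the controlling predicate of BB
   to a constant can prune the CFG:

       edge taken = find_taken_edge (bb, boolean_true_node);
       if (taken)
         ... remove every outgoing edge of BB other than TAKEN ...

   A NULL result means VAL does not select a unique successor, in
   which case no edge may be removed.  */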
2276
2277 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2278 statement, determine which of the outgoing edges will be taken out of the
2279 block. Return NULL if any edge may be taken. */
2280
2281 static edge
2282 find_taken_edge_computed_goto (basic_block bb, tree val)
2283 {
2284 basic_block dest;
2285 edge e = NULL;
2286
2287 dest = label_to_block (val);
2288 if (dest)
2289 {
2290 e = find_edge (bb, dest);
2291 gcc_assert (e != NULL);
2292 }
2293
2294 return e;
2295 }
2296
2297 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2298 statement, determine which of the two edges will be taken out of the
2299 block. Return NULL if either edge may be taken. */
2300
2301 static edge
2302 find_taken_edge_cond_expr (basic_block bb, tree val)
2303 {
2304 edge true_edge, false_edge;
2305
2306 if (val == NULL
2307 || TREE_CODE (val) != INTEGER_CST)
2308 return NULL;
2309
2310 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2311
2312 return (integer_zerop (val) ? false_edge : true_edge);
2313 }
2314
2315 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2316 statement, determine which edge will be taken out of the block. Return
2317 NULL if any edge may be taken. */
2318
2319 static edge
2320 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2321 tree val)
2322 {
2323 basic_block dest_bb;
2324 edge e;
2325 tree taken_case;
2326
2327 if (gimple_switch_num_labels (switch_stmt) == 1)
2328 taken_case = gimple_switch_default_label (switch_stmt);
2329 else if (! val || TREE_CODE (val) != INTEGER_CST)
2330 return NULL;
2331 else
2332 taken_case = find_case_label_for_value (switch_stmt, val);
2333 dest_bb = label_to_block (CASE_LABEL (taken_case));
2334
2335 e = find_edge (bb, dest_bb);
2336 gcc_assert (e);
2337 return e;
2338 }
2339
2340
2341 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2342 We can make optimal use here of the fact that the case labels are
2343 sorted: We can do a binary search for a case matching VAL. */
2344
2345 static tree
2346 find_case_label_for_value (gswitch *switch_stmt, tree val)
2347 {
2348 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2349 tree default_case = gimple_switch_default_label (switch_stmt);
2350
2351 for (low = 0, high = n; high - low > 1; )
2352 {
2353 size_t i = (high + low) / 2;
2354 tree t = gimple_switch_label (switch_stmt, i);
2355 int cmp;
2356
2357 /* Cache the result of comparing CASE_LOW and val. */
2358 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2359
2360 if (cmp > 0)
2361 high = i;
2362 else
2363 low = i;
2364
2365 if (CASE_HIGH (t) == NULL)
2366 {
2367 /* A single-valued case label. */
2368 if (cmp == 0)
2369 return t;
2370 }
2371 else
2372 {
2373 /* A case range. We can only handle integer ranges. */
2374 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2375 return t;
2376 }
2377 }
2378
2379 return default_case;
2380 }
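/* Worked example (constructed for illustration): for

       switch (v) { default: ...; case 1: ...; case 5 ... 10: ...; }

   the label vector is sorted by CASE_LOW as [default, 1, 5...10].
   For VAL == 7 the search probes the middle label (CASE_LOW 1 <= 7,
   so LOW moves up), then the range label, where CASE_LOW 5 <= 7 and
   CASE_HIGH 10 >= 7, and returns it. For VAL == 3 no label matches
   and the default label is returned.  */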
2381
2382
2383 /* Dump a basic block on stderr. */
2384
2385 void
2386 gimple_debug_bb (basic_block bb)
2387 {
2388 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2389 }
2390
2391
2392 /* Dump basic block with index N on stderr. */
2393
2394 basic_block
2395 gimple_debug_bb_n (int n)
2396 {
2397 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2398 return BASIC_BLOCK_FOR_FN (cfun, n);
2399 }
2400
2401
2402 /* Dump the CFG on stderr.
2403
2404 FLAGS are the same as those used by the tree dumping functions
2405 (see TDF_* in dumpfile.h). */
2406
2407 void
2408 gimple_debug_cfg (dump_flags_t flags)
2409 {
2410 gimple_dump_cfg (stderr, flags);
2411 }
2412
2413
2414 /* Dump the program showing basic block boundaries on the given FILE.
2415
2416 FLAGS are the same as those used by the tree dumping functions
2417 (see TDF_* in dumpfile.h). */
2418
2419 void
2420 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2421 {
2422 if (flags & TDF_DETAILS)
2423 {
2424 dump_function_header (file, current_function_decl, flags);
2425 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2426 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2427 last_basic_block_for_fn (cfun));
2428
2429 brief_dump_cfg (file, flags);
2430 fprintf (file, "\n");
2431 }
2432
2433 if (flags & TDF_STATS)
2434 dump_cfg_stats (file);
2435
2436 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2437 }
2438
2439
2440 /* Dump CFG statistics on FILE. */
2441
2442 void
2443 dump_cfg_stats (FILE *file)
2444 {
2445 static long max_num_merged_labels = 0;
2446 unsigned long size, total = 0;
2447 long num_edges;
2448 basic_block bb;
2449 const char * const fmt_str = "%-30s%-13s%12s\n";
2450 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2451 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2452 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2453 const char *funcname = current_function_name ();
2454
2455 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2456
2457 fprintf (file, "---------------------------------------------------------\n");
2458 fprintf (file, fmt_str, "", " Number of ", "Memory");
2459 fprintf (file, fmt_str, "", " instances ", "used ");
2460 fprintf (file, "---------------------------------------------------------\n");
2461
2462 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2463 total += size;
2464 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2465 SCALE (size), LABEL (size));
2466
2467 num_edges = 0;
2468 FOR_EACH_BB_FN (bb, cfun)
2469 num_edges += EDGE_COUNT (bb->succs);
2470 size = num_edges * sizeof (struct edge_def);
2471 total += size;
2472 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2473
2474 fprintf (file, "---------------------------------------------------------\n");
2475 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2476 LABEL (total));
2477 fprintf (file, "---------------------------------------------------------\n");
2478 fprintf (file, "\n");
2479
2480 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2481 max_num_merged_labels = cfg_stats.num_merged_labels;
2482
2483 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2484 cfg_stats.num_merged_labels, max_num_merged_labels);
2485
2486 fprintf (file, "\n");
2487 }
2488
2489
2490 /* Dump CFG statistics on stderr. Keep extern so that it's always
2491 linked in the final executable. */
2492
2493 DEBUG_FUNCTION void
2494 debug_cfg_stats (void)
2495 {
2496 dump_cfg_stats (stderr);
2497 }
2498
2499 /*---------------------------------------------------------------------------
2500 Miscellaneous helpers
2501 ---------------------------------------------------------------------------*/
2502
2503 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2504 flow. Transfers of control flow associated with EH are excluded. */
2505
2506 static bool
2507 call_can_make_abnormal_goto (gimple *t)
2508 {
2509 /* If the function has no non-local labels, then a call cannot make an
2510 abnormal transfer of control. */
2511 if (!cfun->has_nonlocal_label
2512 && !cfun->calls_setjmp)
2513 return false;
2514
2515 /* Likewise if the call has no side effects. */
2516 if (!gimple_has_side_effects (t))
2517 return false;
2518
2519 /* Likewise if the called function is leaf. */
2520 if (gimple_call_flags (t) & ECF_LEAF)
2521 return false;
2522
2523 return true;
2524 }
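/* Source-level example (for illustration): given

       jmp_buf env;
       void f (void) { if (setjmp (env)) return; g (); }

   cfun->calls_setjmp is set for f, so the non-leaf, side-effecting
   call to g is treated as potentially longjmp-ing back and therefore
   able to make an abnormal transfer of control.  */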
2525
2526
2527 /* Return true if T can make an abnormal transfer of control flow.
2528 Transfers of control flow associated with EH are excluded. */
2529
2530 bool
2531 stmt_can_make_abnormal_goto (gimple *t)
2532 {
2533 if (computed_goto_p (t))
2534 return true;
2535 if (is_gimple_call (t))
2536 return call_can_make_abnormal_goto (t);
2537 return false;
2538 }
2539
2540
2541 /* Return true if T represents a stmt that always transfers control. */
2542
2543 bool
2544 is_ctrl_stmt (gimple *t)
2545 {
2546 switch (gimple_code (t))
2547 {
2548 case GIMPLE_COND:
2549 case GIMPLE_SWITCH:
2550 case GIMPLE_GOTO:
2551 case GIMPLE_RETURN:
2552 case GIMPLE_RESX:
2553 return true;
2554 default:
2555 return false;
2556 }
2557 }
2558
2559
2560 /* Return true if T is a statement that may alter the flow of control
2561 (e.g., a call to a non-returning function). */
2562
2563 bool
2564 is_ctrl_altering_stmt (gimple *t)
2565 {
2566 gcc_assert (t);
2567
2568 switch (gimple_code (t))
2569 {
2570 case GIMPLE_CALL:
2571 /* The per-stmt call flag indicates whether the call could alter
2572 control flow. */
2573 if (gimple_call_ctrl_altering_p (t))
2574 return true;
2575 break;
2576
2577 case GIMPLE_EH_DISPATCH:
2578 /* EH_DISPATCH branches to the individual catch handlers at
2579 this level of a try or allowed-exceptions region. It can
2580 fallthru to the next statement as well. */
2581 return true;
2582
2583 case GIMPLE_ASM:
2584 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2585 return true;
2586 break;
2587
2588 CASE_GIMPLE_OMP:
2589 /* OpenMP directives alter control flow. */
2590 return true;
2591
2592 case GIMPLE_TRANSACTION:
2593 /* A transaction start alters control flow. */
2594 return true;
2595
2596 default:
2597 break;
2598 }
2599
2600 /* If a statement can throw, it alters control flow. */
2601 return stmt_can_throw_internal (t);
2602 }
2603
2604
2605 /* Return true if T is a simple local goto. */
2606
2607 bool
2608 simple_goto_p (gimple *t)
2609 {
2610 return (gimple_code (t) == GIMPLE_GOTO
2611 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2612 }
2613
2614
2615 /* Return true if STMT should start a new basic block. PREV_STMT is
2616 the statement preceding STMT. It is used when STMT is a label or a
2617 case label. Labels should only start a new basic block if their
2618 previous statement wasn't a label. Otherwise, a sequence of labels
2619 would generate unnecessary basic blocks that only contain a single
2620 label. */
2621
2622 static inline bool
2623 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2624 {
2625 if (stmt == NULL)
2626 return false;
2627
2628 /* Labels start a new basic block only if the preceding statement
2629 wasn't a label of the same type. This prevents the creation of
2630 consecutive blocks that have nothing but a single label. */
2631 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2632 {
2633 /* Nonlocal and computed GOTO targets always start a new block. */
2634 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2635 || FORCED_LABEL (gimple_label_label (label_stmt)))
2636 return true;
2637
2638 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2639 {
2640 if (DECL_NONLOCAL (gimple_label_label (
2641 as_a <glabel *> (prev_stmt))))
2642 return true;
2643
2644 cfg_stats.num_merged_labels++;
2645 return false;
2646 }
2647 else
2648 return true;
2649 }
2650 else if (gimple_code (stmt) == GIMPLE_CALL)
2651 {
2652 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2653 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2654 start a new block. */
2655 return true;
2656 if (gimple_call_internal_p (stmt, IFN_PHI)
2657 && prev_stmt
2658 && gimple_code (prev_stmt) != GIMPLE_LABEL
2659 && (gimple_code (prev_stmt) != GIMPLE_CALL
2660 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2661 /* PHI nodes start a new block unless preceded by a label
2662 or another PHI. */
2663 return true;
2664 }
2665
2666 return false;
2667 }
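/* Illustrative example (assumed GIMPLE input): in

       L1:
       L2:
         x_1 = 1;

   only L1 starts a new basic block; L2 is counted in
   cfg_stats.num_merged_labels and placed in the same block, since
   neither label is nonlocal or forced.  */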
2668
2669
2670 /* Return true if T should end a basic block. */
2671
2672 bool
2673 stmt_ends_bb_p (gimple *t)
2674 {
2675 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2676 }
2677
2678 /* Remove block annotations and other data structures. */
2679
2680 void
2681 delete_tree_cfg_annotations (struct function *fn)
2682 {
2683 vec_free (label_to_block_map_for_fn (fn));
2684 }
2685
2686 /* Return the virtual phi in BB. */
2687
2688 gphi *
2689 get_virtual_phi (basic_block bb)
2690 {
2691 for (gphi_iterator gsi = gsi_start_phis (bb);
2692 !gsi_end_p (gsi);
2693 gsi_next (&gsi))
2694 {
2695 gphi *phi = gsi.phi ();
2696
2697 if (virtual_operand_p (PHI_RESULT (phi)))
2698 return phi;
2699 }
2700
2701 return NULL;
2702 }
2703
2704 /* Return the first statement in basic block BB. */
2705
2706 gimple *
2707 first_stmt (basic_block bb)
2708 {
2709 gimple_stmt_iterator i = gsi_start_bb (bb);
2710 gimple *stmt = NULL;
2711
2712 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2713 {
2714 gsi_next (&i);
2715 stmt = NULL;
2716 }
2717 return stmt;
2718 }
2719
2720 /* Return the first non-label statement in basic block BB. */
2721
2722 static gimple *
2723 first_non_label_stmt (basic_block bb)
2724 {
2725 gimple_stmt_iterator i = gsi_start_bb (bb);
2726 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2727 gsi_next (&i);
2728 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2729 }
2730
2731 /* Return the last statement in basic block BB. */
2732
2733 gimple *
2734 last_stmt (basic_block bb)
2735 {
2736 gimple_stmt_iterator i = gsi_last_bb (bb);
2737 gimple *stmt = NULL;
2738
2739 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2740 {
2741 gsi_prev (&i);
2742 stmt = NULL;
2743 }
2744 return stmt;
2745 }
2746
2747 /* Return the last statement of an otherwise empty block. Return NULL
2748 if the block is totally empty, or if it contains more than one
2749 statement. */
2750
2751 gimple *
2752 last_and_only_stmt (basic_block bb)
2753 {
2754 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2755 gimple *last, *prev;
2756
2757 if (gsi_end_p (i))
2758 return NULL;
2759
2760 last = gsi_stmt (i);
2761 gsi_prev_nondebug (&i);
2762 if (gsi_end_p (i))
2763 return last;
2764
2765 /* Empty statements should no longer appear in the instruction stream.
2766 Everything that might have appeared before should be deleted by
2767 remove_useless_stmts, and the optimizers should just gsi_remove
2768 instead of smashing with build_empty_stmt.
2769
2770 Thus the only thing that should appear here in a block containing
2771 one executable statement is a label. */
2772 prev = gsi_stmt (i);
2773 if (gimple_code (prev) == GIMPLE_LABEL)
2774 return last;
2775 else
2776 return NULL;
2777 }
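/* For illustration (constructed example): for a block containing only

       L1:
         return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN; for an empty block,
   or one with two executable statements, it returns NULL.  */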
2778
2779 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2780
2781 static void
2782 reinstall_phi_args (edge new_edge, edge old_edge)
2783 {
2784 edge_var_map *vm;
2785 int i;
2786 gphi_iterator phis;
2787
2788 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2789 if (!v)
2790 return;
2791
2792 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2793 v->iterate (i, &vm) && !gsi_end_p (phis);
2794 i++, gsi_next (&phis))
2795 {
2796 gphi *phi = phis.phi ();
2797 tree result = redirect_edge_var_map_result (vm);
2798 tree arg = redirect_edge_var_map_def (vm);
2799
2800 gcc_assert (result == gimple_phi_result (phi));
2801
2802 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2803 }
2804
2805 redirect_edge_var_map_clear (old_edge);
2806 }
2807
2808 /* Returns the basic block after which the new basic block created
2809 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2810 near its "logical" location. This is of most help to humans looking
2811 at debugging dumps. */
2812
2813 basic_block
2814 split_edge_bb_loc (edge edge_in)
2815 {
2816 basic_block dest = edge_in->dest;
2817 basic_block dest_prev = dest->prev_bb;
2818
2819 if (dest_prev)
2820 {
2821 edge e = find_edge (dest_prev, dest);
2822 if (e && !(e->flags & EDGE_COMPLEX))
2823 return edge_in->src;
2824 }
2825 return dest_prev;
2826 }
2827
2828 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2829 Abort on abnormal edges. */
2830
2831 static basic_block
2832 gimple_split_edge (edge edge_in)
2833 {
2834 basic_block new_bb, after_bb, dest;
2835 edge new_edge, e;
2836
2837 /* Abnormal edges cannot be split. */
2838 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2839
2840 dest = edge_in->dest;
2841
2842 after_bb = split_edge_bb_loc (edge_in);
2843
2844 new_bb = create_empty_bb (after_bb);
2845 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2846 new_bb->count = edge_in->count;
2847 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2848
2849 e = redirect_edge_and_branch (edge_in, new_bb);
2850 gcc_assert (e == edge_in);
2851 reinstall_phi_args (new_edge, e);
2852
2853 return new_bb;
2854 }
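/* Usage sketch (illustrative): splitting the critical edge E in

       A ---> B       becomes       A ---> N ---> B

   where N is the returned empty block and the N->B edge is a
   fallthru; PHI arguments queued for E during redirection are
   reinstalled on the new edge by reinstall_phi_args. Callers
   normally go through the CFG hook, e.g.

       basic_block n = split_edge (e);  */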
2855
2856
2857 /* Verify properties of the address expression T with base object BASE. */
2858
2859 static tree
2860 verify_address (tree t, tree base)
2861 {
2862 bool old_constant;
2863 bool old_side_effects;
2864 bool new_constant;
2865 bool new_side_effects;
2866
2867 old_constant = TREE_CONSTANT (t);
2868 old_side_effects = TREE_SIDE_EFFECTS (t);
2869
2870 recompute_tree_invariant_for_addr_expr (t);
2871 new_side_effects = TREE_SIDE_EFFECTS (t);
2872 new_constant = TREE_CONSTANT (t);
2873
2874 if (old_constant != new_constant)
2875 {
2876 error ("constant not recomputed when ADDR_EXPR changed");
2877 return t;
2878 }
2879 if (old_side_effects != new_side_effects)
2880 {
2881 error ("side effects not recomputed when ADDR_EXPR changed");
2882 return t;
2883 }
2884
2885 if (!(VAR_P (base)
2886 || TREE_CODE (base) == PARM_DECL
2887 || TREE_CODE (base) == RESULT_DECL))
2888 return NULL_TREE;
2889
2890 if (DECL_GIMPLE_REG_P (base))
2891 {
2892 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2893 return base;
2894 }
2895
2896 return NULL_TREE;
2897 }
2898
2899 /* Callback for walk_tree, check that all elements with address taken are
2900 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2901 inside a PHI node. */
2902
2903 static tree
2904 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2905 {
2906 tree t = *tp, x;
2907
2908 if (TYPE_P (t))
2909 *walk_subtrees = 0;
2910
2911 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2912 #define CHECK_OP(N, MSG) \
2913 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2914 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2915
2916 switch (TREE_CODE (t))
2917 {
2918 case SSA_NAME:
2919 if (SSA_NAME_IN_FREE_LIST (t))
2920 {
2921 error ("SSA name in freelist but still referenced");
2922 return *tp;
2923 }
2924 break;
2925
2926 case PARM_DECL:
2927 case VAR_DECL:
2928 case RESULT_DECL:
2929 {
2930 tree context = decl_function_context (t);
2931 if (context != cfun->decl
2932 && !SCOPE_FILE_SCOPE_P (context)
2933 && !TREE_STATIC (t)
2934 && !DECL_EXTERNAL (t))
2935 {
2936 error ("Local declaration from a different function");
2937 return t;
2938 }
2939 }
2940 break;
2941
2942 case INDIRECT_REF:
2943 error ("INDIRECT_REF in gimple IL");
2944 return t;
2945
2946 case MEM_REF:
2947 x = TREE_OPERAND (t, 0);
2948 if (!POINTER_TYPE_P (TREE_TYPE (x))
2949 || !is_gimple_mem_ref_addr (x))
2950 {
2951 error ("invalid first operand of MEM_REF");
2952 return x;
2953 }
2954 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2955 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2956 {
2957 error ("invalid offset operand of MEM_REF");
2958 return TREE_OPERAND (t, 1);
2959 }
2960 if (TREE_CODE (x) == ADDR_EXPR)
2961 {
2962 tree va = verify_address (x, TREE_OPERAND (x, 0));
2963 if (va)
2964 return va;
2965 x = TREE_OPERAND (x, 0);
2966 }
2967 walk_tree (&x, verify_expr, data, NULL);
2968 *walk_subtrees = 0;
2969 break;
2970
2971 case ASSERT_EXPR:
2972 x = fold (ASSERT_EXPR_COND (t));
2973 if (x == boolean_false_node)
2974 {
2975 error ("ASSERT_EXPR with an always-false condition");
2976 return *tp;
2977 }
2978 break;
2979
2980 case MODIFY_EXPR:
2981 error ("MODIFY_EXPR not expected while having tuples");
2982 return *tp;
2983
2984 case ADDR_EXPR:
2985 {
2986 tree tem;
2987
2988 gcc_assert (is_gimple_address (t));
2989
2990 /* Skip any references (they will be checked when we recurse down the
2991 tree) and ensure that any variable used as a prefix is marked
2992 addressable. */
2993 for (x = TREE_OPERAND (t, 0);
2994 handled_component_p (x);
2995 x = TREE_OPERAND (x, 0))
2996 ;
2997
2998 if ((tem = verify_address (t, x)))
2999 return tem;
3000
3001 if (!(VAR_P (x)
3002 || TREE_CODE (x) == PARM_DECL
3003 || TREE_CODE (x) == RESULT_DECL))
3004 return NULL;
3005
3006 if (!TREE_ADDRESSABLE (x))
3007 {
3008 error ("address taken, but ADDRESSABLE bit not set");
3009 return x;
3010 }
3011
3012 break;
3013 }
3014
3015 case COND_EXPR:
3016 x = COND_EXPR_COND (t);
3017 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3018 {
3019 error ("non-integral used in condition");
3020 return x;
3021 }
3022 if (!is_gimple_condexpr (x))
3023 {
3024 error ("invalid conditional operand");
3025 return x;
3026 }
3027 break;
3028
3029 case NON_LVALUE_EXPR:
3030 case TRUTH_NOT_EXPR:
3031 gcc_unreachable ();
3032
3033 CASE_CONVERT:
3034 case FIX_TRUNC_EXPR:
3035 case FLOAT_EXPR:
3036 case NEGATE_EXPR:
3037 case ABS_EXPR:
3038 case BIT_NOT_EXPR:
3039 CHECK_OP (0, "invalid operand to unary operator");
3040 break;
3041
3042 case REALPART_EXPR:
3043 case IMAGPART_EXPR:
3044 case BIT_FIELD_REF:
3045 if (!is_gimple_reg_type (TREE_TYPE (t)))
3046 {
3047 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3048 return t;
3049 }
3050
3051 if (TREE_CODE (t) == BIT_FIELD_REF)
3052 {
3053 tree t0 = TREE_OPERAND (t, 0);
3054 tree t1 = TREE_OPERAND (t, 1);
3055 tree t2 = TREE_OPERAND (t, 2);
3056 if (!tree_fits_uhwi_p (t1)
3057 || !tree_fits_uhwi_p (t2)
3058 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3059 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3060 {
3061 error ("invalid position or size operand to BIT_FIELD_REF");
3062 return t;
3063 }
3064 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3065 && (TYPE_PRECISION (TREE_TYPE (t))
3066 != tree_to_uhwi (t1)))
3067 {
3068 error ("integral result type precision does not match "
3069 "field size of BIT_FIELD_REF");
3070 return t;
3071 }
3072 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3073 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3074 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
3075 != tree_to_uhwi (t1)))
3076 {
3077 error ("mode size of non-integral result does not "
3078 "match field size of BIT_FIELD_REF");
3079 return t;
3080 }
3081 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3082 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
3083 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
3084 {
3085 error ("position plus size exceeds size of referenced object in "
3086 "BIT_FIELD_REF");
3087 return t;
3088 }
3089 }
3090 t = TREE_OPERAND (t, 0);
3091
3092 /* Fall-through. */
3093 case COMPONENT_REF:
3094 case ARRAY_REF:
3095 case ARRAY_RANGE_REF:
3096 case VIEW_CONVERT_EXPR:
3097 /* We have a nest of references. Verify that each of the operands
3098 that determine where to reference is either a constant or a variable,
3099 verify that the base is valid, and then show we've already checked
3100 the subtrees. */
3101 while (handled_component_p (t))
3102 {
3103 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3104 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3105 else if (TREE_CODE (t) == ARRAY_REF
3106 || TREE_CODE (t) == ARRAY_RANGE_REF)
3107 {
3108 CHECK_OP (1, "invalid array index");
3109 if (TREE_OPERAND (t, 2))
3110 CHECK_OP (2, "invalid array lower bound");
3111 if (TREE_OPERAND (t, 3))
3112 CHECK_OP (3, "invalid array stride");
3113 }
3114 else if (TREE_CODE (t) == BIT_FIELD_REF
3115 || TREE_CODE (t) == REALPART_EXPR
3116 || TREE_CODE (t) == IMAGPART_EXPR)
3117 {
3118 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3119 "REALPART_EXPR");
3120 return t;
3121 }
3122
3123 t = TREE_OPERAND (t, 0);
3124 }
3125
3126 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3127 {
3128 error ("invalid reference prefix");
3129 return t;
3130 }
3131 walk_tree (&t, verify_expr, data, NULL);
3132 *walk_subtrees = 0;
3133 break;
3134 case PLUS_EXPR:
3135 case MINUS_EXPR:
3136 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
3137 arithmetic should be done using POINTER_PLUS_EXPR. */
3138 if (POINTER_TYPE_P (TREE_TYPE (t)))
3139 {
3140 error ("invalid operand to plus/minus, type is a pointer");
3141 return t;
3142 }
3143 CHECK_OP (0, "invalid operand to binary operator");
3144 CHECK_OP (1, "invalid operand to binary operator");
3145 break;
3146
3147 case POINTER_PLUS_EXPR:
3148 /* Check to make sure the first operand is a pointer or reference type. */
3149 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3150 {
3151 error ("invalid operand to pointer plus, first operand is not a pointer");
3152 return t;
3153 }
3154 /* Check to make sure the second operand is a ptrofftype. */
3155 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3156 {
3157 error ("invalid operand to pointer plus, second operand is not an "
3158 "integer type of appropriate width");
3159 return t;
3160 }
3161 /* FALLTHROUGH */
3162 case LT_EXPR:
3163 case LE_EXPR:
3164 case GT_EXPR:
3165 case GE_EXPR:
3166 case EQ_EXPR:
3167 case NE_EXPR:
3168 case UNORDERED_EXPR:
3169 case ORDERED_EXPR:
3170 case UNLT_EXPR:
3171 case UNLE_EXPR:
3172 case UNGT_EXPR:
3173 case UNGE_EXPR:
3174 case UNEQ_EXPR:
3175 case LTGT_EXPR:
3176 case MULT_EXPR:
3177 case TRUNC_DIV_EXPR:
3178 case CEIL_DIV_EXPR:
3179 case FLOOR_DIV_EXPR:
3180 case ROUND_DIV_EXPR:
3181 case TRUNC_MOD_EXPR:
3182 case CEIL_MOD_EXPR:
3183 case FLOOR_MOD_EXPR:
3184 case ROUND_MOD_EXPR:
3185 case RDIV_EXPR:
3186 case EXACT_DIV_EXPR:
3187 case MIN_EXPR:
3188 case MAX_EXPR:
3189 case LSHIFT_EXPR:
3190 case RSHIFT_EXPR:
3191 case LROTATE_EXPR:
3192 case RROTATE_EXPR:
3193 case BIT_IOR_EXPR:
3194 case BIT_XOR_EXPR:
3195 case BIT_AND_EXPR:
3196 CHECK_OP (0, "invalid operand to binary operator");
3197 CHECK_OP (1, "invalid operand to binary operator");
3198 break;
3199
3200 case CONSTRUCTOR:
3201 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3202 *walk_subtrees = 0;
3203 break;
3204
3205 case CASE_LABEL_EXPR:
3206 if (CASE_CHAIN (t))
3207 {
3208 error ("invalid CASE_CHAIN");
3209 return t;
3210 }
3211 break;
3212
3213 default:
3214 break;
3215 }
3216 return NULL;
3217
3218 #undef CHECK_OP
3219 }
3220
3221
3222 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3223 Returns true if there is an error, otherwise false. */
3224
3225 static bool
3226 verify_types_in_gimple_min_lval (tree expr)
3227 {
3228 tree op;
3229
3230 if (is_gimple_id (expr))
3231 return false;
3232
3233 if (TREE_CODE (expr) != TARGET_MEM_REF
3234 && TREE_CODE (expr) != MEM_REF)
3235 {
3236 error ("invalid expression for min lvalue");
3237 return true;
3238 }
3239
3240 /* TARGET_MEM_REFs are strange beasts. */
3241 if (TREE_CODE (expr) == TARGET_MEM_REF)
3242 return false;
3243
3244 op = TREE_OPERAND (expr, 0);
3245 if (!is_gimple_val (op))
3246 {
3247 error ("invalid operand in indirect reference");
3248 debug_generic_stmt (op);
3249 return true;
3250 }
3251 /* Memory references now generally can involve a value conversion. */
3252
3253 return false;
3254 }
3255
3256 /* Verify if EXPR is a valid GIMPLE reference expression. If
3257 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3258 if there is an error, otherwise false. */
3259
3260 static bool
3261 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3262 {
3263 while (handled_component_p (expr))
3264 {
3265 tree op = TREE_OPERAND (expr, 0);
3266
3267 if (TREE_CODE (expr) == ARRAY_REF
3268 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3269 {
3270 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3271 || (TREE_OPERAND (expr, 2)
3272 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3273 || (TREE_OPERAND (expr, 3)
3274 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3275 {
3276 error ("invalid operands to array reference");
3277 debug_generic_stmt (expr);
3278 return true;
3279 }
3280 }
3281
3282 /* Verify if the reference array element types are compatible. */
3283 if (TREE_CODE (expr) == ARRAY_REF
3284 && !useless_type_conversion_p (TREE_TYPE (expr),
3285 TREE_TYPE (TREE_TYPE (op))))
3286 {
3287 error ("type mismatch in array reference");
3288 debug_generic_stmt (TREE_TYPE (expr));
3289 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3290 return true;
3291 }
3292 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3293 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3294 TREE_TYPE (TREE_TYPE (op))))
3295 {
3296 error ("type mismatch in array range reference");
3297 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3298 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3299 return true;
3300 }
3301
3302 if ((TREE_CODE (expr) == REALPART_EXPR
3303 || TREE_CODE (expr) == IMAGPART_EXPR)
3304 && !useless_type_conversion_p (TREE_TYPE (expr),
3305 TREE_TYPE (TREE_TYPE (op))))
3306 {
3307 error ("type mismatch in real/imagpart reference");
3308 debug_generic_stmt (TREE_TYPE (expr));
3309 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3310 return true;
3311 }
3312
3313 if (TREE_CODE (expr) == COMPONENT_REF
3314 && !useless_type_conversion_p (TREE_TYPE (expr),
3315 TREE_TYPE (TREE_OPERAND (expr, 1))))
3316 {
3317 error ("type mismatch in component reference");
3318 debug_generic_stmt (TREE_TYPE (expr));
3319 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3320 return true;
3321 }
3322
3323 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3324 {
3325 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3326 that their operand is not an SSA name or an invariant when
3327 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3328 bug). Otherwise there is nothing to verify, gross mismatches at
3329 most invoke undefined behavior. */
3330 if (require_lvalue
3331 && (TREE_CODE (op) == SSA_NAME
3332 || is_gimple_min_invariant (op)))
3333 {
3334 error ("conversion of an SSA_NAME on the left hand side");
3335 debug_generic_stmt (expr);
3336 return true;
3337 }
3338 else if (TREE_CODE (op) == SSA_NAME
3339 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3340 {
3341 error ("conversion of register to a different size");
3342 debug_generic_stmt (expr);
3343 return true;
3344 }
3345 else if (!handled_component_p (op))
3346 return false;
3347 }
3348
3349 expr = op;
3350 }
3351
3352 if (TREE_CODE (expr) == MEM_REF)
3353 {
3354 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3355 {
3356 error ("invalid address operand in MEM_REF");
3357 debug_generic_stmt (expr);
3358 return true;
3359 }
3360 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3361 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3362 {
3363 error ("invalid offset operand in MEM_REF");
3364 debug_generic_stmt (expr);
3365 return true;
3366 }
3367 }
3368 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3369 {
3370 if (!TMR_BASE (expr)
3371 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3372 {
3373 error ("invalid address operand in TARGET_MEM_REF");
3374 return true;
3375 }
3376 if (!TMR_OFFSET (expr)
3377 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3378 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3379 {
3380 error ("invalid offset operand in TARGET_MEM_REF");
3381 debug_generic_stmt (expr);
3382 return true;
3383 }
3384 }
3385
3386 return ((require_lvalue || !is_gimple_min_invariant (expr))
3387 && verify_types_in_gimple_min_lval (expr));
3388 }
3389
3390 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3391 list of pointer-to types that is trivially convertible to DEST. */
3392
3393 static bool
3394 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3395 {
3396 tree src;
3397
3398 if (!TYPE_POINTER_TO (src_obj))
3399 return true;
3400
3401 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3402 if (useless_type_conversion_p (dest, src))
3403 return true;
3404
3405 return false;
3406 }
3407
3408 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3409 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3410
3411 static bool
3412 valid_fixed_convert_types_p (tree type1, tree type2)
3413 {
3414 return (FIXED_POINT_TYPE_P (type1)
3415 && (INTEGRAL_TYPE_P (type2)
3416 || SCALAR_FLOAT_TYPE_P (type2)
3417 || FIXED_POINT_TYPE_P (type2)));
3418 }
3419
3420 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3421 is a problem, otherwise false. */
3422
3423 static bool
3424 verify_gimple_call (gcall *stmt)
3425 {
3426 tree fn = gimple_call_fn (stmt);
3427 tree fntype, fndecl;
3428 unsigned i;
3429
3430 if (gimple_call_internal_p (stmt))
3431 {
3432 if (fn)
3433 {
3434 error ("gimple call has two targets");
3435 debug_generic_stmt (fn);
3436 return true;
3437 }
3438 /* FIXME: for passing a label as an arg in internal fn PHI from the GIMPLE FE. */
3439 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3440 {
3441 return false;
3442 }
3443 }
3444 else
3445 {
3446 if (!fn)
3447 {
3448 error ("gimple call has no target");
3449 return true;
3450 }
3451 }
3452
3453 if (fn && !is_gimple_call_addr (fn))
3454 {
3455 error ("invalid function in gimple call");
3456 debug_generic_stmt (fn);
3457 return true;
3458 }
3459
3460 if (fn
3461 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3462 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3463 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3464 {
3465 error ("non-function in gimple call");
3466 return true;
3467 }
3468
3469 fndecl = gimple_call_fndecl (stmt);
3470 if (fndecl
3471 && TREE_CODE (fndecl) == FUNCTION_DECL
3472 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3473 && !DECL_PURE_P (fndecl)
3474 && !TREE_READONLY (fndecl))
3475 {
3476 error ("invalid pure const state for function");
3477 return true;
3478 }
3479
3480 tree lhs = gimple_call_lhs (stmt);
3481 if (lhs
3482 && (!is_gimple_lvalue (lhs)
3483 || verify_types_in_gimple_reference (lhs, true)))
3484 {
3485 error ("invalid LHS in gimple call");
3486 return true;
3487 }
3488
3489 if (gimple_call_ctrl_altering_p (stmt)
3490 && gimple_call_noreturn_p (stmt)
3491 && should_remove_lhs_p (lhs))
3492 {
3493 error ("LHS in noreturn call");
3494 return true;
3495 }
3496
3497 fntype = gimple_call_fntype (stmt);
3498 if (fntype
3499 && lhs
3500 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3501 /* ??? At least C++ misses conversions at assignments from
3502 void * call results.
3503 For now simply allow arbitrary pointer type conversions. */
3504 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3505 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3506 {
3507 error ("invalid conversion in gimple call");
3508 debug_generic_stmt (TREE_TYPE (lhs));
3509 debug_generic_stmt (TREE_TYPE (fntype));
3510 return true;
3511 }
3512
3513 if (gimple_call_chain (stmt)
3514 && !is_gimple_val (gimple_call_chain (stmt)))
3515 {
3516 error ("invalid static chain in gimple call");
3517 debug_generic_stmt (gimple_call_chain (stmt));
3518 return true;
3519 }
3520
3521 /* If there is a static chain argument, the call should either be
3522 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3523 if (gimple_call_chain (stmt)
3524 && fndecl
3525 && !DECL_STATIC_CHAIN (fndecl))
3526 {
3527 error ("static chain with function that doesn%'t use one");
3528 return true;
3529 }
3530
3531 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3532 {
3533 switch (DECL_FUNCTION_CODE (fndecl))
3534 {
3535 case BUILT_IN_UNREACHABLE:
3536 case BUILT_IN_TRAP:
3537 if (gimple_call_num_args (stmt) > 0)
3538 {
3539 /* Built-in unreachable with parameters might not be caught by
3540 the undefined behavior sanitizer. Front ends do check that users
3541 do not call them that way, but we also produce calls to
3542 __builtin_unreachable internally, for example when IPA figures
3543 out a call cannot happen in a legal program. In such cases,
3544 we must make sure arguments are stripped off. */
3545 error ("__builtin_unreachable or __builtin_trap call with "
3546 "arguments");
3547 return true;
3548 }
3549 break;
3550 default:
3551 break;
3552 }
3553 }
3554
3555 /* ??? The C frontend passes unpromoted arguments in case it
3556 didn't see a function declaration before the call. So for now
3557 leave the call arguments mostly unverified. Once we gimplify
3558 unit-at-a-time we have a chance to fix this. */
3559
3560 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3561 {
3562 tree arg = gimple_call_arg (stmt, i);
3563 if ((is_gimple_reg_type (TREE_TYPE (arg))
3564 && !is_gimple_val (arg))
3565 || (!is_gimple_reg_type (TREE_TYPE (arg))
3566 && !is_gimple_lvalue (arg)))
3567 {
3568 error ("invalid argument to gimple call");
3569 debug_generic_expr (arg);
3570 return true;
3571 }
3572 }
3573
3574 return false;
3575 }
3576
3577 /* Verifies the gimple comparison with the result type TYPE and
3578 the operands OP0 and OP1, comparison code is CODE. */
3579
3580 static bool
3581 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3582 {
3583 tree op0_type = TREE_TYPE (op0);
3584 tree op1_type = TREE_TYPE (op1);
3585
3586 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3587 {
3588 error ("invalid operands in gimple comparison");
3589 return true;
3590 }
3591
3592 /* For comparisons we do not have the operation's type as the
3593 effective type the comparison is carried out in. Instead
3594 we require that either the first operand is trivially
3595 convertible into the second, or the other way around.
3596 Because we special-case pointers to void we allow
3597 comparisons of pointers with the same mode as well. */
3598 if (!useless_type_conversion_p (op0_type, op1_type)
3599 && !useless_type_conversion_p (op1_type, op0_type)
3600 && (!POINTER_TYPE_P (op0_type)
3601 || !POINTER_TYPE_P (op1_type)
3602 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3603 {
3604 error ("mismatching comparison operand types");
3605 debug_generic_expr (op0_type);
3606 debug_generic_expr (op1_type);
3607 return true;
3608 }
3609
3610 /* The resulting type of a comparison may be an effective boolean type. */
3611 if (INTEGRAL_TYPE_P (type)
3612 && (TREE_CODE (type) == BOOLEAN_TYPE
3613 || TYPE_PRECISION (type) == 1))
3614 {
3615 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3616 || TREE_CODE (op1_type) == VECTOR_TYPE)
3617 && code != EQ_EXPR && code != NE_EXPR
3618 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3619 && !VECTOR_INTEGER_TYPE_P (op0_type))
3620 {
3621 error ("unsupported operation or type for vector comparison"
3622 " returning a boolean");
3623 debug_generic_expr (op0_type);
3624 debug_generic_expr (op1_type);
3625 return true;
3626 }
3627 }
3628 /* Or a boolean vector type with the same element count
3629 as the comparison operand types. */
3630 else if (TREE_CODE (type) == VECTOR_TYPE
3631 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3632 {
3633 if (TREE_CODE (op0_type) != VECTOR_TYPE
3634 || TREE_CODE (op1_type) != VECTOR_TYPE)
3635 {
3636 error ("non-vector operands in vector comparison");
3637 debug_generic_expr (op0_type);
3638 debug_generic_expr (op1_type);
3639 return true;
3640 }
3641
3642 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3643 {
3644 error ("invalid vector comparison resulting type");
3645 debug_generic_expr (type);
3646 return true;
3647 }
3648 }
3649 else
3650 {
3651 error ("bogus comparison result type");
3652 debug_generic_expr (type);
3653 return true;
3654 }
3655
3656 return false;
3657 }
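/* Examples of the checked invariants (constructed, not from the
   source): with integer operands,

       _1 = x_2 == y_3;    where _1 has boolean type        -- valid

   while a vector comparison must produce a boolean vector with the
   same number of elements as the operands,

       _1 = a_2 < b_3;     a, b V4SI; _1 a 4-element
                           boolean vector                   -- valid

   and anything else, such as a float result type, is rejected as a
   bogus comparison result type.  */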
3658
3659 /* Verify a gimple assignment statement STMT with an unary rhs.
3660 Returns true if anything is wrong. */
3661
3662 static bool
3663 verify_gimple_assign_unary (gassign *stmt)
3664 {
3665 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3666 tree lhs = gimple_assign_lhs (stmt);
3667 tree lhs_type = TREE_TYPE (lhs);
3668 tree rhs1 = gimple_assign_rhs1 (stmt);
3669 tree rhs1_type = TREE_TYPE (rhs1);
3670
3671 if (!is_gimple_reg (lhs))
3672 {
3673 error ("non-register as LHS of unary operation");
3674 return true;
3675 }
3676
3677 if (!is_gimple_val (rhs1))
3678 {
3679 error ("invalid operand in unary operation");
3680 return true;
3681 }
3682
3683 /* First handle conversions. */
3684 switch (rhs_code)
3685 {
3686 CASE_CONVERT:
3687 {
3688 /* Allow conversions from pointer type to integral type only if
3689 there is no sign or zero extension involved.
3690 For targets where the precision of ptrofftype doesn't match that
3691 of pointers, we need to allow arbitrary conversions to ptrofftype. */
3692 if ((POINTER_TYPE_P (lhs_type)
3693 && INTEGRAL_TYPE_P (rhs1_type))
3694 || (POINTER_TYPE_P (rhs1_type)
3695 && INTEGRAL_TYPE_P (lhs_type)
3696 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3697 || ptrofftype_p (sizetype))))
3698 return false;
3699
3700 /* Allow conversion from integral to offset type and vice versa. */
3701 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3702 && INTEGRAL_TYPE_P (rhs1_type))
3703 || (INTEGRAL_TYPE_P (lhs_type)
3704 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3705 return false;
3706
3707 /* Otherwise assert we are converting between types of the
3708 same kind. */
3709 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3710 {
3711 error ("invalid types in nop conversion");
3712 debug_generic_expr (lhs_type);
3713 debug_generic_expr (rhs1_type);
3714 return true;
3715 }
3716
3717 return false;
3718 }
3719
3720 case ADDR_SPACE_CONVERT_EXPR:
3721 {
3722 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3723 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3724 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3725 {
3726 error ("invalid types in address space conversion");
3727 debug_generic_expr (lhs_type);
3728 debug_generic_expr (rhs1_type);
3729 return true;
3730 }
3731
3732 return false;
3733 }
3734
3735 case FIXED_CONVERT_EXPR:
3736 {
3737 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3738 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3739 {
3740 error ("invalid types in fixed-point conversion");
3741 debug_generic_expr (lhs_type);
3742 debug_generic_expr (rhs1_type);
3743 return true;
3744 }
3745
3746 return false;
3747 }
3748
3749 case FLOAT_EXPR:
3750 {
3751 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3752 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3753 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3754 {
3755 error ("invalid types in conversion to floating point");
3756 debug_generic_expr (lhs_type);
3757 debug_generic_expr (rhs1_type);
3758 return true;
3759 }
3760
3761 return false;
3762 }
3763
3764 case FIX_TRUNC_EXPR:
3765 {
3766 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3767 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3768 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3769 {
3770 error ("invalid types in conversion to integer");
3771 debug_generic_expr (lhs_type);
3772 debug_generic_expr (rhs1_type);
3773 return true;
3774 }
3775
3776 return false;
3777 }
3778 case REDUC_MAX_EXPR:
3779 case REDUC_MIN_EXPR:
3780 case REDUC_PLUS_EXPR:
3781 if (!VECTOR_TYPE_P (rhs1_type)
3782 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3783 {
3784 error ("reduction should convert from vector to element type");
3785 debug_generic_expr (lhs_type);
3786 debug_generic_expr (rhs1_type);
3787 return true;
3788 }
3789 return false;
3790
3791 case VEC_UNPACK_HI_EXPR:
3792 case VEC_UNPACK_LO_EXPR:
3793 case VEC_UNPACK_FLOAT_HI_EXPR:
3794 case VEC_UNPACK_FLOAT_LO_EXPR:
3795 /* FIXME. */
3796 return false;
3797
3798 case NEGATE_EXPR:
3799 case ABS_EXPR:
3800 case BIT_NOT_EXPR:
3801 case PAREN_EXPR:
3802 case CONJ_EXPR:
3803 break;
3804
3805 default:
3806 gcc_unreachable ();
3807 }
3808
3809 /* For the remaining codes assert there is no conversion involved. */
3810 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3811 {
3812 error ("non-trivial conversion in unary operation");
3813 debug_generic_expr (lhs_type);
3814 debug_generic_expr (rhs1_type);
3815 return true;
3816 }
3817
3818 return false;
3819 }
3820
3821 /* Verify a gimple assignment statement STMT with a binary rhs.
3822 Returns true if anything is wrong. */
3823
3824 static bool
3825 verify_gimple_assign_binary (gassign *stmt)
3826 {
3827 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3828 tree lhs = gimple_assign_lhs (stmt);
3829 tree lhs_type = TREE_TYPE (lhs);
3830 tree rhs1 = gimple_assign_rhs1 (stmt);
3831 tree rhs1_type = TREE_TYPE (rhs1);
3832 tree rhs2 = gimple_assign_rhs2 (stmt);
3833 tree rhs2_type = TREE_TYPE (rhs2);
3834
3835 if (!is_gimple_reg (lhs))
3836 {
3837 error ("non-register as LHS of binary operation");
3838 return true;
3839 }
3840
3841 if (!is_gimple_val (rhs1)
3842 || !is_gimple_val (rhs2))
3843 {
3844 error ("invalid operands in binary operation");
3845 return true;
3846 }
3847
3848 /* First handle operations that involve different types. */
3849 switch (rhs_code)
3850 {
3851 case COMPLEX_EXPR:
3852 {
3853 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3854 || !(INTEGRAL_TYPE_P (rhs1_type)
3855 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3856 || !(INTEGRAL_TYPE_P (rhs2_type)
3857 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3858 {
3859 error ("type mismatch in complex expression");
3860 debug_generic_expr (lhs_type);
3861 debug_generic_expr (rhs1_type);
3862 debug_generic_expr (rhs2_type);
3863 return true;
3864 }
3865
3866 return false;
3867 }
3868
3869 case LSHIFT_EXPR:
3870 case RSHIFT_EXPR:
3871 case LROTATE_EXPR:
3872 case RROTATE_EXPR:
3873 {
3874 /* Shifts and rotates are ok on integral types, fixed point
3875 types and integer vector types. */
3876 if ((!INTEGRAL_TYPE_P (rhs1_type)
3877 && !FIXED_POINT_TYPE_P (rhs1_type)
3878 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3879 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3880 || (!INTEGRAL_TYPE_P (rhs2_type)
3881 /* Vector shifts of vectors are also ok. */
3882 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3883 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3884 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3885 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3886 || !useless_type_conversion_p (lhs_type, rhs1_type))
3887 {
3888 error ("type mismatch in shift expression");
3889 debug_generic_expr (lhs_type);
3890 debug_generic_expr (rhs1_type);
3891 debug_generic_expr (rhs2_type);
3892 return true;
3893 }
3894
3895 return false;
3896 }
3897
3898 case WIDEN_LSHIFT_EXPR:
3899 {
3900 if (!INTEGRAL_TYPE_P (lhs_type)
3901 || !INTEGRAL_TYPE_P (rhs1_type)
3902 || TREE_CODE (rhs2) != INTEGER_CST
3903 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3904 {
3905 error ("type mismatch in widening vector shift expression");
3906 debug_generic_expr (lhs_type);
3907 debug_generic_expr (rhs1_type);
3908 debug_generic_expr (rhs2_type);
3909 return true;
3910 }
3911
3912 return false;
3913 }
3914
3915 case VEC_WIDEN_LSHIFT_HI_EXPR:
3916 case VEC_WIDEN_LSHIFT_LO_EXPR:
3917 {
3918 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3919 || TREE_CODE (lhs_type) != VECTOR_TYPE
3920 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3921 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3922 || TREE_CODE (rhs2) != INTEGER_CST
3923 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3924 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3925 {
3926 error ("type mismatch in widening vector shift expression");
3927 debug_generic_expr (lhs_type);
3928 debug_generic_expr (rhs1_type);
3929 debug_generic_expr (rhs2_type);
3930 return true;
3931 }
3932
3933 return false;
3934 }
3935
3936 case PLUS_EXPR:
3937 case MINUS_EXPR:
3938 {
3939 tree lhs_etype = lhs_type;
3940 tree rhs1_etype = rhs1_type;
3941 tree rhs2_etype = rhs2_type;
3942 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3943 {
3944 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3945 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3946 {
3947 error ("invalid non-vector operands to vector valued plus");
3948 return true;
3949 }
3950 lhs_etype = TREE_TYPE (lhs_type);
3951 rhs1_etype = TREE_TYPE (rhs1_type);
3952 rhs2_etype = TREE_TYPE (rhs2_type);
3953 }
3954 if (POINTER_TYPE_P (lhs_etype)
3955 || POINTER_TYPE_P (rhs1_etype)
3956 || POINTER_TYPE_P (rhs2_etype))
3957 {
3958 error ("invalid (pointer) operands to plus/minus");
3959 return true;
3960 }
3961
3962 /* Continue with generic binary expression handling. */
3963 break;
3964 }
3965
3966 case POINTER_PLUS_EXPR:
3967 {
3968 if (!POINTER_TYPE_P (rhs1_type)
3969 || !useless_type_conversion_p (lhs_type, rhs1_type)
3970 || !ptrofftype_p (rhs2_type))
3971 {
3972 error ("type mismatch in pointer plus expression");
3973 debug_generic_stmt (lhs_type);
3974 debug_generic_stmt (rhs1_type);
3975 debug_generic_stmt (rhs2_type);
3976 return true;
3977 }
3978
3979 return false;
3980 }
3981
3982 case TRUTH_ANDIF_EXPR:
3983 case TRUTH_ORIF_EXPR:
3984 case TRUTH_AND_EXPR:
3985 case TRUTH_OR_EXPR:
3986 case TRUTH_XOR_EXPR:
3987
3988 gcc_unreachable ();
3989
3990 case LT_EXPR:
3991 case LE_EXPR:
3992 case GT_EXPR:
3993 case GE_EXPR:
3994 case EQ_EXPR:
3995 case NE_EXPR:
3996 case UNORDERED_EXPR:
3997 case ORDERED_EXPR:
3998 case UNLT_EXPR:
3999 case UNLE_EXPR:
4000 case UNGT_EXPR:
4001 case UNGE_EXPR:
4002 case UNEQ_EXPR:
4003 case LTGT_EXPR:
4004 /* Comparisons are also binary, but the result type is not
4005 connected to the operand types. */
4006 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4007
4008 case WIDEN_MULT_EXPR:
4009 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4010 return true;
4011 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4012 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4013
4014 case WIDEN_SUM_EXPR:
4015 case VEC_WIDEN_MULT_HI_EXPR:
4016 case VEC_WIDEN_MULT_LO_EXPR:
4017 case VEC_WIDEN_MULT_EVEN_EXPR:
4018 case VEC_WIDEN_MULT_ODD_EXPR:
4019 case VEC_PACK_TRUNC_EXPR:
4020 case VEC_PACK_SAT_EXPR:
4021 case VEC_PACK_FIX_TRUNC_EXPR:
4022 /* FIXME. */
4023 return false;
4024
4025 case MULT_EXPR:
4026 case MULT_HIGHPART_EXPR:
4027 case TRUNC_DIV_EXPR:
4028 case CEIL_DIV_EXPR:
4029 case FLOOR_DIV_EXPR:
4030 case ROUND_DIV_EXPR:
4031 case TRUNC_MOD_EXPR:
4032 case CEIL_MOD_EXPR:
4033 case FLOOR_MOD_EXPR:
4034 case ROUND_MOD_EXPR:
4035 case RDIV_EXPR:
4036 case EXACT_DIV_EXPR:
4037 case MIN_EXPR:
4038 case MAX_EXPR:
4039 case BIT_IOR_EXPR:
4040 case BIT_XOR_EXPR:
4041 case BIT_AND_EXPR:
4042 /* Continue with generic binary expression handling. */
4043 break;
4044
4045 default:
4046 gcc_unreachable ();
4047 }
4048
4049 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4050 || !useless_type_conversion_p (lhs_type, rhs2_type))
4051 {
4052 error ("type mismatch in binary expression");
4053 debug_generic_stmt (lhs_type);
4054 debug_generic_stmt (rhs1_type);
4055 debug_generic_stmt (rhs2_type);
4056 return true;
4057 }
4058
4059 return false;
4060 }
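
/* An illustrative sketch (not part of the verifier): with the GNU
   vector extension the following C is valid and gimplifies to a
   LSHIFT_EXPR whose rhs1 is a vector and whose rhs2 is a scalar
   integer, a combination the shift case above deliberately accepts:

     typedef int v4si __attribute__ ((vector_size (16)));

     v4si
     shift_each (v4si a, int n)
     {
       return a << n;
     }

   Shifting by a vector of counts (a << v for a v4si v) is accepted as
   well; any other combination trips the "type mismatch in shift
   expression" error above.  */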
4061
4062 /* Verify a gimple assignment statement STMT with a ternary rhs.
4063 Returns true if anything is wrong. */
4064
4065 static bool
4066 verify_gimple_assign_ternary (gassign *stmt)
4067 {
4068 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4069 tree lhs = gimple_assign_lhs (stmt);
4070 tree lhs_type = TREE_TYPE (lhs);
4071 tree rhs1 = gimple_assign_rhs1 (stmt);
4072 tree rhs1_type = TREE_TYPE (rhs1);
4073 tree rhs2 = gimple_assign_rhs2 (stmt);
4074 tree rhs2_type = TREE_TYPE (rhs2);
4075 tree rhs3 = gimple_assign_rhs3 (stmt);
4076 tree rhs3_type = TREE_TYPE (rhs3);
4077
4078 if (!is_gimple_reg (lhs))
4079 {
4080 error ("non-register as LHS of ternary operation");
4081 return true;
4082 }
4083
4084 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4085 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4086 || !is_gimple_val (rhs2)
4087 || !is_gimple_val (rhs3))
4088 {
4089 error ("invalid operands in ternary operation");
4090 return true;
4091 }
4092
4093 /* First handle operations that involve different types. */
4094 switch (rhs_code)
4095 {
4096 case WIDEN_MULT_PLUS_EXPR:
4097 case WIDEN_MULT_MINUS_EXPR:
4098 if ((!INTEGRAL_TYPE_P (rhs1_type)
4099 && !FIXED_POINT_TYPE_P (rhs1_type))
4100 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4101 || !useless_type_conversion_p (lhs_type, rhs3_type)
4102 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4103 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4104 {
4105 error ("type mismatch in widening multiply-accumulate expression");
4106 debug_generic_expr (lhs_type);
4107 debug_generic_expr (rhs1_type);
4108 debug_generic_expr (rhs2_type);
4109 debug_generic_expr (rhs3_type);
4110 return true;
4111 }
4112 break;
4113
4114 case FMA_EXPR:
4115 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4116 || !useless_type_conversion_p (lhs_type, rhs2_type)
4117 || !useless_type_conversion_p (lhs_type, rhs3_type))
4118 {
4119 error ("type mismatch in fused multiply-add expression");
4120 debug_generic_expr (lhs_type);
4121 debug_generic_expr (rhs1_type);
4122 debug_generic_expr (rhs2_type);
4123 debug_generic_expr (rhs3_type);
4124 return true;
4125 }
4126 break;
4127
4128 case VEC_COND_EXPR:
4129 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4130 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4131 != TYPE_VECTOR_SUBPARTS (lhs_type))
4132 {
4133 error ("the first argument of a VEC_COND_EXPR must be of a "
4134 "boolean vector type of the same number of elements "
4135 "as the result");
4136 debug_generic_expr (lhs_type);
4137 debug_generic_expr (rhs1_type);
4138 return true;
4139 }
4140 /* Fallthrough. */
4141 case COND_EXPR:
4142 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4143 || !useless_type_conversion_p (lhs_type, rhs3_type))
4144 {
4145 error ("type mismatch in conditional expression");
4146 debug_generic_expr (lhs_type);
4147 debug_generic_expr (rhs2_type);
4148 debug_generic_expr (rhs3_type);
4149 return true;
4150 }
4151 break;
4152
4153 case VEC_PERM_EXPR:
4154 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4155 || !useless_type_conversion_p (lhs_type, rhs2_type))
4156 {
4157 error ("type mismatch in vector permute expression");
4158 debug_generic_expr (lhs_type);
4159 debug_generic_expr (rhs1_type);
4160 debug_generic_expr (rhs2_type);
4161 debug_generic_expr (rhs3_type);
4162 return true;
4163 }
4164
4165 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4166 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4167 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4168 {
4169 error ("vector types expected in vector permute expression");
4170 debug_generic_expr (lhs_type);
4171 debug_generic_expr (rhs1_type);
4172 debug_generic_expr (rhs2_type);
4173 debug_generic_expr (rhs3_type);
4174 return true;
4175 }
4176
4177 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4178 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4179 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4180 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4181 != TYPE_VECTOR_SUBPARTS (lhs_type))
4182 {
4183 error ("vectors with different element number found "
4184 "in vector permute expression");
4185 debug_generic_expr (lhs_type);
4186 debug_generic_expr (rhs1_type);
4187 debug_generic_expr (rhs2_type);
4188 debug_generic_expr (rhs3_type);
4189 return true;
4190 }
4191
4192 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4193 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4194 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4195 {
4196 error ("invalid mask type in vector permute expression");
4197 debug_generic_expr (lhs_type);
4198 debug_generic_expr (rhs1_type);
4199 debug_generic_expr (rhs2_type);
4200 debug_generic_expr (rhs3_type);
4201 return true;
4202 }
4203
4204 return false;
4205
4206 case SAD_EXPR:
4207 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4208 || !useless_type_conversion_p (lhs_type, rhs3_type)
4209 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4210 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4211 {
4212 error ("type mismatch in sad expression");
4213 debug_generic_expr (lhs_type);
4214 debug_generic_expr (rhs1_type);
4215 debug_generic_expr (rhs2_type);
4216 debug_generic_expr (rhs3_type);
4217 return true;
4218 }
4219
4220 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4221 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4222 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4223 {
4224 error ("vector types expected in sad expression");
4225 debug_generic_expr (lhs_type);
4226 debug_generic_expr (rhs1_type);
4227 debug_generic_expr (rhs2_type);
4228 debug_generic_expr (rhs3_type);
4229 return true;
4230 }
4231
4232 return false;
4233
4234 case BIT_INSERT_EXPR:
4235 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4236 {
4237 error ("type mismatch in BIT_INSERT_EXPR");
4238 debug_generic_expr (lhs_type);
4239 debug_generic_expr (rhs1_type);
4240 return true;
4241 }
4242 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4243 && INTEGRAL_TYPE_P (rhs2_type))
4244 || (VECTOR_TYPE_P (rhs1_type)
4245 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4246 {
4247 error ("not allowed type combination in BIT_INSERT_EXPR");
4248 debug_generic_expr (rhs1_type);
4249 debug_generic_expr (rhs2_type);
4250 return true;
4251 }
4252 if (! tree_fits_uhwi_p (rhs3)
4253 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4254 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4255 {
4256 error ("invalid position or size in BIT_INSERT_EXPR");
4257 return true;
4258 }
4259 if (INTEGRAL_TYPE_P (rhs1_type))
4260 {
4261 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4262 if (bitpos >= TYPE_PRECISION (rhs1_type)
4263 || (bitpos + TYPE_PRECISION (rhs2_type)
4264 > TYPE_PRECISION (rhs1_type)))
4265 {
4266 error ("insertion out of range in BIT_INSERT_EXPR");
4267 return true;
4268 }
4269 }
4270 else if (VECTOR_TYPE_P (rhs1_type))
4271 {
4272 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4273 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4274 if (bitpos % bitsize != 0)
4275 {
4276 error ("vector insertion not at element boundary");
4277 return true;
4278 }
4279 }
4280 return false;
4281
4282 case DOT_PROD_EXPR:
4283 case REALIGN_LOAD_EXPR:
4284 /* FIXME. */
4285 return false;
4286
4287 default:
4288 gcc_unreachable ();
4289 }
4290 return false;
4291 }
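
/* An illustrative sketch of a statement this verifier accepts: after
   passes such as phiopt turn control flow back into a conditional
   expression, a function like

     int
     pick (int c, int a, int b)
     {
       return c ? a : b;
     }

   can contain the ternary assignment "_1 = c_2(D) != 0 ? a_3(D) : b_4(D);"
   (a COND_EXPR in the dumps); the checks above then require both arms
   to be trivially convertible to the type of the LHS.  */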
4292
4293 /* Verify a gimple assignment statement STMT with a single rhs.
4294 Returns true if anything is wrong. */
4295
4296 static bool
4297 verify_gimple_assign_single (gassign *stmt)
4298 {
4299 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4300 tree lhs = gimple_assign_lhs (stmt);
4301 tree lhs_type = TREE_TYPE (lhs);
4302 tree rhs1 = gimple_assign_rhs1 (stmt);
4303 tree rhs1_type = TREE_TYPE (rhs1);
4304 bool res = false;
4305
4306 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4307 {
4308 error ("non-trivial conversion at assignment");
4309 debug_generic_expr (lhs_type);
4310 debug_generic_expr (rhs1_type);
4311 return true;
4312 }
4313
4314 if (gimple_clobber_p (stmt)
4315 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4316 {
4317 error ("non-decl/MEM_REF LHS in clobber statement");
4318 debug_generic_expr (lhs);
4319 return true;
4320 }
4321
4322 if (handled_component_p (lhs)
4323 || TREE_CODE (lhs) == MEM_REF
4324 || TREE_CODE (lhs) == TARGET_MEM_REF)
4325 res |= verify_types_in_gimple_reference (lhs, true);
4326
4327 /* Special codes we cannot handle via their class. */
4328 switch (rhs_code)
4329 {
4330 case ADDR_EXPR:
4331 {
4332 tree op = TREE_OPERAND (rhs1, 0);
4333 if (!is_gimple_addressable (op))
4334 {
4335 error ("invalid operand in unary expression");
4336 return true;
4337 }
4338
4339 	/* Technically there is no longer a need for matching types, but
4340 	   gimple hygiene asks for this check.  In LTO we can combine
4341 	   incompatible units and thus encounter addresses of globals
4342 	   that have changed their type to a common one. */
4343 if (!in_lto_p
4344 && !types_compatible_p (TREE_TYPE (op),
4345 TREE_TYPE (TREE_TYPE (rhs1)))
4346 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4347 TREE_TYPE (op)))
4348 {
4349 error ("type mismatch in address expression");
4350 debug_generic_stmt (TREE_TYPE (rhs1));
4351 debug_generic_stmt (TREE_TYPE (op));
4352 return true;
4353 }
4354
4355 return verify_types_in_gimple_reference (op, true);
4356 }
4357
4358 /* tcc_reference */
4359 case INDIRECT_REF:
4360 error ("INDIRECT_REF in gimple IL");
4361 return true;
4362
4363 case COMPONENT_REF:
4364 case BIT_FIELD_REF:
4365 case ARRAY_REF:
4366 case ARRAY_RANGE_REF:
4367 case VIEW_CONVERT_EXPR:
4368 case REALPART_EXPR:
4369 case IMAGPART_EXPR:
4370 case TARGET_MEM_REF:
4371 case MEM_REF:
4372 if (!is_gimple_reg (lhs)
4373 && is_gimple_reg_type (TREE_TYPE (lhs)))
4374 {
4375 error ("invalid rhs for gimple memory store");
4376 debug_generic_stmt (lhs);
4377 debug_generic_stmt (rhs1);
4378 return true;
4379 }
4380 return res || verify_types_in_gimple_reference (rhs1, false);
4381
4382 /* tcc_constant */
4383 case SSA_NAME:
4384 case INTEGER_CST:
4385 case REAL_CST:
4386 case FIXED_CST:
4387 case COMPLEX_CST:
4388 case VECTOR_CST:
4389 case STRING_CST:
4390 return res;
4391
4392 /* tcc_declaration */
4393 case CONST_DECL:
4394 return res;
4395 case VAR_DECL:
4396 case PARM_DECL:
4397 if (!is_gimple_reg (lhs)
4398 && !is_gimple_reg (rhs1)
4399 && is_gimple_reg_type (TREE_TYPE (lhs)))
4400 {
4401 error ("invalid rhs for gimple memory store");
4402 debug_generic_stmt (lhs);
4403 debug_generic_stmt (rhs1);
4404 return true;
4405 }
4406 return res;
4407
4408 case CONSTRUCTOR:
4409 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4410 {
4411 unsigned int i;
4412 tree elt_i, elt_v, elt_t = NULL_TREE;
4413
4414 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4415 return res;
4416 	  /* For vector CONSTRUCTORs we require that either it is an empty
4417 	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4418 	     (then the element count must be correct to cover the whole
4419 	     outer vector and the index must be NULL on all elements), or it
4420 	     is a CONSTRUCTOR of scalar elements, where as an exception we
4421 	     allow a smaller number of elements (assuming zero filling) and
4422 	     consecutive indexes as compared to NULL indexes (such
4423 	     CONSTRUCTORs can appear in the IL from FEs). */
4424 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4425 {
4426 if (elt_t == NULL_TREE)
4427 {
4428 elt_t = TREE_TYPE (elt_v);
4429 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4430 {
4431 		      /* ELT_T is already the vector type of the elements here. */
4432 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4433 TREE_TYPE (elt_t)))
4434 {
4435 error ("incorrect type of vector CONSTRUCTOR"
4436 " elements");
4437 debug_generic_stmt (rhs1);
4438 return true;
4439 }
4440 else if (CONSTRUCTOR_NELTS (rhs1)
4441 * TYPE_VECTOR_SUBPARTS (elt_t)
4442 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4443 {
4444 error ("incorrect number of vector CONSTRUCTOR"
4445 " elements");
4446 debug_generic_stmt (rhs1);
4447 return true;
4448 }
4449 }
4450 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4451 elt_t))
4452 {
4453 error ("incorrect type of vector CONSTRUCTOR elements");
4454 debug_generic_stmt (rhs1);
4455 return true;
4456 }
4457 else if (CONSTRUCTOR_NELTS (rhs1)
4458 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4459 {
4460 error ("incorrect number of vector CONSTRUCTOR elements");
4461 debug_generic_stmt (rhs1);
4462 return true;
4463 }
4464 }
4465 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4466 {
4467 error ("incorrect type of vector CONSTRUCTOR elements");
4468 debug_generic_stmt (rhs1);
4469 return true;
4470 }
4471 if (elt_i != NULL_TREE
4472 && (TREE_CODE (elt_t) == VECTOR_TYPE
4473 || TREE_CODE (elt_i) != INTEGER_CST
4474 || compare_tree_int (elt_i, i) != 0))
4475 {
4476 error ("vector CONSTRUCTOR with non-NULL element index");
4477 debug_generic_stmt (rhs1);
4478 return true;
4479 }
4480 if (!is_gimple_val (elt_v))
4481 {
4482 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4483 debug_generic_stmt (rhs1);
4484 return true;
4485 }
4486 }
4487 }
4488 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4489 {
4490 error ("non-vector CONSTRUCTOR with elements");
4491 debug_generic_stmt (rhs1);
4492 return true;
4493 }
4494 return res;
4495 case OBJ_TYPE_REF:
4496 case ASSERT_EXPR:
4497 case WITH_SIZE_EXPR:
4498 /* FIXME. */
4499 return res;
4500
4501 default:;
4502 }
4503
4504 return res;
4505 }
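
/* A sketch of the vector CONSTRUCTOR shapes accepted above, using the
   GNU vector extension (names are illustrative):

     typedef int v4si __attribute__ ((vector_size (16)));

     v4si
     build (int a, int b)
     {
       return (v4si) { a, b };
     }

   The scalar-element CONSTRUCTOR above may omit trailing elements
   (they are zero-filled).  A CONSTRUCTOR of smaller vectors, say two
   8-byte halves, is also fine, but then the pieces must cover the
   whole v4si exactly and all indexes must be NULL.  */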
4506
4507 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4508 is a problem, otherwise false. */
4509
4510 static bool
4511 verify_gimple_assign (gassign *stmt)
4512 {
4513 switch (gimple_assign_rhs_class (stmt))
4514 {
4515 case GIMPLE_SINGLE_RHS:
4516 return verify_gimple_assign_single (stmt);
4517
4518 case GIMPLE_UNARY_RHS:
4519 return verify_gimple_assign_unary (stmt);
4520
4521 case GIMPLE_BINARY_RHS:
4522 return verify_gimple_assign_binary (stmt);
4523
4524 case GIMPLE_TERNARY_RHS:
4525 return verify_gimple_assign_ternary (stmt);
4526
4527 default:
4528 gcc_unreachable ();
4529 }
4530 }
4531
4532 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4533 is a problem, otherwise false. */
4534
4535 static bool
4536 verify_gimple_return (greturn *stmt)
4537 {
4538 tree op = gimple_return_retval (stmt);
4539 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4540
4541 /* We cannot test for present return values as we do not fix up missing
4542 return values from the original source. */
4543 if (op == NULL)
4544 return false;
4545
4546 if (!is_gimple_val (op)
4547 && TREE_CODE (op) != RESULT_DECL)
4548 {
4549 error ("invalid operand in return statement");
4550 debug_generic_stmt (op);
4551 return true;
4552 }
4553
4554 if ((TREE_CODE (op) == RESULT_DECL
4555 && DECL_BY_REFERENCE (op))
4556 || (TREE_CODE (op) == SSA_NAME
4557 && SSA_NAME_VAR (op)
4558 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4559 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4560 op = TREE_TYPE (op);
4561
4562 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4563 {
4564 error ("invalid conversion in return statement");
4565 debug_generic_stmt (restype);
4566 debug_generic_stmt (TREE_TYPE (op));
4567 return true;
4568 }
4569
4570 return false;
4571 }
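
/* A sketch of why the DECL_BY_REFERENCE unwrapping above is needed
   (illustrative): for a function returning a large aggregate, e.g.

     struct big { int a[64]; };

     struct big
     make (void)
     {
       struct big b = { { 0 } };
       return b;
     }

   the result may be returned via an invisible reference, in which case
   the RESULT_DECL has pointer-to-struct-big type; stripping one level
   with TREE_TYPE makes it comparable to the declared return type.  */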
4572
4573
4574 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4575 is a problem, otherwise false. */
4576
4577 static bool
4578 verify_gimple_goto (ggoto *stmt)
4579 {
4580 tree dest = gimple_goto_dest (stmt);
4581
4582 /* ??? We have two canonical forms of direct goto destinations, a
4583 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4584 if (TREE_CODE (dest) != LABEL_DECL
4585 && (!is_gimple_val (dest)
4586 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4587 {
4588 error ("goto destination is neither a label nor a pointer");
4589 return true;
4590 }
4591
4592 return false;
4593 }
4594
4595 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4596 is a problem, otherwise false. */
4597
4598 static bool
4599 verify_gimple_switch (gswitch *stmt)
4600 {
4601 unsigned int i, n;
4602 tree elt, prev_upper_bound = NULL_TREE;
4603 tree index_type, elt_type = NULL_TREE;
4604
4605 if (!is_gimple_val (gimple_switch_index (stmt)))
4606 {
4607 error ("invalid operand to switch statement");
4608 debug_generic_stmt (gimple_switch_index (stmt));
4609 return true;
4610 }
4611
4612 index_type = TREE_TYPE (gimple_switch_index (stmt));
4613 if (! INTEGRAL_TYPE_P (index_type))
4614 {
4615 error ("non-integral type switch statement");
4616 debug_generic_expr (index_type);
4617 return true;
4618 }
4619
4620 elt = gimple_switch_label (stmt, 0);
4621 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4622 {
4623 error ("invalid default case label in switch statement");
4624 debug_generic_expr (elt);
4625 return true;
4626 }
4627
4628 n = gimple_switch_num_labels (stmt);
4629 for (i = 1; i < n; i++)
4630 {
4631 elt = gimple_switch_label (stmt, i);
4632
4633 if (! CASE_LOW (elt))
4634 {
4635 error ("invalid case label in switch statement");
4636 debug_generic_expr (elt);
4637 return true;
4638 }
4639 if (CASE_HIGH (elt)
4640 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4641 {
4642 error ("invalid case range in switch statement");
4643 debug_generic_expr (elt);
4644 return true;
4645 }
4646
4647 if (elt_type)
4648 {
4649 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4650 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4651 {
4652 error ("type mismatch for case label in switch statement");
4653 debug_generic_expr (elt);
4654 return true;
4655 }
4656 }
4657 else
4658 {
4659 elt_type = TREE_TYPE (CASE_LOW (elt));
4660 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4661 {
4662 error ("type precision mismatch in switch statement");
4663 return true;
4664 }
4665 }
4666
4667 if (prev_upper_bound)
4668 {
4669 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4670 {
4671 error ("case labels not sorted in switch statement");
4672 return true;
4673 }
4674 }
4675
4676 prev_upper_bound = CASE_HIGH (elt);
4677 if (! prev_upper_bound)
4678 prev_upper_bound = CASE_LOW (elt);
4679 }
4680
4681 return false;
4682 }
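
/* A minimal example of the shape this checker expects (illustrative):

     int
     classify (int x)
     {
       switch (x)
	 {
	 case 1:
	   return 10;
	 case 2 ... 5:
	   return 20;
	 default:
	   return 0;
	 }
     }

   Here "case 1" has CASE_LOW 1 and no CASE_HIGH, the GNU range
   "case 2 ... 5" has CASE_LOW 2 and CASE_HIGH 5, and the default
   label, with both bounds NULL, is element 0 of the GIMPLE_SWITCH
   label vector.  The remaining labels must have matching types and be
   sorted in increasing, non-overlapping order, as verified in the
   loop above.  */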
4683
4684 /* Verify a gimple debug statement STMT.
4685 Returns true if anything is wrong. */
4686
4687 static bool
4688 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4689 {
4690   /* There isn't much that could be wrong in a gimple debug stmt.  A
4691      gimple debug bind stmt, for example, maps a tree that is usually
4692      a VAR_DECL or a PARM_DECL (but may also be some scalarized
4693      component or member of an aggregate type) to another tree that
4694      can be an arbitrary expression.  These stmts expand into debug
4695      insns and are converted to debug notes by var-tracking.c. */
4696 return false;
4697 }
4698
4699 /* Verify a gimple label statement STMT.
4700 Returns true if anything is wrong. */
4701
4702 static bool
4703 verify_gimple_label (glabel *stmt)
4704 {
4705 tree decl = gimple_label_label (stmt);
4706 int uid;
4707 bool err = false;
4708
4709 if (TREE_CODE (decl) != LABEL_DECL)
4710 return true;
4711 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4712 && DECL_CONTEXT (decl) != current_function_decl)
4713 {
4714 error ("label's context is not the current function decl");
4715 err |= true;
4716 }
4717
4718 uid = LABEL_DECL_UID (decl);
4719 if (cfun->cfg
4720 && (uid == -1
4721 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4722 {
4723 error ("incorrect entry in label_to_block_map");
4724 err |= true;
4725 }
4726
4727 uid = EH_LANDING_PAD_NR (decl);
4728 if (uid)
4729 {
4730 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4731 if (decl != lp->post_landing_pad)
4732 {
4733 error ("incorrect setting of landing pad number");
4734 err |= true;
4735 }
4736 }
4737
4738 return err;
4739 }
4740
4741 /* Verify a gimple cond statement STMT.
4742 Returns true if anything is wrong. */
4743
4744 static bool
4745 verify_gimple_cond (gcond *stmt)
4746 {
4747 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4748 {
4749 error ("invalid comparison code in gimple cond");
4750 return true;
4751 }
4752 if (!(!gimple_cond_true_label (stmt)
4753 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4754 || !(!gimple_cond_false_label (stmt)
4755 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4756 {
4757 error ("invalid labels in gimple cond");
4758 return true;
4759 }
4760
4761 return verify_gimple_comparison (boolean_type_node,
4762 gimple_cond_lhs (stmt),
4763 gimple_cond_rhs (stmt),
4764 gimple_cond_code (stmt));
4765 }
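
/* For reference (a sketch): a GIMPLE_COND always carries an explicit
   comparison, never a bare boolean, so source such as

     if (b)
       f ();

   is gimplified to "if (b != 0) goto <then>; else goto <else>;", which
   is exactly the tcc_comparison shape enforced above.  */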
4766
4767 /* Verify the GIMPLE statement STMT. Returns true if there is an
4768 error, otherwise false. */
4769
4770 static bool
4771 verify_gimple_stmt (gimple *stmt)
4772 {
4773 switch (gimple_code (stmt))
4774 {
4775 case GIMPLE_ASSIGN:
4776 return verify_gimple_assign (as_a <gassign *> (stmt));
4777
4778 case GIMPLE_LABEL:
4779 return verify_gimple_label (as_a <glabel *> (stmt));
4780
4781 case GIMPLE_CALL:
4782 return verify_gimple_call (as_a <gcall *> (stmt));
4783
4784 case GIMPLE_COND:
4785 return verify_gimple_cond (as_a <gcond *> (stmt));
4786
4787 case GIMPLE_GOTO:
4788 return verify_gimple_goto (as_a <ggoto *> (stmt));
4789
4790 case GIMPLE_SWITCH:
4791 return verify_gimple_switch (as_a <gswitch *> (stmt));
4792
4793 case GIMPLE_RETURN:
4794 return verify_gimple_return (as_a <greturn *> (stmt));
4795
4796 case GIMPLE_ASM:
4797 return false;
4798
4799 case GIMPLE_TRANSACTION:
4800 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4801
4802 /* Tuples that do not have tree operands. */
4803 case GIMPLE_NOP:
4804 case GIMPLE_PREDICT:
4805 case GIMPLE_RESX:
4806 case GIMPLE_EH_DISPATCH:
4807 case GIMPLE_EH_MUST_NOT_THROW:
4808 return false;
4809
4810 CASE_GIMPLE_OMP:
4811       /* OpenMP directives are validated by the FE and never operated
4812 	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
4813 	 non-gimple expressions when the main index variable has had
4814 	 its address taken.  This does not affect the loop itself
4815 	 because the header of a GIMPLE_OMP_FOR is merely used to determine
4816 	 how to set up the parallel iteration. */
4817 return false;
4818
4819 case GIMPLE_DEBUG:
4820 return verify_gimple_debug (stmt);
4821
4822 default:
4823 gcc_unreachable ();
4824 }
4825 }
4826
4827 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4828 and false otherwise. */
4829
4830 static bool
4831 verify_gimple_phi (gimple *phi)
4832 {
4833 bool err = false;
4834 unsigned i;
4835 tree phi_result = gimple_phi_result (phi);
4836 bool virtual_p;
4837
4838 if (!phi_result)
4839 {
4840 error ("invalid PHI result");
4841 return true;
4842 }
4843
4844 virtual_p = virtual_operand_p (phi_result);
4845 if (TREE_CODE (phi_result) != SSA_NAME
4846 || (virtual_p
4847 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4848 {
4849 error ("invalid PHI result");
4850 err = true;
4851 }
4852
4853 for (i = 0; i < gimple_phi_num_args (phi); i++)
4854 {
4855 tree t = gimple_phi_arg_def (phi, i);
4856
4857 if (!t)
4858 {
4859 error ("missing PHI def");
4860 err |= true;
4861 continue;
4862 }
4863 /* Addressable variables do have SSA_NAMEs but they
4864 are not considered gimple values. */
4865 else if ((TREE_CODE (t) == SSA_NAME
4866 && virtual_p != virtual_operand_p (t))
4867 || (virtual_p
4868 && (TREE_CODE (t) != SSA_NAME
4869 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4870 || (!virtual_p
4871 && !is_gimple_val (t)))
4872 {
4873 error ("invalid PHI argument");
4874 debug_generic_expr (t);
4875 err |= true;
4876 }
4877 #ifdef ENABLE_TYPES_CHECKING
4878 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4879 {
4880 error ("incompatible types in PHI argument %u", i);
4881 debug_generic_stmt (TREE_TYPE (phi_result));
4882 debug_generic_stmt (TREE_TYPE (t));
4883 err |= true;
4884 }
4885 #endif
4886 }
4887
4888 return err;
4889 }
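
/* An illustrative sketch of what the PHI checks accept: once

     int
     merge (int c, int a, int b)
     {
       int x;
       if (c)
	 x = a;
       else
	 x = b;
       return x;
     }

   is in SSA form, the join block contains a node like
   "x_3 = PHI <a_1(D)(3), b_2(D)(4)>".  Each argument must be a GIMPLE
   value whose type is trivially convertible to that of the result,
   and a virtual PHI may only merge the virtual operand.  */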
4890
4891 /* Verify the GIMPLE statements inside the sequence STMTS. */
4892
4893 static bool
4894 verify_gimple_in_seq_2 (gimple_seq stmts)
4895 {
4896 gimple_stmt_iterator ittr;
4897 bool err = false;
4898
4899 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4900 {
4901 gimple *stmt = gsi_stmt (ittr);
4902
4903 switch (gimple_code (stmt))
4904 {
4905 case GIMPLE_BIND:
4906 err |= verify_gimple_in_seq_2 (
4907 gimple_bind_body (as_a <gbind *> (stmt)));
4908 break;
4909
4910 case GIMPLE_TRY:
4911 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4912 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4913 break;
4914
4915 case GIMPLE_EH_FILTER:
4916 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4917 break;
4918
4919 case GIMPLE_EH_ELSE:
4920 {
4921 geh_else *eh_else = as_a <geh_else *> (stmt);
4922 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4923 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4924 }
4925 break;
4926
4927 case GIMPLE_CATCH:
4928 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4929 as_a <gcatch *> (stmt)));
4930 break;
4931
4932 case GIMPLE_TRANSACTION:
4933 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4934 break;
4935
4936 default:
4937 {
4938 bool err2 = verify_gimple_stmt (stmt);
4939 if (err2)
4940 debug_gimple_stmt (stmt);
4941 err |= err2;
4942 }
4943 }
4944 }
4945
4946 return err;
4947 }
4948
4949 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4950 is a problem, otherwise false. */
4951
4952 static bool
4953 verify_gimple_transaction (gtransaction *stmt)
4954 {
4955 tree lab;
4956
4957 lab = gimple_transaction_label_norm (stmt);
4958 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4959 return true;
4960 lab = gimple_transaction_label_uninst (stmt);
4961 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4962 return true;
4963 lab = gimple_transaction_label_over (stmt);
4964 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4965 return true;
4966
4967 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4968 }
4969
4970
4971 /* Verify the GIMPLE statements inside the statement list STMTS. */
4972
4973 DEBUG_FUNCTION void
4974 verify_gimple_in_seq (gimple_seq stmts)
4975 {
4976 timevar_push (TV_TREE_STMT_VERIFY);
4977 if (verify_gimple_in_seq_2 (stmts))
4978 internal_error ("verify_gimple failed");
4979 timevar_pop (TV_TREE_STMT_VERIFY);
4980 }
4981
4982 /* Return true when T can be shared. */
4983
4984 static bool
4985 tree_node_can_be_shared (tree t)
4986 {
4987 if (IS_TYPE_OR_DECL_P (t)
4988 || is_gimple_min_invariant (t)
4989 || TREE_CODE (t) == SSA_NAME
4990 || t == error_mark_node
4991 || TREE_CODE (t) == IDENTIFIER_NODE)
4992 return true;
4993
4994 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4995 return true;
4996
4997 if (DECL_P (t))
4998 return true;
4999
5000 return false;
5001 }
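
/* For example (illustrative): the type node for "int" and the
   INTEGER_CST for 42 are freely shared between statements, whereas in

     struct s { int b; } a;

     int
     f (void)
     {
       return a.b + a.b;
     }

   each use of "a.b" must be a distinct COMPONENT_REF node; the
   sharing walk below catches accidental reuse of such trees.  */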
5002
5003 /* Called via walk_tree. Verify tree sharing. */
5004
5005 static tree
5006 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5007 {
5008 hash_set<void *> *visited = (hash_set<void *> *) data;
5009
5010 if (tree_node_can_be_shared (*tp))
5011 {
5012 *walk_subtrees = false;
5013 return NULL;
5014 }
5015
5016 if (visited->add (*tp))
5017 return *tp;
5018
5019 return NULL;
5020 }
5021
5022 /* Called via walk_gimple_stmt. Verify tree sharing. */
5023
5024 static tree
5025 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5026 {
5027 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5028 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5029 }
5030
5031 static bool eh_error_found;
5032 bool
5033 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5034 hash_set<gimple *> *visited)
5035 {
5036 if (!visited->contains (stmt))
5037 {
5038 error ("dead STMT in EH table");
5039 debug_gimple_stmt (stmt);
5040 eh_error_found = true;
5041 }
5042 return true;
5043 }
5044
5045 /* Verify that the block of location LOC is in BLOCKS. */
5046
5047 static bool
5048 verify_location (hash_set<tree> *blocks, location_t loc)
5049 {
5050 tree block = LOCATION_BLOCK (loc);
5051 if (block != NULL_TREE
5052 && !blocks->contains (block))
5053 {
5054 error ("location references block not in block tree");
5055 return true;
5056 }
5057 if (block != NULL_TREE)
5058 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5059 return false;
5060 }
5061
5062 /* Called via walk_tree. Verify that expressions have no blocks. */
5063
5064 static tree
5065 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5066 {
5067 if (!EXPR_P (*tp))
5068 {
5069 *walk_subtrees = false;
5070 return NULL;
5071 }
5072
5073 location_t loc = EXPR_LOCATION (*tp);
5074 if (LOCATION_BLOCK (loc) != NULL)
5075 return *tp;
5076
5077 return NULL;
5078 }
5079
5080 /* Called via walk_tree. Verify locations of expressions. */
5081
5082 static tree
5083 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5084 {
5085 hash_set<tree> *blocks = (hash_set<tree> *) data;
5086
5087 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5088 {
5089 tree t = DECL_DEBUG_EXPR (*tp);
5090 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5091 if (addr)
5092 return addr;
5093 }
5094 if ((VAR_P (*tp)
5095 || TREE_CODE (*tp) == PARM_DECL
5096 || TREE_CODE (*tp) == RESULT_DECL)
5097 && DECL_HAS_VALUE_EXPR_P (*tp))
5098 {
5099 tree t = DECL_VALUE_EXPR (*tp);
5100 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5101 if (addr)
5102 return addr;
5103 }
5104
5105 if (!EXPR_P (*tp))
5106 {
5107 *walk_subtrees = false;
5108 return NULL;
5109 }
5110
5111 location_t loc = EXPR_LOCATION (*tp);
5112 if (verify_location (blocks, loc))
5113 return *tp;
5114
5115 return NULL;
5116 }
5117
5118 /* Called via walk_gimple_op. Verify locations of expressions. */
5119
5120 static tree
5121 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5122 {
5123 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5124 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5125 }
5126
5127 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5128
5129 static void
5130 collect_subblocks (hash_set<tree> *blocks, tree block)
5131 {
5132 tree t;
5133 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5134 {
5135 blocks->add (t);
5136 collect_subblocks (blocks, t);
5137 }
5138 }
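
/* Sketch: the BLOCK tree collected here mirrors the lexical scopes of
   the source.  For

     void
     g (void)
     {
       int a;
       {
	 int b;
       }
     }

   DECL_INITIAL (g) is the outermost BLOCK holding "a", and the scope
   of "b" is reached through BLOCK_SUBBLOCKS; a location that refers
   to a block outside this set is stale.  */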
5139
5140 /* Verify the GIMPLE statements in the CFG of FN. */
5141
5142 DEBUG_FUNCTION void
5143 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5144 {
5145 basic_block bb;
5146 bool err = false;
5147
5148 timevar_push (TV_TREE_STMT_VERIFY);
5149 hash_set<void *> visited;
5150 hash_set<gimple *> visited_stmts;
5151
5152 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5153 hash_set<tree> blocks;
5154 if (DECL_INITIAL (fn->decl))
5155 {
5156 blocks.add (DECL_INITIAL (fn->decl));
5157 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5158 }
5159
5160 FOR_EACH_BB_FN (bb, fn)
5161 {
5162 gimple_stmt_iterator gsi;
5163
5164 for (gphi_iterator gpi = gsi_start_phis (bb);
5165 !gsi_end_p (gpi);
5166 gsi_next (&gpi))
5167 {
5168 gphi *phi = gpi.phi ();
5169 bool err2 = false;
5170 unsigned i;
5171
5172 visited_stmts.add (phi);
5173
5174 if (gimple_bb (phi) != bb)
5175 {
5176 error ("gimple_bb (phi) is set to a wrong basic block");
5177 err2 = true;
5178 }
5179
5180 err2 |= verify_gimple_phi (phi);
5181
5182 /* Only PHI arguments have locations. */
5183 if (gimple_location (phi) != UNKNOWN_LOCATION)
5184 {
5185 error ("PHI node with location");
5186 err2 = true;
5187 }
5188
5189 for (i = 0; i < gimple_phi_num_args (phi); i++)
5190 {
5191 tree arg = gimple_phi_arg_def (phi, i);
5192 tree addr = walk_tree (&arg, verify_node_sharing_1,
5193 &visited, NULL);
5194 if (addr)
5195 {
5196 error ("incorrect sharing of tree nodes");
5197 debug_generic_expr (addr);
5198 err2 |= true;
5199 }
5200 location_t loc = gimple_phi_arg_location (phi, i);
5201 if (virtual_operand_p (gimple_phi_result (phi))
5202 && loc != UNKNOWN_LOCATION)
5203 {
5204 error ("virtual PHI with argument locations");
5205 err2 = true;
5206 }
5207 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5208 if (addr)
5209 {
5210 debug_generic_expr (addr);
5211 err2 = true;
5212 }
5213 err2 |= verify_location (&blocks, loc);
5214 }
5215
5216 if (err2)
5217 debug_gimple_stmt (phi);
5218 err |= err2;
5219 }
5220
5221 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5222 {
5223 gimple *stmt = gsi_stmt (gsi);
5224 bool err2 = false;
5225 struct walk_stmt_info wi;
5226 tree addr;
5227 int lp_nr;
5228
5229 visited_stmts.add (stmt);
5230
5231 if (gimple_bb (stmt) != bb)
5232 {
5233 error ("gimple_bb (stmt) is set to a wrong basic block");
5234 err2 = true;
5235 }
5236
5237 err2 |= verify_gimple_stmt (stmt);
5238 err2 |= verify_location (&blocks, gimple_location (stmt));
5239
5240 memset (&wi, 0, sizeof (wi));
5241 wi.info = (void *) &visited;
5242 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5243 if (addr)
5244 {
5245 error ("incorrect sharing of tree nodes");
5246 debug_generic_expr (addr);
5247 err2 |= true;
5248 }
5249
5250 memset (&wi, 0, sizeof (wi));
5251 wi.info = (void *) &blocks;
5252 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5253 if (addr)
5254 {
5255 debug_generic_expr (addr);
5256 err2 |= true;
5257 }
5258
5259 /* ??? Instead of not checking these stmts at all the walker
5260 should know its context via wi. */
5261 if (!is_gimple_debug (stmt)
5262 && !is_gimple_omp (stmt))
5263 {
5264 memset (&wi, 0, sizeof (wi));
5265 addr = walk_gimple_op (stmt, verify_expr, &wi);
5266 if (addr)
5267 {
5268 debug_generic_expr (addr);
5269 inform (gimple_location (stmt), "in statement");
5270 err2 |= true;
5271 }
5272 }
5273
5274 /* If the statement is marked as part of an EH region, then it is
5275 expected that the statement could throw. Verify that when we
5276 have optimizations that simplify statements such that we prove
5277 that they cannot throw, that we update other data structures
5278 to match. */
5279 lp_nr = lookup_stmt_eh_lp (stmt);
5280 if (lp_nr > 0)
5281 {
5282 if (!stmt_could_throw_p (stmt))
5283 {
5284 if (verify_nothrow)
5285 {
5286 error ("statement marked for throw, but doesn%'t");
5287 err2 |= true;
5288 }
5289 }
5290 else if (!gsi_one_before_end_p (gsi))
5291 {
5292 error ("statement marked for throw in middle of block");
5293 err2 |= true;
5294 }
5295 }
5296
5297 if (err2)
5298 debug_gimple_stmt (stmt);
5299 err |= err2;
5300 }
5301 }
5302
5303 eh_error_found = false;
5304 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5305 if (eh_table)
5306 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5307 (&visited_stmts);
5308
5309 if (err || eh_error_found)
5310 internal_error ("verify_gimple failed");
5311
5312 verify_histograms ();
5313 timevar_pop (TV_TREE_STMT_VERIFY);
5314 }
5315
5316
5317 /* Verifies that the flow information is OK. */
5318
5319 static int
5320 gimple_verify_flow_info (void)
5321 {
5322 int err = 0;
5323 basic_block bb;
5324 gimple_stmt_iterator gsi;
5325 gimple *stmt;
5326 edge e;
5327 edge_iterator ei;
5328
5329 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5330 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5331 {
5332 error ("ENTRY_BLOCK has IL associated with it");
5333 err = 1;
5334 }
5335
5336 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5337 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5338 {
5339 error ("EXIT_BLOCK has IL associated with it");
5340 err = 1;
5341 }
5342
5343 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5344 if (e->flags & EDGE_FALLTHRU)
5345 {
5346 error ("fallthru to exit from bb %d", e->src->index);
5347 err = 1;
5348 }
5349
5350 FOR_EACH_BB_FN (bb, cfun)
5351 {
5352 bool found_ctrl_stmt = false;
5353
5354 stmt = NULL;
5355
5356       /* Skip the labels at the start of the basic block. */
5357 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5358 {
5359 tree label;
5360 gimple *prev_stmt = stmt;
5361
5362 stmt = gsi_stmt (gsi);
5363
5364 if (gimple_code (stmt) != GIMPLE_LABEL)
5365 break;
5366
5367 label = gimple_label_label (as_a <glabel *> (stmt));
5368 if (prev_stmt && DECL_NONLOCAL (label))
5369 {
5370 error ("nonlocal label ");
5371 print_generic_expr (stderr, label);
5372 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5373 bb->index);
5374 err = 1;
5375 }
5376
5377 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5378 {
5379 error ("EH landing pad label ");
5380 print_generic_expr (stderr, label);
5381 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5382 bb->index);
5383 err = 1;
5384 }
5385
5386 if (label_to_block (label) != bb)
5387 {
5388 error ("label ");
5389 print_generic_expr (stderr, label);
5390 fprintf (stderr, " to block does not match in bb %d",
5391 bb->index);
5392 err = 1;
5393 }
5394
5395 if (decl_function_context (label) != current_function_decl)
5396 {
5397 error ("label ");
5398 print_generic_expr (stderr, label);
5399 fprintf (stderr, " has incorrect context in bb %d",
5400 bb->index);
5401 err = 1;
5402 }
5403 }
5404
5405       /* Verify that the body of basic block BB is free of control flow. */
5406 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5407 {
5408 gimple *stmt = gsi_stmt (gsi);
5409
5410 if (found_ctrl_stmt)
5411 {
5412 error ("control flow in the middle of basic block %d",
5413 bb->index);
5414 err = 1;
5415 }
5416
5417 if (stmt_ends_bb_p (stmt))
5418 found_ctrl_stmt = true;
5419
5420 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5421 {
5422 error ("label ");
5423 print_generic_expr (stderr, gimple_label_label (label_stmt));
5424 fprintf (stderr, " in the middle of basic block %d", bb->index);
5425 err = 1;
5426 }
5427 }
5428
5429 gsi = gsi_last_bb (bb);
5430 if (gsi_end_p (gsi))
5431 continue;
5432
5433 stmt = gsi_stmt (gsi);
5434
5435 if (gimple_code (stmt) == GIMPLE_LABEL)
5436 continue;
5437
5438 err |= verify_eh_edges (stmt);
5439
5440 if (is_ctrl_stmt (stmt))
5441 {
5442 FOR_EACH_EDGE (e, ei, bb->succs)
5443 if (e->flags & EDGE_FALLTHRU)
5444 {
5445 error ("fallthru edge after a control statement in bb %d",
5446 bb->index);
5447 err = 1;
5448 }
5449 }
5450
5451 if (gimple_code (stmt) != GIMPLE_COND)
5452 {
5453 	  /* Verify that no outgoing edge has EDGE_TRUE_VALUE or
5454 	     EDGE_FALSE_VALUE set after anything other than a GIMPLE_COND. */
5455 FOR_EACH_EDGE (e, ei, bb->succs)
5456 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5457 {
5458 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5459 bb->index);
5460 err = 1;
5461 }
5462 }
5463
5464 switch (gimple_code (stmt))
5465 {
5466 case GIMPLE_COND:
5467 {
5468 edge true_edge;
5469 edge false_edge;
5470
5471 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5472
5473 if (!true_edge
5474 || !false_edge
5475 || !(true_edge->flags & EDGE_TRUE_VALUE)
5476 || !(false_edge->flags & EDGE_FALSE_VALUE)
5477 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5478 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5479 || EDGE_COUNT (bb->succs) >= 3)
5480 {
5481 error ("wrong outgoing edge flags at end of bb %d",
5482 bb->index);
5483 err = 1;
5484 }
5485 }
5486 break;
5487
5488 case GIMPLE_GOTO:
5489 if (simple_goto_p (stmt))
5490 {
5491 error ("explicit goto at end of bb %d", bb->index);
5492 err = 1;
5493 }
5494 else
5495 {
5496 /* FIXME. We should double check that the labels in the
5497 destination blocks have their address taken. */
5498 FOR_EACH_EDGE (e, ei, bb->succs)
5499 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5500 | EDGE_FALSE_VALUE))
5501 || !(e->flags & EDGE_ABNORMAL))
5502 {
5503 error ("wrong outgoing edge flags at end of bb %d",
5504 bb->index);
5505 err = 1;
5506 }
5507 }
5508 break;
5509
5510 case GIMPLE_CALL:
5511 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5512 break;
5513 /* fallthru */
5514 case GIMPLE_RETURN:
5515 if (!single_succ_p (bb)
5516 || (single_succ_edge (bb)->flags
5517 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5518 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5519 {
5520 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5521 err = 1;
5522 }
5523 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5524 {
5525 error ("return edge does not point to exit in bb %d",
5526 bb->index);
5527 err = 1;
5528 }
5529 break;
5530
5531 case GIMPLE_SWITCH:
5532 {
5533 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5534 tree prev;
5535 edge e;
5536 size_t i, n;
5537
5538 n = gimple_switch_num_labels (switch_stmt);
5539
5540 /* Mark all the destination basic blocks. */
5541 for (i = 0; i < n; ++i)
5542 {
5543 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5544 basic_block label_bb = label_to_block (lab);
5545 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5546 label_bb->aux = (void *)1;
5547 }
5548
5549 /* Verify that the case labels are sorted. */
5550 prev = gimple_switch_label (switch_stmt, 0);
5551 for (i = 1; i < n; ++i)
5552 {
5553 tree c = gimple_switch_label (switch_stmt, i);
5554 if (!CASE_LOW (c))
5555 {
5556 error ("found default case not at the start of "
5557 "case vector");
5558 err = 1;
5559 continue;
5560 }
5561 if (CASE_LOW (prev)
5562 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5563 {
5564 error ("case labels not sorted: ");
5565 print_generic_expr (stderr, prev);
5566 fprintf (stderr," is greater than ");
5567 print_generic_expr (stderr, c);
5568 fprintf (stderr," but comes before it.\n");
5569 err = 1;
5570 }
5571 prev = c;
5572 }
5573 /* VRP will remove the default case if it can prove it will
5574 never be executed. So do not verify there always exists
5575 a default case here. */
5576
5577 FOR_EACH_EDGE (e, ei, bb->succs)
5578 {
5579 if (!e->dest->aux)
5580 {
5581 error ("extra outgoing edge %d->%d",
5582 bb->index, e->dest->index);
5583 err = 1;
5584 }
5585
5586 e->dest->aux = (void *)2;
5587 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5588 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5589 {
5590 error ("wrong outgoing edge flags at end of bb %d",
5591 bb->index);
5592 err = 1;
5593 }
5594 }
5595
5596 /* Check that we have all of them. */
5597 for (i = 0; i < n; ++i)
5598 {
5599 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5600 basic_block label_bb = label_to_block (lab);
5601
5602 if (label_bb->aux != (void *)2)
5603 {
5604 error ("missing edge %i->%i", bb->index, label_bb->index);
5605 err = 1;
5606 }
5607 }
5608
5609 FOR_EACH_EDGE (e, ei, bb->succs)
5610 e->dest->aux = (void *)0;
5611 }
5612 break;
5613
5614 case GIMPLE_EH_DISPATCH:
5615 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5616 break;
5617
5618 default:
5619 break;
5620 }
5621 }
5622
5623 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5624 verify_dominators (CDI_DOMINATORS);
5625
5626 return err;
5627 }
5628
5629
5630 /* Updates phi nodes after creating a forwarder block joined
5631 by edge FALLTHRU. */
5632
5633 static void
5634 gimple_make_forwarder_block (edge fallthru)
5635 {
5636 edge e;
5637 edge_iterator ei;
5638 basic_block dummy, bb;
5639 tree var;
5640 gphi_iterator gsi;
5641
5642 dummy = fallthru->src;
5643 bb = fallthru->dest;
5644
5645 if (single_pred_p (bb))
5646 return;
5647
5648 /* If we redirected a branch we must create new PHI nodes at the
5649 start of BB. */
5650 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5651 {
5652 gphi *phi, *new_phi;
5653
5654 phi = gsi.phi ();
5655 var = gimple_phi_result (phi);
5656 new_phi = create_phi_node (var, bb);
5657 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5658 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5659 UNKNOWN_LOCATION);
5660 }
5661
5662 /* Add the arguments we have stored on edges. */
5663 FOR_EACH_EDGE (e, ei, bb->preds)
5664 {
5665 if (e == fallthru)
5666 continue;
5667
5668 flush_pending_stmts (e);
5669 }
5670 }
5671
5672
5673 /* Return a non-special label at the head of basic block BB.
5674    Create one if it doesn't exist. */
5675
5676 tree
5677 gimple_block_label (basic_block bb)
5678 {
5679 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5680 bool first = true;
5681 tree label;
5682 glabel *stmt;
5683
5684 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5685 {
5686 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5687 if (!stmt)
5688 break;
5689 label = gimple_label_label (stmt);
5690 if (!DECL_NONLOCAL (label))
5691 {
5692 if (!first)
5693 gsi_move_before (&i, &s);
5694 return label;
5695 }
5696 }
5697
5698 label = create_artificial_label (UNKNOWN_LOCATION);
5699 stmt = gimple_build_label (label);
5700 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5701 return label;
5702 }
5703
5704
5705 /* Attempt to perform edge redirection by replacing a possibly complex
5706 jump instruction by a goto or by removing the jump completely.
5707 This can apply only if all edges now point to the same block. The
5708 parameters and return values are equivalent to
5709 redirect_edge_and_branch. */
5710
5711 static edge
5712 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5713 {
5714 basic_block src = e->src;
5715 gimple_stmt_iterator i;
5716 gimple *stmt;
5717
5718 /* We can replace or remove a complex jump only when we have exactly
5719 two edges. */
5720 if (EDGE_COUNT (src->succs) != 2
5721 /* Verify that all targets will be TARGET. Specifically, the
5722 edge that is not E must also go to TARGET. */
5723 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5724 return NULL;
5725
5726 i = gsi_last_bb (src);
5727 if (gsi_end_p (i))
5728 return NULL;
5729
5730 stmt = gsi_stmt (i);
5731
5732 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5733 {
5734 gsi_remove (&i, true);
5735 e = ssa_redirect_edge (e, target);
5736 e->flags = EDGE_FALLTHRU;
5737 return e;
5738 }
5739
5740 return NULL;
5741 }
5742
5743
5744 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5745 edge representing the redirected branch. */
5746
5747 static edge
5748 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5749 {
5750 basic_block bb = e->src;
5751 gimple_stmt_iterator gsi;
5752 edge ret;
5753 gimple *stmt;
5754
5755 if (e->flags & EDGE_ABNORMAL)
5756 return NULL;
5757
5758 if (e->dest == dest)
5759 return NULL;
5760
5761 if (e->flags & EDGE_EH)
5762 return redirect_eh_edge (e, dest);
5763
5764 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5765 {
5766 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5767 if (ret)
5768 return ret;
5769 }
5770
5771 gsi = gsi_last_bb (bb);
5772 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5773
5774 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5775 {
5776 case GIMPLE_COND:
5777       /* For a GIMPLE_COND, we only need to redirect the edge. */
5778 break;
5779
5780 case GIMPLE_GOTO:
5781 /* No non-abnormal edges should lead from a non-simple goto, and
5782 simple ones should be represented implicitly. */
5783 gcc_unreachable ();
5784
5785 case GIMPLE_SWITCH:
5786 {
5787 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5788 tree label = gimple_block_label (dest);
5789 tree cases = get_cases_for_edge (e, switch_stmt);
5790
5791 /* If we have a list of cases associated with E, then use it
5792 as it's a lot faster than walking the entire case vector. */
5793 if (cases)
5794 {
5795 edge e2 = find_edge (e->src, dest);
5796 tree last, first;
5797
5798 first = cases;
5799 while (cases)
5800 {
5801 last = cases;
5802 CASE_LABEL (cases) = label;
5803 cases = CASE_CHAIN (cases);
5804 }
5805
5806 /* If there was already an edge in the CFG, then we need
5807 to move all the cases associated with E to E2. */
5808 if (e2)
5809 {
5810 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5811
5812 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5813 CASE_CHAIN (cases2) = first;
5814 }
5815 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5816 }
5817 else
5818 {
5819 size_t i, n = gimple_switch_num_labels (switch_stmt);
5820
5821 for (i = 0; i < n; i++)
5822 {
5823 tree elt = gimple_switch_label (switch_stmt, i);
5824 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5825 CASE_LABEL (elt) = label;
5826 }
5827 }
5828 }
5829 break;
5830
5831 case GIMPLE_ASM:
5832 {
5833 gasm *asm_stmt = as_a <gasm *> (stmt);
5834 int i, n = gimple_asm_nlabels (asm_stmt);
5835 tree label = NULL;
5836
5837 for (i = 0; i < n; ++i)
5838 {
5839 tree cons = gimple_asm_label_op (asm_stmt, i);
5840 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5841 {
5842 if (!label)
5843 label = gimple_block_label (dest);
5844 TREE_VALUE (cons) = label;
5845 }
5846 }
5847
5848 /* If we didn't find any label matching the former edge in the
5849 asm labels, we must be redirecting the fallthrough
5850 edge. */
5851 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5852 }
5853 break;
5854
5855 case GIMPLE_RETURN:
5856 gsi_remove (&gsi, true);
5857 e->flags |= EDGE_FALLTHRU;
5858 break;
5859
5860 case GIMPLE_OMP_RETURN:
5861 case GIMPLE_OMP_CONTINUE:
5862 case GIMPLE_OMP_SECTIONS_SWITCH:
5863 case GIMPLE_OMP_FOR:
5864 /* The edges from OMP constructs can be simply redirected. */
5865 break;
5866
5867 case GIMPLE_EH_DISPATCH:
5868 if (!(e->flags & EDGE_FALLTHRU))
5869 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5870 break;
5871
5872 case GIMPLE_TRANSACTION:
5873 if (e->flags & EDGE_TM_ABORT)
5874 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5875 gimple_block_label (dest));
5876 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5877 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5878 gimple_block_label (dest));
5879 else
5880 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5881 gimple_block_label (dest));
5882 break;
5883
5884 default:
5885 /* Otherwise it must be a fallthru edge, and we don't need to
5886 do anything besides redirecting it. */
5887 gcc_assert (e->flags & EDGE_FALLTHRU);
5888 break;
5889 }
5890
5891 /* Update/insert PHI nodes as necessary. */
5892
5893 /* Now update the edges in the CFG. */
5894 e = ssa_redirect_edge (e, dest);
5895
5896 return e;
5897 }
5898
5899 /* Returns true if it is possible to remove edge E by redirecting
5900 it to the destination of the other edge from E->src. */
5901
5902 static bool
5903 gimple_can_remove_branch_p (const_edge e)
5904 {
5905 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5906 return false;
5907
5908 return true;
5909 }
5910
5911 /* Simple wrapper, as we can always redirect fallthru edges. */
5912
5913 static basic_block
5914 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5915 {
5916 e = gimple_redirect_edge_and_branch (e, dest);
5917 gcc_assert (e);
5918
5919 return NULL;
5920 }
5921
5922
5923 /* Splits basic block BB after statement STMT (but at least after the
5924 labels). If STMT is NULL, BB is split just after the labels. */
5925
5926 static basic_block
5927 gimple_split_block (basic_block bb, void *stmt)
5928 {
5929 gimple_stmt_iterator gsi;
5930 gimple_stmt_iterator gsi_tgt;
5931 gimple_seq list;
5932 basic_block new_bb;
5933 edge e;
5934 edge_iterator ei;
5935
5936 new_bb = create_empty_bb (bb);
5937
5938 /* Redirect the outgoing edges. */
5939 new_bb->succs = bb->succs;
5940 bb->succs = NULL;
5941 FOR_EACH_EDGE (e, ei, new_bb->succs)
5942 e->src = new_bb;
5943
5944 /* Get a stmt iterator pointing to the first stmt to move. */
5945 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5946 gsi = gsi_after_labels (bb);
5947 else
5948 {
5949 gsi = gsi_for_stmt ((gimple *) stmt);
5950 gsi_next (&gsi);
5951 }
5952
5953 /* Move everything from GSI to the new basic block. */
5954 if (gsi_end_p (gsi))
5955 return new_bb;
5956
5957 /* Split the statement list - avoid re-creating new containers as this
5958 brings ugly quadratic memory consumption in the inliner.
5959 (We are still quadratic since we need to update stmt BB pointers,
5960 sadly.) */
5961 gsi_split_seq_before (&gsi, &list);
5962 set_bb_seq (new_bb, list);
5963 for (gsi_tgt = gsi_start (list);
5964 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5965 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5966
5967 return new_bb;
5968 }
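
/* Typical use, via the cfghooks wrapper (a sketch): splitting after
   STMT leaves STMT and everything before it in BB and moves the rest
   to the new block on the fallthru edge:

     edge e = split_block (bb, stmt);
     basic_block rest = e->dest;

   gimple_split_block_before_cond_jump below is one in-file user of
   this pattern.  */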
5969
5970
5971 /* Moves basic block BB after block AFTER. */
5972
5973 static bool
5974 gimple_move_block_after (basic_block bb, basic_block after)
5975 {
5976 if (bb->prev_bb == after)
5977 return true;
5978
5979 unlink_block (bb);
5980 link_block (bb, after);
5981
5982 return true;
5983 }
5984
5985
5986 /* Return TRUE if block BB has no executable statements, otherwise return
5987 FALSE. */
5988
5989 static bool
5990 gimple_empty_block_p (basic_block bb)
5991 {
5992 /* BB must have no executable statements. */
5993 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5994 if (phi_nodes (bb))
5995 return false;
5996 if (gsi_end_p (gsi))
5997 return true;
5998 if (is_gimple_debug (gsi_stmt (gsi)))
5999 gsi_next_nondebug (&gsi);
6000 return gsi_end_p (gsi);
6001 }
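
/* For instance (illustrative): a block containing only a label and
   debug binds, such as

     L1:
     # DEBUG x => 0

   is considered empty, whereas a block containing a PHI node is not.  */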
6002
6003
6004 /* Split a basic block if it ends with a conditional branch or a
6005 switch and if the rest of the block is not empty. */
6006
6007 static basic_block
6008 gimple_split_block_before_cond_jump (basic_block bb)
6009 {
6010 gimple *last, *split_point;
6011 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6012 if (gsi_end_p (gsi))
6013 return NULL;
6014 last = gsi_stmt (gsi);
6015 if (gimple_code (last) != GIMPLE_COND
6016 && gimple_code (last) != GIMPLE_SWITCH)
6017 return NULL;
6018 gsi_prev (&gsi);
6019 split_point = gsi_stmt (gsi);
6020 return split_block (bb, split_point)->dest;
6021 }
6022
6023
6024 /* Return true if basic block BB can be duplicated. */
6025
6026 static bool
6027 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6028 {
6029 return true;
6030 }
6031
6032 /* Create a duplicate of the basic block BB. NOTE: This does not
6033 preserve SSA form. */
6034
6035 static basic_block
6036 gimple_duplicate_bb (basic_block bb)
6037 {
6038 basic_block new_bb;
6039 gimple_stmt_iterator gsi_tgt;
6040
6041 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6042
6043 /* Copy the PHI nodes. We ignore PHI node arguments here because
6044 the incoming edges have not been set up yet. */
6045 for (gphi_iterator gpi = gsi_start_phis (bb);
6046 !gsi_end_p (gpi);
6047 gsi_next (&gpi))
6048 {
6049 gphi *phi, *copy;
6050 phi = gpi.phi ();
6051 copy = create_phi_node (NULL_TREE, new_bb);
6052 create_new_def_for (gimple_phi_result (phi), copy,
6053 gimple_phi_result_ptr (copy));
6054 gimple_set_uid (copy, gimple_uid (phi));
6055 }
6056
6057 gsi_tgt = gsi_start_bb (new_bb);
6058 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6059 !gsi_end_p (gsi);
6060 gsi_next (&gsi))
6061 {
6062 def_operand_p def_p;
6063 ssa_op_iter op_iter;
6064 tree lhs;
6065 gimple *stmt, *copy;
6066
6067 stmt = gsi_stmt (gsi);
6068 if (gimple_code (stmt) == GIMPLE_LABEL)
6069 continue;
6070
6071 /* Don't duplicate label debug stmts. */
6072 if (gimple_debug_bind_p (stmt)
6073 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6074 == LABEL_DECL)
6075 continue;
6076
6077 /* Create a new copy of STMT and duplicate STMT's virtual
6078 operands. */
6079 copy = gimple_copy (stmt);
6080 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6081
6082 maybe_duplicate_eh_stmt (copy, stmt);
6083 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6084
6085 /* When copying around a stmt writing into a local non-user
6086 aggregate, make sure it won't share a stack slot with other
6087 vars. */
6088 lhs = gimple_get_lhs (stmt);
6089 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6090 {
6091 tree base = get_base_address (lhs);
6092 if (base
6093 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6094 && DECL_IGNORED_P (base)
6095 && !TREE_STATIC (base)
6096 && !DECL_EXTERNAL (base)
6097 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6098 DECL_NONSHAREABLE (base) = 1;
6099 }
6100
6101 /* Create new names for all the definitions created by COPY and
6102 add replacement mappings for each new name. */
6103 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6104 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6105 }
6106
6107 return new_bb;
6108 }
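
/* Illustrative sketch (hypothetical caller, assuming
   initialize_original_copy_tables has been called): the hook above is
   normally reached via the CFG hook wrapper duplicate_block, which also
   copies the outgoing edges; the caller then adds the incoming edges
   and rebuilds PHI arguments:

     basic_block copy = duplicate_block (bb, NULL, NULL);
     make_edge (pred, copy, EDGE_FALLTHRU);
     add_phi_args_after_copy_bb (copy);

   PRED is a hypothetical predecessor block; as noted above, SSA form
   is not preserved automatically.  */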
6109
6110 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6111
6112 static void
6113 add_phi_args_after_copy_edge (edge e_copy)
6114 {
6115 basic_block bb, bb_copy = e_copy->src, dest;
6116 edge e;
6117 edge_iterator ei;
6118 gphi *phi, *phi_copy;
6119 tree def;
6120 gphi_iterator psi, psi_copy;
6121
6122 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6123 return;
6124
6125 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6126
6127 if (e_copy->dest->flags & BB_DUPLICATED)
6128 dest = get_bb_original (e_copy->dest);
6129 else
6130 dest = e_copy->dest;
6131
6132 e = find_edge (bb, dest);
6133 if (!e)
6134 {
6135 /* During loop unrolling the target of the latch edge is copied.
6136 In this case we are not looking for an edge to DEST, but for an
6137 edge to the duplicated block whose original was DEST. */
6138 FOR_EACH_EDGE (e, ei, bb->succs)
6139 {
6140 if ((e->dest->flags & BB_DUPLICATED)
6141 && get_bb_original (e->dest) == dest)
6142 break;
6143 }
6144
6145 gcc_assert (e != NULL);
6146 }
6147
6148 for (psi = gsi_start_phis (e->dest),
6149 psi_copy = gsi_start_phis (e_copy->dest);
6150 !gsi_end_p (psi);
6151 gsi_next (&psi), gsi_next (&psi_copy))
6152 {
6153 phi = psi.phi ();
6154 phi_copy = psi_copy.phi ();
6155 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6156 add_phi_arg (phi_copy, def, e_copy,
6157 gimple_phi_arg_location_from_edge (phi, e));
6158 }
6159 }
6160
6161
6162 /* Basic block BB_COPY was created by code duplication. Add phi node
6163 arguments for edges going out of BB_COPY. The blocks that were
6164 duplicated have BB_DUPLICATED set. */
6165
6166 void
6167 add_phi_args_after_copy_bb (basic_block bb_copy)
6168 {
6169 edge e_copy;
6170 edge_iterator ei;
6171
6172 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6173 {
6174 add_phi_args_after_copy_edge (e_copy);
6175 }
6176 }
6177
6178 /* Blocks in REGION_COPY array of length N_REGION were created by
6179 duplication of basic blocks. Add phi node arguments for edges
6180 going out of these blocks. If E_COPY is not NULL, also add
6181 phi node arguments for its destination. */
6182
6183 void
6184 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6185 edge e_copy)
6186 {
6187 unsigned i;
6188
6189 for (i = 0; i < n_region; i++)
6190 region_copy[i]->flags |= BB_DUPLICATED;
6191
6192 for (i = 0; i < n_region; i++)
6193 add_phi_args_after_copy_bb (region_copy[i]);
6194 if (e_copy)
6195 add_phi_args_after_copy_edge (e_copy);
6196
6197 for (i = 0; i < n_region; i++)
6198 region_copy[i]->flags &= ~BB_DUPLICATED;
6199 }
6200
6201 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6202 important exit edge EXIT. By important we mean that no SSA name defined
6203 inside region is live over the other exit edges of the region. All entry
6204 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6205 to the duplicate of the region. Dominance and loop information is
6206 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6207 UPDATE_DOMINANCE is false then we assume that the caller will update the
6208 dominance information after calling this function. The new basic
6209 blocks are stored to REGION_COPY in the same order as in REGION,
6210 provided that REGION_COPY is not NULL.
6211 The function returns false if it is unable to copy the region,
6212 true otherwise. */
6213
6214 bool
6215 gimple_duplicate_sese_region (edge entry, edge exit,
6216 basic_block *region, unsigned n_region,
6217 basic_block *region_copy,
6218 bool update_dominance)
6219 {
6220 unsigned i;
6221 bool free_region_copy = false, copying_header = false;
6222 struct loop *loop = entry->dest->loop_father;
6223 edge exit_copy;
6224 vec<basic_block> doms;
6225 edge redirected;
6226 int total_freq = 0, entry_freq = 0;
6227 profile_count total_count = profile_count::uninitialized ();
6228 profile_count entry_count = profile_count::uninitialized ();
6229
6230 if (!can_copy_bbs_p (region, n_region))
6231 return false;
6232
6233 /* Some sanity checking. Note that we do not check for all possible
6234 misuses of the function, i.e. if you ask to copy something weird,
6235 it will work, but the state of the data structures will probably
6236 not be correct. */
6237 for (i = 0; i < n_region; i++)
6238 {
6239 /* We do not handle subloops, i.e. all the blocks must belong to the
6240 same loop. */
6241 if (region[i]->loop_father != loop)
6242 return false;
6243
6244 if (region[i] != entry->dest
6245 && region[i] == loop->header)
6246 return false;
6247 }
6248
6249 /* In case the function is used for loop header copying (which is the primary
6250 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6251 if (loop->header == entry->dest)
6252 {
6253 copying_header = true;
6254
6255 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6256 return false;
6257
6258 for (i = 0; i < n_region; i++)
6259 if (region[i] != exit->src
6260 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6261 return false;
6262 }
6263
6264 initialize_original_copy_tables ();
6265
6266 if (copying_header)
6267 set_loop_copy (loop, loop_outer (loop));
6268 else
6269 set_loop_copy (loop, loop);
6270
6271 if (!region_copy)
6272 {
6273 region_copy = XNEWVEC (basic_block, n_region);
6274 free_region_copy = true;
6275 }
6276
6277 /* Record blocks outside the region that are dominated by something
6278 inside. */
6279 if (update_dominance)
6280 {
6281 doms.create (0);
6282 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6283 }
6284
6285 if (entry->dest->count.initialized_p ())
6286 {
6287 total_count = entry->dest->count;
6288 entry_count = entry->count;
6289 /* Fix up corner cases, to avoid division by zero or creation of negative
6290 frequencies. */
6291 if (entry_count > total_count)
6292 entry_count = total_count;
6293 }
6294 if (!(total_count > 0) || !(entry_count > 0))
6295 {
6296 total_freq = entry->dest->frequency;
6297 entry_freq = EDGE_FREQUENCY (entry);
6298 /* Fix up corner cases, to avoid division by zero or creation of negative
6299 frequencies. */
6300 if (total_freq == 0)
6301 total_freq = 1;
6302 else if (entry_freq > total_freq)
6303 entry_freq = total_freq;
6304 }
6305
6306 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6307 split_edge_bb_loc (entry), update_dominance);
6308 if (total_count > 0 && entry_count > 0)
6309 {
6310 scale_bbs_frequencies_profile_count (region, n_region,
6311 total_count - entry_count,
6312 total_count);
6313 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6314 total_count);
6315 }
6316 else
6317 {
6318 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6319 total_freq);
6320 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6321 }
6322
6323 if (copying_header)
6324 {
6325 loop->header = exit->dest;
6326 loop->latch = exit->src;
6327 }
6328
6329 /* Redirect the entry and add the phi node arguments. */
6330 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6331 gcc_assert (redirected != NULL);
6332 flush_pending_stmts (entry);
6333
6334 /* Concerning updating of dominators: We must recount dominators
6335 for entry block and its copy. Anything that is outside of the
6336 region, but was dominated by something inside needs recounting as
6337 well. */
6338 if (update_dominance)
6339 {
6340 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6341 doms.safe_push (get_bb_original (entry->dest));
6342 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6343 doms.release ();
6344 }
6345
6346 /* Add the other PHI node arguments. */
6347 add_phi_args_after_copy (region_copy, n_region, NULL);
6348
6349 if (free_region_copy)
6350 free (region_copy);
6351
6352 free_original_copy_tables ();
6353 return true;
6354 }
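
/* Illustrative sketch, loosely modeled on loop header copying (all
   names hypothetical): to peel one copy of the header blocks BBS in
   front of LOOP, a pass can do

     basic_block *copies = XNEWVEC (basic_block, n_bbs);
     if (gimple_duplicate_sese_region (loop_preheader_edge (loop), exit,
				       bbs, n_bbs, copies, true))
       update_ssa (TODO_update_ssa);
     free (copies);

   where EXIT is the exit edge leaving the header region.  */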
6355
6356 /* Checks if BB is part of the region defined by N_REGION BBS. */
6357 static bool
6358 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6359 {
6360 unsigned int n;
6361
6362 for (n = 0; n < n_region; n++)
6363 {
6364 if (bb == bbs[n])
6365 return true;
6366 }
6367 return false;
6368 }
6369
6370 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6371 are stored to REGION_COPY in the same order in which they appear
6372 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6373 the region, EXIT an exit from it. The condition guarding EXIT
6374 is moved to ENTRY. Returns true if duplication succeeds, false
6375 otherwise.
6376
6377 For example,
6378
6379 some_code;
6380 if (cond)
6381 A;
6382 else
6383 B;
6384
6385 is transformed to
6386
6387 if (cond)
6388 {
6389 some_code;
6390 A;
6391 }
6392 else
6393 {
6394 some_code;
6395 B;
6396 }
6397 */
6398
6399 bool
6400 gimple_duplicate_sese_tail (edge entry, edge exit,
6401 basic_block *region, unsigned n_region,
6402 basic_block *region_copy)
6403 {
6404 unsigned i;
6405 bool free_region_copy = false;
6406 struct loop *loop = exit->dest->loop_father;
6407 struct loop *orig_loop = entry->dest->loop_father;
6408 basic_block switch_bb, entry_bb, nentry_bb;
6409 vec<basic_block> doms;
6410 int total_freq = 0, exit_freq = 0;
6411 profile_count total_count = profile_count::uninitialized (),
6412 exit_count = profile_count::uninitialized ();
6413 edge exits[2], nexits[2], e;
6414 gimple_stmt_iterator gsi;
6415 gimple *cond_stmt;
6416 edge sorig, snew;
6417 basic_block exit_bb;
6418 gphi_iterator psi;
6419 gphi *phi;
6420 tree def;
6421 struct loop *target, *aloop, *cloop;
6422
6423 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6424 exits[0] = exit;
6425 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6426
6427 if (!can_copy_bbs_p (region, n_region))
6428 return false;
6429
6430 initialize_original_copy_tables ();
6431 set_loop_copy (orig_loop, loop);
6432
6433 target = loop;
6434 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6435 {
6436 if (bb_part_of_region_p (aloop->header, region, n_region))
6437 {
6438 cloop = duplicate_loop (aloop, target);
6439 duplicate_subloops (aloop, cloop);
6440 }
6441 }
6442
6443 if (!region_copy)
6444 {
6445 region_copy = XNEWVEC (basic_block, n_region);
6446 free_region_copy = true;
6447 }
6448
6449 gcc_assert (!need_ssa_update_p (cfun));
6450
6451 /* Record blocks outside the region that are dominated by something
6452 inside. */
6453 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6454
6455 if (exit->src->count > 0)
6456 {
6457 total_count = exit->src->count;
6458 exit_count = exit->count;
6459 /* Fix up corner cases, to avoid division by zero or creation of negative
6460 frequencies. */
6461 if (exit_count > total_count)
6462 exit_count = total_count;
6463 }
6464 else
6465 {
6466 total_freq = exit->src->frequency;
6467 exit_freq = EDGE_FREQUENCY (exit);
6468 /* Fix up corner cases, to avoid division by zero or creation of negative
6469 frequencies. */
6470 if (total_freq == 0)
6471 total_freq = 1;
6472 if (exit_freq > total_freq)
6473 exit_freq = total_freq;
6474 }
6475
6476 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6477 split_edge_bb_loc (exit), true);
6478 if (total_count.initialized_p ())
6479 {
6480 scale_bbs_frequencies_profile_count (region, n_region,
6481 total_count - exit_count,
6482 total_count);
6483 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6484 total_count);
6485 }
6486 else
6487 {
6488 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6489 total_freq);
6490 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6491 }
6492
6493 /* Create the switch block, and put the exit condition into it. */
6494 entry_bb = entry->dest;
6495 nentry_bb = get_bb_copy (entry_bb);
6496 if (!last_stmt (entry->src)
6497 || !stmt_ends_bb_p (last_stmt (entry->src)))
6498 switch_bb = entry->src;
6499 else
6500 switch_bb = split_edge (entry);
6501 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6502
6503 gsi = gsi_last_bb (switch_bb);
6504 cond_stmt = last_stmt (exit->src);
6505 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6506 cond_stmt = gimple_copy (cond_stmt);
6507
6508 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6509
6510 sorig = single_succ_edge (switch_bb);
6511 sorig->flags = exits[1]->flags;
6512 sorig->probability = exits[1]->probability;
6513 sorig->count = exits[1]->count;
6514 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6515 snew->probability = exits[0]->probability;
6516 snew->count = exits[0]->count;
6517
6518
6519 /* Register the new edge from SWITCH_BB in loop exit lists. */
6520 rescan_loop_exit (snew, true, false);
6521
6522 /* Add the PHI node arguments. */
6523 add_phi_args_after_copy (region_copy, n_region, snew);
6524
6525 /* Get rid of now superfluous conditions and associated edges (and phi node
6526 arguments). */
6527 exit_bb = exit->dest;
6528
6529 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6530 PENDING_STMT (e) = NULL;
6531
6532 /* The latch of ORIG_LOOP was copied, and so was the backedge
6533 to the original header. We redirect this backedge to EXIT_BB. */
6534 for (i = 0; i < n_region; i++)
6535 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6536 {
6537 gcc_assert (single_succ_edge (region_copy[i]));
6538 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6539 PENDING_STMT (e) = NULL;
6540 for (psi = gsi_start_phis (exit_bb);
6541 !gsi_end_p (psi);
6542 gsi_next (&psi))
6543 {
6544 phi = psi.phi ();
6545 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6546 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6547 }
6548 }
6549 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6550 PENDING_STMT (e) = NULL;
6551
6552 /* Anything outside of the region that was dominated by something
6553 inside needs its dominance info updated. */
6554 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6555 doms.release ();
6556 /* Update the SSA web. */
6557 update_ssa (TODO_update_ssa);
6558
6559 if (free_region_copy)
6560 free (region_copy);
6561
6562 free_original_copy_tables ();
6563 return true;
6564 }
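
/* Illustrative sketch (hypothetical names): a caller that wants the
   exit test executed before the loop body, as the loop parallelization
   code does, can invoke

     gimple_duplicate_sese_tail (single_succ_edge (preheader), exit,
				 body_bbs, n_body_bbs, NULL);

   after which the copied condition guards the whole region, as shown
   in the comment before the function.  */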
6565
6566 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6567 adding blocks when the dominator traversal reaches EXIT. This
6568 function silently assumes that ENTRY strictly dominates EXIT. */
6569
6570 void
6571 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6572 vec<basic_block> *bbs_p)
6573 {
6574 basic_block son;
6575
6576 for (son = first_dom_son (CDI_DOMINATORS, entry);
6577 son;
6578 son = next_dom_son (CDI_DOMINATORS, son))
6579 {
6580 bbs_p->safe_push (son);
6581 if (son != exit)
6582 gather_blocks_in_sese_region (son, exit, bbs_p);
6583 }
6584 }
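
/* Illustrative sketch: ENTRY itself is not visited by the dominator
   walk, so callers push it manually first, exactly as
   move_sese_region_to_fn does further below:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

   and release BBS once done with it.  */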
6585
6586 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6587 The duplicates are recorded in VARS_MAP. */
6588
6589 static void
6590 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6591 tree to_context)
6592 {
6593 tree t = *tp, new_t;
6594 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6595
6596 if (DECL_CONTEXT (t) == to_context)
6597 return;
6598
6599 bool existed;
6600 tree &loc = vars_map->get_or_insert (t, &existed);
6601
6602 if (!existed)
6603 {
6604 if (SSA_VAR_P (t))
6605 {
6606 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6607 add_local_decl (f, new_t);
6608 }
6609 else
6610 {
6611 gcc_assert (TREE_CODE (t) == CONST_DECL);
6612 new_t = copy_node (t);
6613 }
6614 DECL_CONTEXT (new_t) = to_context;
6615
6616 loc = new_t;
6617 }
6618 else
6619 new_t = loc;
6620
6621 *tp = new_t;
6622 }
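
/* Illustrative use (taken from move_sese_region_to_fn further below):
   remapping the simduid decl of a moved loop is a one-liner:

     replace_by_duplicate_decl (&aloop->simduid, d.vars_map, d.to_context);
*/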
6623
6624
6625 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6626 VARS_MAP maps old ssa names and var_decls to the new ones. */
6627
6628 static tree
6629 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6630 tree to_context)
6631 {
6632 tree new_name;
6633
6634 gcc_assert (!virtual_operand_p (name));
6635
6636 tree *loc = vars_map->get (name);
6637
6638 if (!loc)
6639 {
6640 tree decl = SSA_NAME_VAR (name);
6641 if (decl)
6642 {
6643 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6644 replace_by_duplicate_decl (&decl, vars_map, to_context);
6645 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6646 decl, SSA_NAME_DEF_STMT (name));
6647 }
6648 else
6649 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6650 name, SSA_NAME_DEF_STMT (name));
6651
6652 /* Now that we've used the def stmt to define new_name, make sure it
6653 doesn't define name anymore. */
6654 SSA_NAME_DEF_STMT (name) = NULL;
6655
6656 vars_map->put (name, new_name);
6657 }
6658 else
6659 new_name = *loc;
6660
6661 return new_name;
6662 }
6663
6664 struct move_stmt_d
6665 {
6666 tree orig_block;
6667 tree new_block;
6668 tree from_context;
6669 tree to_context;
6670 hash_map<tree, tree> *vars_map;
6671 htab_t new_label_map;
6672 hash_map<void *, void *> *eh_map;
6673 bool remap_decls_p;
6674 };
6675
6676 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6677 contained in *TP if it was ORIG_BLOCK previously and change the
6678 DECL_CONTEXT of every local variable referenced in *TP. */
6679
6680 static tree
6681 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6682 {
6683 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6684 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6685 tree t = *tp;
6686
6687 if (EXPR_P (t))
6688 {
6689 tree block = TREE_BLOCK (t);
6690 if (block == NULL_TREE)
6691 ;
6692 else if (block == p->orig_block
6693 || p->orig_block == NULL_TREE)
6694 TREE_SET_BLOCK (t, p->new_block);
6695 else if (flag_checking)
6696 {
6697 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6698 block = BLOCK_SUPERCONTEXT (block);
6699 gcc_assert (block == p->orig_block);
6700 }
6701 }
6702 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6703 {
6704 if (TREE_CODE (t) == SSA_NAME)
6705 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6706 else if (TREE_CODE (t) == PARM_DECL
6707 && gimple_in_ssa_p (cfun))
6708 *tp = *(p->vars_map->get (t));
6709 else if (TREE_CODE (t) == LABEL_DECL)
6710 {
6711 if (p->new_label_map)
6712 {
6713 struct tree_map in, *out;
6714 in.base.from = t;
6715 out = (struct tree_map *)
6716 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6717 if (out)
6718 *tp = t = out->to;
6719 }
6720
6721 DECL_CONTEXT (t) = p->to_context;
6722 }
6723 else if (p->remap_decls_p)
6724 {
6725 /* Replace T with its duplicate. T should no longer appear in the
6726 parent function, so this looks wasteful; however, it may appear
6727 in referenced_vars, and more importantly, as virtual operands of
6728 statements, and in alias lists of other variables. It would be
6729 quite difficult to expunge it from all those places. ??? It might
6730 suffice to do this for addressable variables. */
6731 if ((VAR_P (t) && !is_global_var (t))
6732 || TREE_CODE (t) == CONST_DECL)
6733 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6734 }
6735 *walk_subtrees = 0;
6736 }
6737 else if (TYPE_P (t))
6738 *walk_subtrees = 0;
6739
6740 return NULL_TREE;
6741 }
6742
6743 /* Helper for move_stmt_r. Given an EH region number for the source
6744 function, map that to the duplicate EH region number in the dest. */
6745
6746 static int
6747 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6748 {
6749 eh_region old_r, new_r;
6750
6751 old_r = get_eh_region_from_number (old_nr);
6752 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6753
6754 return new_r->index;
6755 }
6756
6757 /* Similar, but operate on INTEGER_CSTs. */
6758
6759 static tree
6760 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6761 {
6762 int old_nr, new_nr;
6763
6764 old_nr = tree_to_shwi (old_t_nr);
6765 new_nr = move_stmt_eh_region_nr (old_nr, p);
6766
6767 return build_int_cst (integer_type_node, new_nr);
6768 }
6769
6770 /* Like move_stmt_op, but for gimple statements.
6771
6772 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6773 contained in the current statement in *GSI_P and change the
6774 DECL_CONTEXT of every local variable referenced in the current
6775 statement. */
6776
6777 static tree
6778 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6779 struct walk_stmt_info *wi)
6780 {
6781 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6782 gimple *stmt = gsi_stmt (*gsi_p);
6783 tree block = gimple_block (stmt);
6784
6785 if (block == p->orig_block
6786 || (p->orig_block == NULL_TREE
6787 && block != NULL_TREE))
6788 gimple_set_block (stmt, p->new_block);
6789
6790 switch (gimple_code (stmt))
6791 {
6792 case GIMPLE_CALL:
6793 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6794 {
6795 tree r, fndecl = gimple_call_fndecl (stmt);
6796 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6797 switch (DECL_FUNCTION_CODE (fndecl))
6798 {
6799 case BUILT_IN_EH_COPY_VALUES:
6800 r = gimple_call_arg (stmt, 1);
6801 r = move_stmt_eh_region_tree_nr (r, p);
6802 gimple_call_set_arg (stmt, 1, r);
6803 /* FALLTHRU */
6804
6805 case BUILT_IN_EH_POINTER:
6806 case BUILT_IN_EH_FILTER:
6807 r = gimple_call_arg (stmt, 0);
6808 r = move_stmt_eh_region_tree_nr (r, p);
6809 gimple_call_set_arg (stmt, 0, r);
6810 break;
6811
6812 default:
6813 break;
6814 }
6815 }
6816 break;
6817
6818 case GIMPLE_RESX:
6819 {
6820 gresx *resx_stmt = as_a <gresx *> (stmt);
6821 int r = gimple_resx_region (resx_stmt);
6822 r = move_stmt_eh_region_nr (r, p);
6823 gimple_resx_set_region (resx_stmt, r);
6824 }
6825 break;
6826
6827 case GIMPLE_EH_DISPATCH:
6828 {
6829 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6830 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6831 r = move_stmt_eh_region_nr (r, p);
6832 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6833 }
6834 break;
6835
6836 case GIMPLE_OMP_RETURN:
6837 case GIMPLE_OMP_CONTINUE:
6838 break;
6839 default:
6840 if (is_gimple_omp (stmt))
6841 {
6842 /* Do not remap variables inside OMP directives. Variables
6843 referenced in clauses and directive header belong to the
6844 parent function and should not be moved into the child
6845 function. */
6846 bool save_remap_decls_p = p->remap_decls_p;
6847 p->remap_decls_p = false;
6848 *handled_ops_p = true;
6849
6850 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6851 move_stmt_op, wi);
6852
6853 p->remap_decls_p = save_remap_decls_p;
6854 }
6855 break;
6856 }
6857
6858 return NULL_TREE;
6859 }
6860
6861 /* Move basic block BB from function CFUN to function DEST_FN. The
6862 block is moved out of the original linked list and placed after
6863 block AFTER in the new list. Also, the block is removed from the
6864 original array of blocks and placed in DEST_FN's array of blocks.
6865 If UPDATE_EDGE_COUNT_P is true, the edge counts of both CFGs are
6866 updated to reflect the moved edges.
6867
6868 The local variables are remapped to new instances, VARS_MAP is used
6869 to record the mapping. */
6870
6871 static void
6872 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6873 basic_block after, bool update_edge_count_p,
6874 struct move_stmt_d *d)
6875 {
6876 struct control_flow_graph *cfg;
6877 edge_iterator ei;
6878 edge e;
6879 gimple_stmt_iterator si;
6880 unsigned old_len, new_len;
6881
6882 /* Remove BB from dominance structures. */
6883 delete_from_dominance_info (CDI_DOMINATORS, bb);
6884
6885 /* Move BB from its current loop to the copy in the new function. */
6886 if (current_loops)
6887 {
6888 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6889 if (new_loop)
6890 bb->loop_father = new_loop;
6891 }
6892
6893 /* Link BB to the new linked list. */
6894 move_block_after (bb, after);
6895
6896 /* Update the edge count in the corresponding flowgraphs. */
6897 if (update_edge_count_p)
6898 FOR_EACH_EDGE (e, ei, bb->succs)
6899 {
6900 cfun->cfg->x_n_edges--;
6901 dest_cfun->cfg->x_n_edges++;
6902 }
6903
6904 /* Remove BB from the original basic block array. */
6905 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6906 cfun->cfg->x_n_basic_blocks--;
6907
6908 /* Grow DEST_CFUN's basic block array if needed. */
6909 cfg = dest_cfun->cfg;
6910 cfg->x_n_basic_blocks++;
6911 if (bb->index >= cfg->x_last_basic_block)
6912 cfg->x_last_basic_block = bb->index + 1;
6913
6914 old_len = vec_safe_length (cfg->x_basic_block_info);
6915 if ((unsigned) cfg->x_last_basic_block >= old_len)
6916 {
6917 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6918 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6919 }
6920
6921 (*cfg->x_basic_block_info)[bb->index] = bb;
6922
6923 /* Remap the variables in phi nodes. */
6924 for (gphi_iterator psi = gsi_start_phis (bb);
6925 !gsi_end_p (psi); )
6926 {
6927 gphi *phi = psi.phi ();
6928 use_operand_p use;
6929 tree op = PHI_RESULT (phi);
6930 ssa_op_iter oi;
6931 unsigned i;
6932
6933 if (virtual_operand_p (op))
6934 {
6935 /* Remove the phi nodes for virtual operands (alias analysis will be
6936 run for the new function, anyway). */
6937 remove_phi_node (&psi, true);
6938 continue;
6939 }
6940
6941 SET_PHI_RESULT (phi,
6942 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6943 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6944 {
6945 op = USE_FROM_PTR (use);
6946 if (TREE_CODE (op) == SSA_NAME)
6947 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6948 }
6949
6950 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6951 {
6952 location_t locus = gimple_phi_arg_location (phi, i);
6953 tree block = LOCATION_BLOCK (locus);
6954
6955 if (locus == UNKNOWN_LOCATION)
6956 continue;
6957 if (d->orig_block == NULL_TREE || block == d->orig_block)
6958 {
6959 locus = set_block (locus, d->new_block);
6960 gimple_phi_arg_set_location (phi, i, locus);
6961 }
6962 }
6963
6964 gsi_next (&psi);
6965 }
6966
6967 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6968 {
6969 gimple *stmt = gsi_stmt (si);
6970 struct walk_stmt_info wi;
6971
6972 memset (&wi, 0, sizeof (wi));
6973 wi.info = d;
6974 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6975
6976 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6977 {
6978 tree label = gimple_label_label (label_stmt);
6979 int uid = LABEL_DECL_UID (label);
6980
6981 gcc_assert (uid > -1);
6982
6983 old_len = vec_safe_length (cfg->x_label_to_block_map);
6984 if (old_len <= (unsigned) uid)
6985 {
6986 new_len = 3 * uid / 2 + 1;
6987 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6988 }
6989
6990 (*cfg->x_label_to_block_map)[uid] = bb;
6991 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6992
6993 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6994
6995 if (uid >= dest_cfun->cfg->last_label_uid)
6996 dest_cfun->cfg->last_label_uid = uid + 1;
6997 }
6998
6999 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7000 remove_stmt_from_eh_lp_fn (cfun, stmt);
7001
7002 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7003 gimple_remove_stmt_histograms (cfun, stmt);
7004
7005 /* We cannot leave any operands allocated from the operand caches of
7006 the current function. */
7007 free_stmt_operands (cfun, stmt);
7008 push_cfun (dest_cfun);
7009 update_stmt (stmt);
7010 pop_cfun ();
7011 }
7012
7013 FOR_EACH_EDGE (e, ei, bb->succs)
7014 if (e->goto_locus != UNKNOWN_LOCATION)
7015 {
7016 tree block = LOCATION_BLOCK (e->goto_locus);
7017 if (d->orig_block == NULL_TREE
7018 || block == d->orig_block)
7019 e->goto_locus = set_block (e->goto_locus, d->new_block);
7020 }
7021 }
7022
7023 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7024 the outermost EH region. Use REGION as the incoming base EH region. */
7025
7026 static eh_region
7027 find_outermost_region_in_block (struct function *src_cfun,
7028 basic_block bb, eh_region region)
7029 {
7030 gimple_stmt_iterator si;
7031
7032 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7033 {
7034 gimple *stmt = gsi_stmt (si);
7035 eh_region stmt_region;
7036 int lp_nr;
7037
7038 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7039 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7040 if (stmt_region)
7041 {
7042 if (region == NULL)
7043 region = stmt_region;
7044 else if (stmt_region != region)
7045 {
7046 region = eh_region_outermost (src_cfun, stmt_region, region);
7047 gcc_assert (region != NULL);
7048 }
7049 }
7050 }
7051
7052 return region;
7053 }
7054
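/* Callback for duplicate_eh_regions below: create a fresh artificial
   label for DECL, record the DECL -> new label mapping in the hash
   table DATA, and return the new label. */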
7055 static tree
7056 new_label_mapper (tree decl, void *data)
7057 {
7058 htab_t hash = (htab_t) data;
7059 struct tree_map *m;
7060 void **slot;
7061
7062 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7063
7064 m = XNEW (struct tree_map);
7065 m->hash = DECL_UID (decl);
7066 m->base.from = decl;
7067 m->to = create_artificial_label (UNKNOWN_LOCATION);
7068 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7069 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7070 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7071
7072 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7073 gcc_assert (*slot == NULL);
7074
7075 *slot = m;
7076
7077 return m->to;
7078 }
7079
7080 /* Tree walker to replace the decls used inside value expressions by
7081 duplicates. */
7082
7083 static tree
7084 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7085 {
7086 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7087
7088 switch (TREE_CODE (*tp))
7089 {
7090 case VAR_DECL:
7091 case PARM_DECL:
7092 case RESULT_DECL:
7093 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7094 break;
7095 default:
7096 break;
7097 }
7098
7099 if (IS_TYPE_OR_DECL_P (*tp))
7100 *walk_subtrees = false;
7101
7102 return NULL;
7103 }
7104
7105 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7106 subblocks. */
7107
7108 static void
7109 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7110 tree to_context)
7111 {
7112 tree *tp, t;
7113
7114 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7115 {
7116 t = *tp;
7117 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7118 continue;
7119 replace_by_duplicate_decl (&t, vars_map, to_context);
7120 if (t != *tp)
7121 {
7122 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7123 {
7124 tree x = DECL_VALUE_EXPR (*tp);
7125 struct replace_decls_d rd = { vars_map, to_context };
7126 x = unshare_expr (x);
7127 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7128 SET_DECL_VALUE_EXPR (t, x);
7129 DECL_HAS_VALUE_EXPR_P (t) = 1;
7130 }
7131 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7132 *tp = t;
7133 }
7134 }
7135
7136 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7137 replace_block_vars_by_duplicates (block, vars_map, to_context);
7138 }
7139
7140 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7141 from FN1 to FN2. */
7142
7143 static void
7144 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7145 struct loop *loop)
7146 {
7147 /* Discard it from the old loop array. */
7148 (*get_loops (fn1))[loop->num] = NULL;
7149
7150 /* Place it in the new loop array, assigning it a new number. */
7151 loop->num = number_of_loops (fn2);
7152 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7153
7154 /* Recurse to children. */
7155 for (loop = loop->inner; loop; loop = loop->next)
7156 fixup_loop_arrays_after_move (fn1, fn2, loop);
7157 }
7158
7159 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7160 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7161
7162 DEBUG_FUNCTION void
7163 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7164 {
7165 basic_block bb;
7166 edge_iterator ei;
7167 edge e;
7168 bitmap bbs = BITMAP_ALLOC (NULL);
7169 int i;
7170
7171 gcc_assert (entry != NULL);
7172 gcc_assert (entry != exit);
7173 gcc_assert (bbs_p != NULL);
7174
7175 gcc_assert (bbs_p->length () > 0);
7176
7177 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7178 bitmap_set_bit (bbs, bb->index);
7179
7180 gcc_assert (bitmap_bit_p (bbs, entry->index));
7181 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7182
7183 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7184 {
7185 if (bb == entry)
7186 {
7187 gcc_assert (single_pred_p (entry));
7188 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7189 }
7190 else
7191 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7192 {
7193 e = ei_edge (ei);
7194 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7195 }
7196
7197 if (bb == exit)
7198 {
7199 gcc_assert (single_succ_p (exit));
7200 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7201 }
7202 else
7203 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7204 {
7205 e = ei_edge (ei);
7206 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7207 }
7208 }
7209
7210 BITMAP_FREE (bbs);
7211 }
7212
7213 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7214
7215 bool
7216 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7217 {
7218 bitmap release_names = (bitmap)data;
7219
7220 if (TREE_CODE (from) != SSA_NAME)
7221 return true;
7222
7223 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7224 return true;
7225 }
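
/* Illustrative sketch: this is a hash_map traversal callback; the SSA
   name release code below uses it as

     bitmap release_names = BITMAP_ALLOC (NULL);
     vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);

   and then releases each recorded SSA version in ascending order.  */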
7226
7227 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7228 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7229 single basic block in the original CFG and the new basic block is
7230 returned. DEST_CFUN must not have a CFG yet.
7231
7232 Note that the region need not be a pure SESE region. Blocks inside
7233 the region may contain calls to abort/exit. The only restriction
7234 is that ENTRY_BB should be the only entry point and it must
7235 dominate EXIT_BB.
7236
7237 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7238 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7239 to the new function.
7240
7241 All local variables referenced in the region are assumed to be in
7242 the corresponding BLOCK_VARS and unexpanded variable lists
7243 associated with DEST_CFUN.
7244
7245 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7246 reimplement move_sese_region_to_fn by duplicating the region rather than
7247 moving it. */
7248
7249 basic_block
7250 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7251 basic_block exit_bb, tree orig_block)
7252 {
7253 vec<basic_block> bbs, dom_bbs;
7254 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7255 basic_block after, bb, *entry_pred, *exit_succ, abb;
7256 struct function *saved_cfun = cfun;
7257 int *entry_flag, *exit_flag;
7258 profile_probability *entry_prob, *exit_prob;
7259 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7260 edge e;
7261 edge_iterator ei;
7262 htab_t new_label_map;
7263 hash_map<void *, void *> *eh_map;
7264 struct loop *loop = entry_bb->loop_father;
7265 struct loop *loop0 = get_loop (saved_cfun, 0);
7266 struct move_stmt_d d;
7267
7268 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7269 region. */
7270 gcc_assert (entry_bb != exit_bb
7271 && (!exit_bb
7272 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7273
7274 /* Collect all the blocks in the region. Manually add ENTRY_BB
7275 because it won't be added by dfs_enumerate_from. */
7276 bbs.create (0);
7277 bbs.safe_push (entry_bb);
7278 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7279
7280 if (flag_checking)
7281 verify_sese (entry_bb, exit_bb, &bbs);
7282
7283 /* The blocks that used to be dominated by something in BBS will now be
7284 dominated by the new block. */
7285 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7286 bbs.address (),
7287 bbs.length ());
7288
7289 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7290 the predecessor edges to ENTRY_BB and the successor edges to
7291 EXIT_BB so that we can re-attach them to the new basic block that
7292 will replace the region. */
7293 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7294 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7295 entry_flag = XNEWVEC (int, num_entry_edges);
7296 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7297 i = 0;
7298 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7299 {
7300 entry_prob[i] = e->probability;
7301 entry_flag[i] = e->flags;
7302 entry_pred[i++] = e->src;
7303 remove_edge (e);
7304 }
7305
7306 if (exit_bb)
7307 {
7308 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7309 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7310 exit_flag = XNEWVEC (int, num_exit_edges);
7311 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7312 i = 0;
7313 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7314 {
7315 exit_prob[i] = e->probability;
7316 exit_flag[i] = e->flags;
7317 exit_succ[i++] = e->dest;
7318 remove_edge (e);
7319 }
7320 }
7321 else
7322 {
7323 num_exit_edges = 0;
7324 exit_succ = NULL;
7325 exit_flag = NULL;
7326 exit_prob = NULL;
7327 }
7328
7329 /* Switch context to the child function to initialize DEST_FN's CFG. */
7330 gcc_assert (dest_cfun->cfg == NULL);
7331 push_cfun (dest_cfun);
7332
7333 init_empty_tree_cfg ();
7334
7335 /* Initialize EH information for the new function. */
7336 eh_map = NULL;
7337 new_label_map = NULL;
7338 if (saved_cfun->eh)
7339 {
7340 eh_region region = NULL;
7341
7342 FOR_EACH_VEC_ELT (bbs, i, bb)
7343 region = find_outermost_region_in_block (saved_cfun, bb, region);
7344
7345 init_eh_for_function ();
7346 if (region != NULL)
7347 {
7348 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7349 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7350 new_label_mapper, new_label_map);
7351 }
7352 }
7353
7354 /* Initialize an empty loop tree. */
7355 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7356 init_loops_structure (dest_cfun, loops, 1);
7357 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7358 set_loops_for_fn (dest_cfun, loops);
7359
7360 /* Move the outlined loop tree part. */
7361 num_nodes = bbs.length ();
7362 FOR_EACH_VEC_ELT (bbs, i, bb)
7363 {
7364 if (bb->loop_father->header == bb)
7365 {
7366 struct loop *this_loop = bb->loop_father;
7367 struct loop *outer = loop_outer (this_loop);
7368 if (outer == loop
7369 /* If the SESE region contains some bbs ending with
7370 a noreturn call, those are considered to belong
7371 to the outermost loop in saved_cfun, rather than
7372 the entry_bb's loop_father. */
7373 || outer == loop0)
7374 {
7375 if (outer != loop)
7376 num_nodes -= this_loop->num_nodes;
7377 flow_loop_tree_node_remove (bb->loop_father);
7378 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7379 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7380 }
7381 }
7382 else if (bb->loop_father == loop0 && loop0 != loop)
7383 num_nodes--;
7384
7385 /* Remove loop exits from the outlined region. */
7386 if (loops_for_fn (saved_cfun)->exits)
7387 FOR_EACH_EDGE (e, ei, bb->succs)
7388 {
7389 struct loops *l = loops_for_fn (saved_cfun);
7390 loop_exit **slot
7391 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7392 NO_INSERT);
7393 if (slot)
7394 l->exits->clear_slot (slot);
7395 }
7396 }
7397
7398
7399 /* Adjust the number of blocks in the tree root of the outlined part. */
7400 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7401
7402 /* Setup a mapping to be used by move_block_to_fn. */
7403 loop->aux = current_loops->tree_root;
7404 loop0->aux = current_loops->tree_root;
7405
7406 pop_cfun ();
7407
7408 /* Move blocks from BBS into DEST_CFUN. */
7409 gcc_assert (bbs.length () >= 2);
7410 after = dest_cfun->cfg->x_entry_block_ptr;
7411 hash_map<tree, tree> vars_map;
7412
7413 memset (&d, 0, sizeof (d));
7414 d.orig_block = orig_block;
7415 d.new_block = DECL_INITIAL (dest_cfun->decl);
7416 d.from_context = cfun->decl;
7417 d.to_context = dest_cfun->decl;
7418 d.vars_map = &vars_map;
7419 d.new_label_map = new_label_map;
7420 d.eh_map = eh_map;
7421 d.remap_decls_p = true;
7422
7423 if (gimple_in_ssa_p (cfun))
7424 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7425 {
7426 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7427 set_ssa_default_def (dest_cfun, arg, narg);
7428 vars_map.put (arg, narg);
7429 }
7430
7431 FOR_EACH_VEC_ELT (bbs, i, bb)
7432 {
7433 /* No need to update edge counts on the last block. It has
7434 already been updated earlier when we detached the region from
7435 the original CFG. */
7436 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7437 after = bb;
7438 }
7439
7440 loop->aux = NULL;
7441 loop0->aux = NULL;
7442 /* Loop sizes are no longer correct, fix them up. */
7443 loop->num_nodes -= num_nodes;
7444 for (struct loop *outer = loop_outer (loop);
7445 outer; outer = loop_outer (outer))
7446 outer->num_nodes -= num_nodes;
7447 loop0->num_nodes -= bbs.length () - num_nodes;
7448
7449 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7450 {
7451 struct loop *aloop;
7452 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7453 if (aloop != NULL)
7454 {
7455 if (aloop->simduid)
7456 {
7457 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7458 d.to_context);
7459 dest_cfun->has_simduid_loops = true;
7460 }
7461 if (aloop->force_vectorize)
7462 dest_cfun->has_force_vectorize_loops = true;
7463 }
7464 }
7465
7466 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7467 if (orig_block)
7468 {
7469 tree block;
7470 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7471 == NULL_TREE);
7472 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7473 = BLOCK_SUBBLOCKS (orig_block);
7474 for (block = BLOCK_SUBBLOCKS (orig_block);
7475 block; block = BLOCK_CHAIN (block))
7476 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7477 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7478 }
7479
7480 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7481 &vars_map, dest_cfun->decl);
7482
7483 if (new_label_map)
7484 htab_delete (new_label_map);
7485 if (eh_map)
7486 delete eh_map;
7487
7488 if (gimple_in_ssa_p (cfun))
7489 {
7490 /* We need to release ssa-names in a defined order, so first find them,
7491 and then iterate in ascending version order. */
7492 bitmap release_names = BITMAP_ALLOC (NULL);
7493 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7494 bitmap_iterator bi;
7495 unsigned i;
7496 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7497 release_ssa_name (ssa_name (i));
7498 BITMAP_FREE (release_names);
7499 }
7500
7501 /* Rewire the entry and exit blocks. The successor to the entry
7502 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7503 the child function. Similarly, the predecessor of DEST_FN's
7504 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7505 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7506 various CFG manipulation functions get to the right CFG.
7507
7508 FIXME, this is silly. The CFG ought to become a parameter to
7509 these helpers. */
7510 push_cfun (dest_cfun);
7511 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7512 if (exit_bb)
7513 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7514 pop_cfun ();
7515
7516 /* Back in the original function, the SESE region has disappeared,
7517 create a new basic block in its place. */
7518 bb = create_empty_bb (entry_pred[0]);
7519 if (current_loops)
7520 add_bb_to_loop (bb, loop);
7521 for (i = 0; i < num_entry_edges; i++)
7522 {
7523 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7524 e->probability = entry_prob[i];
7525 }
7526
7527 for (i = 0; i < num_exit_edges; i++)
7528 {
7529 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7530 e->probability = exit_prob[i];
7531 }
7532
7533 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7534 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7535 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7536 dom_bbs.release ();
7537
7538 if (exit_bb)
7539 {
7540 free (exit_prob);
7541 free (exit_flag);
7542 free (exit_succ);
7543 }
7544 free (entry_prob);
7545 free (entry_flag);
7546 free (entry_pred);
7547 bbs.release ();
7548
7549 return bb;
7550 }
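
/* Illustrative sketch (hypothetical names): an outlining client such
   as the OMP expander, after building CHILD_FN with an empty body,
   replaces the region in the parent with a single block:

     basic_block new_bb
       = move_sese_region_to_fn (DECL_STRUCT_FUNCTION (child_fn),
				 region_entry_bb, region_exit_bb, block);
*/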
7551
7552 /* Dump default def DEF to file FILE using FLAGS and indentation
7553 SPC. */
7554
7555 static void
7556 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7557 {
7558 for (int i = 0; i < spc; ++i)
7559 fprintf (file, " ");
7560 dump_ssaname_info_to_file (file, def, spc);
7561
7562 print_generic_expr (file, TREE_TYPE (def), flags);
7563 fprintf (file, " ");
7564 print_generic_expr (file, def, flags);
7565 fprintf (file, " = ");
7566 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7567 fprintf (file, ";\n");
7568 }
7569
7570 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7571
7572 static void
7573 print_no_sanitize_attr_value (FILE *file, tree value)
7574 {
7575 unsigned int flags = tree_to_uhwi (value);
7576 bool first = true;
7577 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7578 {
7579 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7580 {
7581 if (!first)
7582 fprintf (file, " | ");
7583 fprintf (file, "%s", sanitizer_opts[i].name);
7584 first = false;
7585 }
7586 }
7587 }
7588
7589 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
7590 dumpfile.h). */
7591
7592 void
7593 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7594 {
7595 tree arg, var, old_current_fndecl = current_function_decl;
7596 struct function *dsf;
7597 bool ignore_topmost_bind = false, any_var = false;
7598 basic_block bb;
7599 tree chain;
7600 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7601 && decl_is_tm_clone (fndecl));
7602 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7603
7604 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7605 {
7606 fprintf (file, "__attribute__((");
7607
7608 bool first = true;
7609 tree chain;
7610 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7611 first = false, chain = TREE_CHAIN (chain))
7612 {
7613 if (!first)
7614 fprintf (file, ", ");
7615
7616 tree name = get_attribute_name (chain);
7617 print_generic_expr (file, name, dump_flags);
7618 if (TREE_VALUE (chain) != NULL_TREE)
7619 {
7620 fprintf (file, " (");
7621
7622 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7623 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7624 else
7625 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7626 fprintf (file, ")");
7627 }
7628 }
7629
7630 fprintf (file, "))\n");
7631 }
7632
7633 current_function_decl = fndecl;
7634 if (flags & TDF_GIMPLE)
7635 {
7636 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7637 dump_flags | TDF_SLIM);
7638 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
7639 }
7640 else
7641 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7642
7643 arg = DECL_ARGUMENTS (fndecl);
7644 while (arg)
7645 {
7646 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7647 fprintf (file, " ");
7648 print_generic_expr (file, arg, dump_flags);
7649 if (DECL_CHAIN (arg))
7650 fprintf (file, ", ");
7651 arg = DECL_CHAIN (arg);
7652 }
7653 fprintf (file, ")\n");
7654
7655 dsf = DECL_STRUCT_FUNCTION (fndecl);
7656 if (dsf && (flags & TDF_EH))
7657 dump_eh_tree (file, dsf);
7658
7659 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7660 {
7661 dump_node (fndecl, TDF_SLIM | flags, file);
7662 current_function_decl = old_current_fndecl;
7663 return;
7664 }
7665
7666 /* When GIMPLE is lowered, the variables are no longer available in
7667 BIND_EXPRs, so display them separately. */
7668 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7669 {
7670 unsigned ix;
7671 ignore_topmost_bind = true;
7672
7673 fprintf (file, "{\n");
7674 if (gimple_in_ssa_p (fun)
7675 && (flags & TDF_ALIAS))
7676 {
7677 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7678 arg = DECL_CHAIN (arg))
7679 {
7680 tree def = ssa_default_def (fun, arg);
7681 if (def)
7682 dump_default_def (file, def, 2, flags);
7683 }
7684
7685 tree res = DECL_RESULT (fun->decl);
7686 if (res != NULL_TREE
7687 && DECL_BY_REFERENCE (res))
7688 {
7689 tree def = ssa_default_def (fun, res);
7690 if (def)
7691 dump_default_def (file, def, 2, flags);
7692 }
7693
7694 tree static_chain = fun->static_chain_decl;
7695 if (static_chain != NULL_TREE)
7696 {
7697 tree def = ssa_default_def (fun, static_chain);
7698 if (def)
7699 dump_default_def (file, def, 2, flags);
7700 }
7701 }
7702
7703 if (!vec_safe_is_empty (fun->local_decls))
7704 FOR_EACH_LOCAL_DECL (fun, ix, var)
7705 {
7706 print_generic_decl (file, var, flags);
7707 fprintf (file, "\n");
7708
7709 any_var = true;
7710 }
7711
7712 tree name;
7713
7714 if (gimple_in_ssa_p (cfun))
7715 FOR_EACH_SSA_NAME (ix, name, cfun)
7716 {
7717 if (!SSA_NAME_VAR (name))
7718 {
7719 fprintf (file, " ");
7720 print_generic_expr (file, TREE_TYPE (name), flags);
7721 fprintf (file, " ");
7722 print_generic_expr (file, name, flags);
7723 fprintf (file, ";\n");
7724
7725 any_var = true;
7726 }
7727 }
7728 }
7729
7730 if (fun && fun->decl == fndecl
7731 && fun->cfg
7732 && basic_block_info_for_fn (fun))
7733 {
7734 /* If the CFG has been built, emit a CFG-based dump. */
7735 if (!ignore_topmost_bind)
7736 fprintf (file, "{\n");
7737
7738 if (any_var && n_basic_blocks_for_fn (fun))
7739 fprintf (file, "\n");
7740
7741 FOR_EACH_BB_FN (bb, fun)
7742 dump_bb (file, bb, 2, flags);
7743
7744 fprintf (file, "}\n");
7745 }
7746 else if (fun->curr_properties & PROP_gimple_any)
7747 {
7748 /* The function is now in GIMPLE form but the CFG has not been
7749 built yet. Emit the single sequence of GIMPLE statements
7750 that make up its body. */
7751 gimple_seq body = gimple_body (fndecl);
7752
7753 if (gimple_seq_first_stmt (body)
7754 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7755 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7756 print_gimple_seq (file, body, 0, flags);
7757 else
7758 {
7759 if (!ignore_topmost_bind)
7760 fprintf (file, "{\n");
7761
7762 if (any_var)
7763 fprintf (file, "\n");
7764
7765 print_gimple_seq (file, body, 2, flags);
7766 fprintf (file, "}\n");
7767 }
7768 }
7769 else
7770 {
7771 int indent;
7772
7773 /* Make a tree based dump. */
7774 chain = DECL_SAVED_TREE (fndecl);
7775 if (chain && TREE_CODE (chain) == BIND_EXPR)
7776 {
7777 if (ignore_topmost_bind)
7778 {
7779 chain = BIND_EXPR_BODY (chain);
7780 indent = 2;
7781 }
7782 else
7783 indent = 0;
7784 }
7785 else
7786 {
7787 if (!ignore_topmost_bind)
7788 {
7789 fprintf (file, "{\n");
7790 /* No topmost bind, pretend it's ignored for later. */
7791 ignore_topmost_bind = true;
7792 }
7793 indent = 2;
7794 }
7795
7796 if (any_var)
7797 fprintf (file, "\n");
7798
7799 print_generic_stmt_indented (file, chain, flags, indent);
7800 if (ignore_topmost_bind)
7801 fprintf (file, "}\n");
7802 }
7803
7804 if (flags & TDF_ENUMERATE_LOCALS)
7805 dump_enumerated_decls (file, flags);
7806 fprintf (file, "\n\n");
7807
7808 current_function_decl = old_current_fndecl;
7809 }
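
/* A typical debugging use (illustrative): from within gdb the wrapper
   below allows dumping the current function, e.g. with virtual
   operands:

     (gdb) call debug_function (current_function_decl, TDF_VOPS)
*/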
7810
7811 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7812
7813 DEBUG_FUNCTION void
7814 debug_function (tree fn, dump_flags_t flags)
7815 {
7816 dump_function_to_file (fn, stderr, flags);
7817 }
7818
7819
7820 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7821
7822 static void
7823 print_pred_bbs (FILE *file, basic_block bb)
7824 {
7825 edge e;
7826 edge_iterator ei;
7827
7828 FOR_EACH_EDGE (e, ei, bb->preds)
7829 fprintf (file, "bb_%d ", e->src->index);
7830 }
7831
7832
7833 /* Print on FILE the indexes for the successors of basic_block BB. */
7834
7835 static void
7836 print_succ_bbs (FILE *file, basic_block bb)
7837 {
7838 edge e;
7839 edge_iterator ei;
7840
7841 FOR_EACH_EDGE (e, ei, bb->succs)
7842 fprintf (file, "bb_%d ", e->dest->index);
7843 }
7844
7845 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
7846
7847 void
7848 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7849 {
7850 char *s_indent = (char *) alloca ((size_t) indent + 1);
7851 memset ((void *) s_indent, ' ', (size_t) indent);
7852 s_indent[indent] = '\0';
7853
7854 /* Print basic_block's header. */
7855 if (verbosity >= 2)
7856 {
7857 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7858 print_pred_bbs (file, bb);
7859 fprintf (file, "}, succs = {");
7860 print_succ_bbs (file, bb);
7861 fprintf (file, "})\n");
7862 }
7863
7864 /* Print basic_block's body. */
7865 if (verbosity >= 3)
7866 {
7867 fprintf (file, "%s {\n", s_indent);
7868 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7869 fprintf (file, "%s }\n", s_indent);
7870 }
7871 }
7872
7873 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7874
7875 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7876 the VERBOSITY level, this outputs the contents of the loop, or
7877 just its structure. */
7878
7879 static void
7880 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7881 {
7882 char *s_indent;
7883 basic_block bb;
7884
7885 if (loop == NULL)
7886 return;
7887
7888 s_indent = (char *) alloca ((size_t) indent + 1);
7889 memset ((void *) s_indent, ' ', (size_t) indent);
7890 s_indent[indent] = '\0';
7891
7892 /* Print loop's header. */
7893 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7894 if (loop->header)
7895 fprintf (file, "header = %d", loop->header->index);
7896 else
7897 {
7898 fprintf (file, "deleted)\n");
7899 return;
7900 }
7901 if (loop->latch)
7902 fprintf (file, ", latch = %d", loop->latch->index);
7903 else
7904 fprintf (file, ", multiple latches");
7905 fprintf (file, ", niter = ");
7906 print_generic_expr (file, loop->nb_iterations);
7907
7908 if (loop->any_upper_bound)
7909 {
7910 fprintf (file, ", upper_bound = ");
7911 print_decu (loop->nb_iterations_upper_bound, file);
7912 }
7913 if (loop->any_likely_upper_bound)
7914 {
7915 fprintf (file, ", likely_upper_bound = ");
7916 print_decu (loop->nb_iterations_likely_upper_bound, file);
7917 }
7918
7919 if (loop->any_estimate)
7920 {
7921 fprintf (file, ", estimate = ");
7922 print_decu (loop->nb_iterations_estimate, file);
7923 }
7924 fprintf (file, ")\n");
7925
7926 /* Print loop's body. */
7927 if (verbosity >= 1)
7928 {
7929 fprintf (file, "%s{\n", s_indent);
7930 FOR_EACH_BB_FN (bb, cfun)
7931 if (bb->loop_father == loop)
7932 print_loops_bb (file, bb, indent, verbosity);
7933
7934 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7935 fprintf (file, "%s}\n", s_indent);
7936 }
7937 }
7938
7939 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7940 spaces. Depending on the VERBOSITY level, this outputs either the
7941 contents of the loops or just their structure. */
7942
7943 static void
7944 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7945 int verbosity)
7946 {
7947 if (loop == NULL)
7948 return;
7949
7950 print_loop (file, loop, indent, verbosity);
7951 print_loop_and_siblings (file, loop->next, indent, verbosity);
7952 }
7953
7954 /* Pretty print on FILE the loop structure of the current function,
7955 starting from the loop father of the entry block. */
7956
7957 void
7958 print_loops (FILE *file, int verbosity)
7959 {
7960 basic_block bb;
7961
7962 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7963 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7964 if (bb && bb->loop_father)
7965 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7966 }
7967
7968 /* Dump a loop. */
7969
7970 DEBUG_FUNCTION void
7971 debug (struct loop &ref)
7972 {
7973 print_loop (stderr, &ref, 0, /*verbosity*/0);
7974 }
7975
7976 DEBUG_FUNCTION void
7977 debug (struct loop *ptr)
7978 {
7979 if (ptr)
7980 debug (*ptr);
7981 else
7982 fprintf (stderr, "<nil>\n");
7983 }
7984
7985 /* Dump a loop verbosely. */
7986
7987 DEBUG_FUNCTION void
7988 debug_verbose (struct loop &ref)
7989 {
7990 print_loop (stderr, &ref, 0, /*verbosity*/3);
7991 }
7992
7993 DEBUG_FUNCTION void
7994 debug_verbose (struct loop *ptr)
7995 {
7996 if (ptr)
7997 debug_verbose (*ptr);
7998 else
7999 fprintf (stderr, "<nil>\n");
8000 }
8001
8002
8003 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8004
8005 DEBUG_FUNCTION void
8006 debug_loops (int verbosity)
8007 {
8008 print_loops (stderr, verbosity);
8009 }
8010
8011 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8012
8013 DEBUG_FUNCTION void
8014 debug_loop (struct loop *loop, int verbosity)
8015 {
8016 print_loop (stderr, loop, 0, verbosity);
8017 }
8018
8019 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8020 level. */
8021
8022 DEBUG_FUNCTION void
8023 debug_loop_num (unsigned num, int verbosity)
8024 {
8025 debug_loop (get_loop (cfun, num), verbosity);
8026 }
8027
8028 /* Return true if BB ends with a call, possibly followed by some
8029 instructions that must stay with the call. Return false
8030 otherwise. */
8031
8032 static bool
8033 gimple_block_ends_with_call_p (basic_block bb)
8034 {
8035 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8036 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8037 }
8038
8039
8040 /* Return true if BB ends with a conditional branch. Return false
8041 otherwise. */
8042
8043 static bool
8044 gimple_block_ends_with_condjump_p (const_basic_block bb)
8045 {
8046 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8047 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8048 }
8049
8050
8051 /* Return true if statement T may terminate execution of BB in ways not
8052 explicitly represented in the CFG. */
8053
8054 bool
8055 stmt_can_terminate_bb_p (gimple *t)
8056 {
8057 tree fndecl = NULL_TREE;
8058 int call_flags = 0;
8059
8060 /* An EH exception that is not handled internally terminates execution
8061 of the whole function. */
8062 if (stmt_can_throw_external (t))
8063 return true;
8064
8065 /* NORETURN and LONGJMP calls already have an edge to exit.
8066 CONST and PURE calls do not need one.
8067 We don't currently check for CONST and PURE here, although
8068 it would be a good idea, because those attributes are
8069 figured out from the RTL in mark_constant_function, and
8070 the counter incrementation code from -fprofile-arcs
8071 leads to different results from -fbranch-probabilities. */
8072 if (is_gimple_call (t))
8073 {
8074 fndecl = gimple_call_fndecl (t);
8075 call_flags = gimple_call_flags (t);
8076 }
8077
8078 if (is_gimple_call (t)
8079 && fndecl
8080 && DECL_BUILT_IN (fndecl)
8081 && (call_flags & ECF_NOTHROW)
8082 && !(call_flags & ECF_RETURNS_TWICE)
8083 /* fork() doesn't really return twice, but the effect of
8084 wrapping it in __gcov_fork() which calls __gcov_flush()
8085 and clears the counters before forking has the same
8086 effect as returning twice. Force a fake edge. */
8087 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8088 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8089 return false;
8090
8091 if (is_gimple_call (t))
8092 {
8093 edge_iterator ei;
8094 edge e;
8095 basic_block bb;
8096
8097 if (call_flags & (ECF_PURE | ECF_CONST)
8098 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8099 return false;
8100
8101 /* A function call may do a longjmp, terminate the program, or do other
8102 things. Special-case noreturn calls that have no non-fake outgoing
8103 edges, as there the fact is sufficiently represented by the lack of edges out of T. */
8104 if (!(call_flags & ECF_NORETURN))
8105 return true;
8106
8107 bb = gimple_bb (t);
8108 FOR_EACH_EDGE (e, ei, bb->succs)
8109 if ((e->flags & EDGE_FAKE) == 0)
8110 return true;
8111 }
8112
8113 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8114 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8115 return true;
8116
8117 return false;
8118 }
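
/* For illustration (a sketch, not part of the machinery above): given

     x = __builtin_abs (i);    <- nothrow const builtin; cannot
                                  terminate the block unexpectedly
     foo ();                   <- unknown external call; may longjmp
                                  or exit, so it can terminate BB
     __asm__ volatile ("");    <- volatile asm; conservatively assumed
                                  to be able to terminate BB

   stmt_can_terminate_bb_p returns false for the first statement and
   true for the latter two.  */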
8119
8120
8121 /* Add fake edges to the function exit for any non-constant and
8122 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8123 volatile inline assembly, in the bitmap of blocks specified by BLOCKS,
8124 or in the whole CFG if BLOCKS is zero. Return the number of blocks
8125 that were split.
8126
8127 The goal is to expose cases in which entering a basic block does
8128 not imply that all subsequent instructions must be executed. */
8129
8130 static int
8131 gimple_flow_call_edges_add (sbitmap blocks)
8132 {
8133 int i;
8134 int blocks_split = 0;
8135 int last_bb = last_basic_block_for_fn (cfun);
8136 bool check_last_block = false;
8137
8138 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8139 return 0;
8140
8141 if (! blocks)
8142 check_last_block = true;
8143 else
8144 check_last_block = bitmap_bit_p (blocks,
8145 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8146
8147 /* In the last basic block, before epilogue generation, there will be
8148 a fallthru edge to EXIT. Special care is required if the last insn
8149 of the last basic block is a call because make_edge folds duplicate
8150 edges, which would result in the fallthru edge also being marked
8151 fake, which would result in the fallthru edge being removed by
8152 remove_fake_edges, which would result in an invalid CFG.
8153
8154 Moreover, we can't elide the outgoing fake edge, since the block
8155 profiler needs to take this into account in order to solve the minimal
8156 spanning tree in the case that the call doesn't return.
8157
8158 Handle this by adding a dummy instruction in a new last basic block. */
8159 if (check_last_block)
8160 {
8161 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8162 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8163 gimple *t = NULL;
8164
8165 if (!gsi_end_p (gsi))
8166 t = gsi_stmt (gsi);
8167
8168 if (t && stmt_can_terminate_bb_p (t))
8169 {
8170 edge e;
8171
8172 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8173 if (e)
8174 {
8175 gsi_insert_on_edge (e, gimple_build_nop ());
8176 gsi_commit_edge_inserts ();
8177 }
8178 }
8179 }
8180
8181 /* Now add fake edges to the function exit for any non-constant
8182 calls, since there is no way that we can determine whether they
8183 will return or not... */
8184 for (i = 0; i < last_bb; i++)
8185 {
8186 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8187 gimple_stmt_iterator gsi;
8188 gimple *stmt, *last_stmt;
8189
8190 if (!bb)
8191 continue;
8192
8193 if (blocks && !bitmap_bit_p (blocks, i))
8194 continue;
8195
8196 gsi = gsi_last_nondebug_bb (bb);
8197 if (!gsi_end_p (gsi))
8198 {
8199 last_stmt = gsi_stmt (gsi);
8200 do
8201 {
8202 stmt = gsi_stmt (gsi);
8203 if (stmt_can_terminate_bb_p (stmt))
8204 {
8205 edge e;
8206
8207 /* The handling above of the final block before the
8208 epilogue should be enough to verify that there is
8209 no edge to the exit block in the CFG already.
8210 Calling make_edge in such a case would cause us to
8211 mark that edge as fake and remove it later. */
8212 if (flag_checking && stmt == last_stmt)
8213 {
8214 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8215 gcc_assert (e == NULL);
8216 }
8217
8218 /* Note that the following may create a new basic block
8219 and renumber the existing basic blocks. */
8220 if (stmt != last_stmt)
8221 {
8222 e = split_block (bb, stmt);
8223 if (e)
8224 blocks_split++;
8225 }
8226 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8227 e->probability = profile_probability::guessed_never ();
8228 e->count = profile_count::guessed_zero ();
8229 }
8230 gsi_prev (&gsi);
8231 }
8232 while (!gsi_end_p (gsi));
8233 }
8234 }
8235
8236 if (blocks_split)
8237 checking_verify_flow_info ();
8238
8239 return blocks_split;
8240 }
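
/* For example (an illustrative sketch): in

     void f (int n) { if (n) exit (1); g (); }

   the call to g () may never return, so entering its basic block does
   not imply that the end of the block is reached.  The fake edge added
   above from that block to the exit block lets the block profiler
   account for this when solving the minimal spanning tree.  */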
8241
8242 /* Removes edge E and all the blocks dominated by it, and updates dominance
8243 information. The IL in E->src needs to be updated separately.
8244 If dominance info is not available, only the edge E is removed. */
8245
8246 void
8247 remove_edge_and_dominated_blocks (edge e)
8248 {
8249 vec<basic_block> bbs_to_remove = vNULL;
8250 vec<basic_block> bbs_to_fix_dom = vNULL;
8251 edge f;
8252 edge_iterator ei;
8253 bool none_removed = false;
8254 unsigned i;
8255 basic_block bb, dbb;
8256 bitmap_iterator bi;
8257
8258 /* Removing a path inside a non-root loop may change loop ownership
8259 of blocks or remove loops entirely. Mark loops for fixup. */
8260 if (current_loops
8261 && loop_outer (e->src->loop_father) != NULL
8262 && e->src->loop_father == e->dest->loop_father)
8263 loops_state_set (LOOPS_NEED_FIXUP);
8264
8265 if (!dom_info_available_p (CDI_DOMINATORS))
8266 {
8267 remove_edge (e);
8268 return;
8269 }
8270
8271 /* No updating is needed for edges to exit. */
8272 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8273 {
8274 if (cfgcleanup_altered_bbs)
8275 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8276 remove_edge (e);
8277 return;
8278 }
8279
8280 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8281 that is not dominated by E->dest, then this set is empty. Otherwise,
8282 all the basic blocks dominated by E->dest are removed.
8283
8284 Also, to DF_IDOM we store the immediate dominators of the blocks in
8285 the dominance frontier of E (i.e., of the successors of the
8286 removed blocks, if there are any, and of E->dest otherwise). */
8287 FOR_EACH_EDGE (f, ei, e->dest->preds)
8288 {
8289 if (f == e)
8290 continue;
8291
8292 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8293 {
8294 none_removed = true;
8295 break;
8296 }
8297 }
8298
8299 auto_bitmap df, df_idom;
8300 if (none_removed)
8301 bitmap_set_bit (df_idom,
8302 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8303 else
8304 {
8305 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8306 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8307 {
8308 FOR_EACH_EDGE (f, ei, bb->succs)
8309 {
8310 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8311 bitmap_set_bit (df, f->dest->index);
8312 }
8313 }
8314 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8315 bitmap_clear_bit (df, bb->index);
8316
8317 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8318 {
8319 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8320 bitmap_set_bit (df_idom,
8321 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8322 }
8323 }
8324
8325 if (cfgcleanup_altered_bbs)
8326 {
8327 /* Record the set of the altered basic blocks. */
8328 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8329 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8330 }
8331
8332 /* Remove E and the cancelled blocks. */
8333 if (none_removed)
8334 remove_edge (e);
8335 else
8336 {
8337 /* Walk backwards so as to get a chance to substitute all
8338 released DEFs into debug stmts. See
8339 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8340 details. */
8341 for (i = bbs_to_remove.length (); i-- > 0; )
8342 delete_basic_block (bbs_to_remove[i]);
8343 }
8344
8345 /* Update the dominance information. The immediate dominator may change only
8346 for blocks whose immediate dominator belongs to DF_IDOM:
8347
8348 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8349 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8350 Z dominates X after the removal. Before removal, there exists a path P
8351 from Y to X that avoids Z. Let F be the last edge on P that is
8352 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8353 dominates W, and because of P, Z does not dominate W), and W belongs to
8354 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
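/* A concrete illustration (a sketch): suppose E = Y->W, W's only
   predecessor is Y, and Z and W both feed a join block X:

	  Y
	 / \
	Z   W
	 \ /
	  X

   Removing E makes W unreachable, so W is deleted.  Before the removal
   idom(X) = Y; afterwards idom(X) = Z.  X is in the dominance frontier
   of the removed blocks, its old immediate dominator Y was recorded in
   DF_IDOM above, and X is re-examined below as a dominator son of Y.  */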
8355 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8356 {
8357 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8358 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8359 dbb;
8360 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8361 bbs_to_fix_dom.safe_push (dbb);
8362 }
8363
8364 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8365
8366 bbs_to_remove.release ();
8367 bbs_to_fix_dom.release ();
8368 }
8369
8370 /* Purge dead EH edges from basic block BB. */
8371
8372 bool
8373 gimple_purge_dead_eh_edges (basic_block bb)
8374 {
8375 bool changed = false;
8376 edge e;
8377 edge_iterator ei;
8378 gimple *stmt = last_stmt (bb);
8379
8380 if (stmt && stmt_can_throw_internal (stmt))
8381 return false;
8382
8383 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8384 {
8385 if (e->flags & EDGE_EH)
8386 {
8387 remove_edge_and_dominated_blocks (e);
8388 changed = true;
8389 }
8390 else
8391 ei_next (&ei);
8392 }
8393
8394 return changed;
8395 }
8396
8397 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8398
8399 bool
8400 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8401 {
8402 bool changed = false;
8403 unsigned i;
8404 bitmap_iterator bi;
8405
8406 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8407 {
8408 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8409
8410 /* Earlier gimple_purge_dead_eh_edges could have removed
8411 this basic block already. */
8412 gcc_assert (bb || changed);
8413 if (bb != NULL)
8414 changed |= gimple_purge_dead_eh_edges (bb);
8415 }
8416
8417 return changed;
8418 }
8419
8420 /* Purge dead abnormal call edges from basic block BB. */
8421
8422 bool
8423 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8424 {
8425 bool changed = false;
8426 edge e;
8427 edge_iterator ei;
8428 gimple *stmt = last_stmt (bb);
8429
8430 if (!cfun->has_nonlocal_label
8431 && !cfun->calls_setjmp)
8432 return false;
8433
8434 if (stmt && stmt_can_make_abnormal_goto (stmt))
8435 return false;
8436
8437 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8438 {
8439 if (e->flags & EDGE_ABNORMAL)
8440 {
8441 if (e->flags & EDGE_FALLTHRU)
8442 e->flags &= ~EDGE_ABNORMAL;
8443 else
8444 remove_edge_and_dominated_blocks (e);
8445 changed = true;
8446 }
8447 else
8448 ei_next (&ei);
8449 }
8450
8451 return changed;
8452 }
8453
8454 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8455
8456 bool
8457 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8458 {
8459 bool changed = false;
8460 unsigned i;
8461 bitmap_iterator bi;
8462
8463 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8464 {
8465 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8466
8467 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8468 this basic block already. */
8469 gcc_assert (bb || changed);
8470 if (bb != NULL)
8471 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8472 }
8473
8474 return changed;
8475 }
8476
8477 /* This function is called whenever a new edge is created or
8478 redirected. */
8479
8480 static void
8481 gimple_execute_on_growing_pred (edge e)
8482 {
8483 basic_block bb = e->dest;
8484
8485 if (!gimple_seq_empty_p (phi_nodes (bb)))
8486 reserve_phi_args_for_new_edge (bb);
8487 }
8488
8489 /* This function is called immediately before edge E is removed from
8490 the edge vector E->dest->preds. */
8491
8492 static void
8493 gimple_execute_on_shrinking_pred (edge e)
8494 {
8495 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8496 remove_phi_args (e);
8497 }
8498
8499 /*---------------------------------------------------------------------------
8500 Helper functions for Loop versioning
8501 ---------------------------------------------------------------------------*/
8502
8503 /* Adjust phi nodes for the 'first' basic block. The 'second' basic
8504 block is a copy of 'first', and both are dominated by the 'new_head'
8505 basic block. When 'new_head' was created by splitting 'second's
8506 incoming edge, the edge from 'new_head' to 'second' received phi
8507 arguments from split_edge(). Later, an additional edge 'e' was created
8508 to connect 'new_head' and 'first'. This routine now adds, on edge 'e',
8509 the phi args that the 'new_head' to 'second' edge received as part of the edge splitting. */
8510
8511 static void
8512 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8513 basic_block new_head, edge e)
8514 {
8515 gphi *phi1, *phi2;
8516 gphi_iterator psi1, psi2;
8517 tree def;
8518 edge e2 = find_edge (new_head, second);
8519
8520 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8521 edge, we should always have an edge from NEW_HEAD to SECOND. */
8522 gcc_assert (e2 != NULL);
8523
8524 /* Browse all 'second' basic block phi nodes and add phi args to
8525 edge 'e' for 'first' head. PHI args are always in correct order. */
8526
8527 for (psi2 = gsi_start_phis (second),
8528 psi1 = gsi_start_phis (first);
8529 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8530 gsi_next (&psi2), gsi_next (&psi1))
8531 {
8532 phi1 = psi1.phi ();
8533 phi2 = psi2.phi ();
8534 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8535 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8536 }
8537 }
8538
8539
8540 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8541 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8542 the destination of the ELSE part. */
8543
8544 static void
8545 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8546 basic_block second_head ATTRIBUTE_UNUSED,
8547 basic_block cond_bb, void *cond_e)
8548 {
8549 gimple_stmt_iterator gsi;
8550 gimple *new_cond_expr;
8551 tree cond_expr = (tree) cond_e;
8552 edge e0;
8553
8554 /* Build the new conditional expression. */
8555 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8556 NULL_TREE, NULL_TREE);
8557
8558 /* Add new cond in cond_bb. */
8559 gsi = gsi_last_bb (cond_bb);
8560 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8561
8562 /* Adjust edges appropriately to connect new head with first head
8563 as well as second head. */
8564 e0 = single_succ_edge (cond_bb);
8565 e0->flags &= ~EDGE_FALLTHRU;
8566 e0->flags |= EDGE_FALSE_VALUE;
8567 }
8568
8569
8570 /* Do book-keeping of basic block BB for the profile consistency
8571 checker. If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS
8572 is 1, do post-pass accounting. Store the counts in RECORD. */
8573 static void
8574 gimple_account_profile_record (basic_block bb, int after_pass,
8575 struct profile_record *record)
8576 {
8577 gimple_stmt_iterator i;
8578 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8579 {
8580 record->size[after_pass]
8581 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8582 if (bb->count.initialized_p ())
8583 record->time[after_pass]
8584 += estimate_num_insns (gsi_stmt (i),
8585 &eni_time_weights) * bb->count.to_gcov_type ();
8586 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8587 record->time[after_pass]
8588 += estimate_num_insns (gsi_stmt (i),
8589 &eni_time_weights) * bb->frequency;
8590 }
8591 }
8592
8593 struct cfg_hooks gimple_cfg_hooks = {
8594 "gimple",
8595 gimple_verify_flow_info,
8596 gimple_dump_bb, /* dump_bb */
8597 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8598 create_bb, /* create_basic_block */
8599 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8600 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8601 gimple_can_remove_branch_p, /* can_remove_branch_p */
8602 remove_bb, /* delete_basic_block */
8603 gimple_split_block, /* split_block */
8604 gimple_move_block_after, /* move_block_after */
8605 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8606 gimple_merge_blocks, /* merge_blocks */
8607 gimple_predict_edge, /* predict_edge */
8608 gimple_predicted_by_p, /* predicted_by_p */
8609 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8610 gimple_duplicate_bb, /* duplicate_block */
8611 gimple_split_edge, /* split_edge */
8612 gimple_make_forwarder_block, /* make_forwarder_block */
8613 NULL, /* tidy_fallthru_edge */
8614 NULL, /* force_nonfallthru */
8615 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8616 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8617 gimple_flow_call_edges_add, /* flow_call_edges_add */
8618 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8619 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8620 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8621 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8622 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8623 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8624 flush_pending_stmts, /* flush_pending_stmts */
8625 gimple_empty_block_p, /* block_empty_p */
8626 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8627 gimple_account_profile_record,
8628 };
8629
8630
8631 /* Split all critical edges. */
8632
8633 unsigned int
8634 split_critical_edges (void)
8635 {
8636 basic_block bb;
8637 edge e;
8638 edge_iterator ei;
8639
8640 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8641 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8642 mappings around the calls to split_edge. */
8643 start_recording_case_labels ();
8644 FOR_ALL_BB_FN (bb, cfun)
8645 {
8646 FOR_EACH_EDGE (e, ei, bb->succs)
8647 {
8648 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8649 split_edge (e);
8650 /* PRE inserts statements on edges and expects that, since
8651 split_critical_edges was run beforehand, committing edge
8652 insertions will not split more edges. In addition to critical
8653 edges we must split edges whose source block has multiple
8654 successors and ends in a control flow statement, such as RESX.
8655 Go ahead and split them too. This matches the logic in
8656 gimple_find_edge_insert_loc. */
8657 else if ((!single_pred_p (e->dest)
8658 || !gimple_seq_empty_p (phi_nodes (e->dest))
8659 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8660 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8661 && !(e->flags & EDGE_ABNORMAL))
8662 {
8663 gimple_stmt_iterator gsi;
8664
8665 gsi = gsi_last_bb (e->src);
8666 if (!gsi_end_p (gsi)
8667 && stmt_ends_bb_p (gsi_stmt (gsi))
8668 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8669 && !gimple_call_builtin_p (gsi_stmt (gsi),
8670 BUILT_IN_RETURN)))
8671 split_edge (e);
8672 }
8673 }
8674 }
8675 end_recording_case_labels ();
8676 return 0;
8677 }
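
/* Example (a sketch): the edge A->C below is critical, since A has two
   successors and C has two predecessors:

	 A    B
	/ \  /
       X   C

   Splitting it inserts a new empty block N so the path becomes
   A->N->C, giving edge insertions an unambiguous location.  */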
8678
8679 namespace {
8680
8681 const pass_data pass_data_split_crit_edges =
8682 {
8683 GIMPLE_PASS, /* type */
8684 "crited", /* name */
8685 OPTGROUP_NONE, /* optinfo_flags */
8686 TV_TREE_SPLIT_EDGES, /* tv_id */
8687 PROP_cfg, /* properties_required */
8688 PROP_no_crit_edges, /* properties_provided */
8689 0, /* properties_destroyed */
8690 0, /* todo_flags_start */
8691 0, /* todo_flags_finish */
8692 };
8693
8694 class pass_split_crit_edges : public gimple_opt_pass
8695 {
8696 public:
8697 pass_split_crit_edges (gcc::context *ctxt)
8698 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8699 {}
8700
8701 /* opt_pass methods: */
8702 virtual unsigned int execute (function *) { return split_critical_edges (); }
8703
8704 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8705 }; // class pass_split_crit_edges
8706
8707 } // anon namespace
8708
8709 gimple_opt_pass *
8710 make_pass_split_crit_edges (gcc::context *ctxt)
8711 {
8712 return new pass_split_crit_edges (ctxt);
8713 }
8714
8715
8716 /* Insert COND, which must be a GIMPLE_COND, after STMT in basic
8717 block BB, splitting the block as appropriate and creating a new
8718 conditionally executed basic block. Update the profile so the new
8719 bb is visited with probability PROB.
8720 Return the created basic block. */
8721 basic_block
8722 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
8723 profile_probability prob)
8724 {
8725 edge fall = split_block (bb, stmt);
8726 gimple_stmt_iterator iter = gsi_last_bb (bb);
8727 basic_block new_bb;
8728
8729 /* Insert cond statement. */
8730 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8731 if (gsi_end_p (iter))
8732 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8733 else
8734 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8735
8736 /* Create conditionally executed block. */
8737 new_bb = create_empty_bb (bb);
8738 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8739 e->probability = prob;
8740 e->count = bb->count.apply_probability (prob);
8741 new_bb->count = e->count;
8742 new_bb->frequency = prob.apply (bb->frequency);
8743 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8744
8745 /* Fix edge for split bb. */
8746 fall->flags = EDGE_FALSE_VALUE;
8747 fall->count -= e->count;
8748 fall->probability -= e->probability;
8749
8750 /* Update dominance info. */
8751 if (dom_info_available_p (CDI_DOMINATORS))
8752 {
8753 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8754 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8755 }
8756
8757 /* Update loop info. */
8758 if (current_loops)
8759 add_bb_to_loop (new_bb, bb->loop_father);
8760
8761 return new_bb;
8762 }
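
/* Usage sketch (hypothetical caller code; FLAG is assumed to be a
   boolean-typed value valid at STMT):

     gcond *cond = gimple_build_cond (NE_EXPR, flag, boolean_false_node,
				      NULL_TREE, NULL_TREE);
     basic_block then_bb
       = insert_cond_bb (bb, stmt, cond, profile_probability::even ());

   Statements added to THEN_BB then execute only when FLAG is true,
   with an estimated probability of 1/2.  */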
8763
8764 /* Build a ternary operation and gimplify it. Emit code before GSI.
8765 Return the gimple_val holding the result. */
8766
8767 tree
8768 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8769 tree type, tree a, tree b, tree c)
8770 {
8771 tree ret;
8772 location_t loc = gimple_location (gsi_stmt (*gsi));
8773
8774 ret = fold_build3_loc (loc, code, type, a, b, c);
8775 STRIP_NOPS (ret);
8776
8777 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8778 GSI_SAME_STMT);
8779 }
8780
8781 /* Build a binary operation and gimplify it. Emit code before GSI.
8782 Return the gimple_val holding the result. */
8783
8784 tree
8785 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8786 tree type, tree a, tree b)
8787 {
8788 tree ret;
8789
8790 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8791 STRIP_NOPS (ret);
8792
8793 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8794 GSI_SAME_STMT);
8795 }
8796
8797 /* Build a unary operation and gimplify it. Emit code before GSI.
8798 Return the gimple_val holding the result. */
8799
8800 tree
8801 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8802 tree a)
8803 {
8804 tree ret;
8805
8806 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8807 STRIP_NOPS (ret);
8808
8809 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8810 GSI_SAME_STMT);
8811 }
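
/* Usage sketch (hypothetical; A, B and C are assumed to be gimple
   values of type TYPE valid at GSI):

     tree sum  = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);

   Each call folds the expression, emits any statements needed to
   gimplify it before GSI, and returns a gimple value holding the
   result.  */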
8812
8813
8814 \f
8815 /* Given a basic block B which ends with a conditional and has
8816 precisely two successors, determine which of the edges is taken if
8817 the conditional is true and which is taken if the conditional is
8818 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8819
8820 void
8821 extract_true_false_edges_from_block (basic_block b,
8822 edge *true_edge,
8823 edge *false_edge)
8824 {
8825 edge e = EDGE_SUCC (b, 0);
8826
8827 if (e->flags & EDGE_TRUE_VALUE)
8828 {
8829 *true_edge = e;
8830 *false_edge = EDGE_SUCC (b, 1);
8831 }
8832 else
8833 {
8834 *false_edge = e;
8835 *true_edge = EDGE_SUCC (b, 1);
8836 }
8837 }
8838
8839
8840 /* From a controlling predicate in the immediate dominator DOM of
8841 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8842 predicate evaluates to true and false and store them to
8843 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8844 they are non-NULL. Return true if the edges can be determined,
8845 false otherwise. */
8846
8847 bool
8848 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8849 edge *true_controlled_edge,
8850 edge *false_controlled_edge)
8851 {
8852 basic_block bb = phiblock;
8853 edge true_edge, false_edge, tem;
8854 edge e0 = NULL, e1 = NULL;
8855
8856 /* We have to verify that one edge into the PHI node is dominated
8857 by the true edge of the predicate block and the other edge
8858 dominated by the false edge. This ensures that the PHI argument
8859 we are going to take is completely determined by the path we
8860 take from the predicate block.
8861 We can only use BB dominance checks below if the destination of
8862 the true/false edges are dominated by their edge, thus only
8863 have a single predecessor. */
8864 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8865 tem = EDGE_PRED (bb, 0);
8866 if (tem == true_edge
8867 || (single_pred_p (true_edge->dest)
8868 && (tem->src == true_edge->dest
8869 || dominated_by_p (CDI_DOMINATORS,
8870 tem->src, true_edge->dest))))
8871 e0 = tem;
8872 else if (tem == false_edge
8873 || (single_pred_p (false_edge->dest)
8874 && (tem->src == false_edge->dest
8875 || dominated_by_p (CDI_DOMINATORS,
8876 tem->src, false_edge->dest))))
8877 e1 = tem;
8878 else
8879 return false;
8880 tem = EDGE_PRED (bb, 1);
8881 if (tem == true_edge
8882 || (single_pred_p (true_edge->dest)
8883 && (tem->src == true_edge->dest
8884 || dominated_by_p (CDI_DOMINATORS,
8885 tem->src, true_edge->dest))))
8886 e0 = tem;
8887 else if (tem == false_edge
8888 || (single_pred_p (false_edge->dest)
8889 && (tem->src == false_edge->dest
8890 || dominated_by_p (CDI_DOMINATORS,
8891 tem->src, false_edge->dest))))
8892 e1 = tem;
8893 else
8894 return false;
8895 if (!e0 || !e1)
8896 return false;
8897
8898 if (true_controlled_edge)
8899 *true_controlled_edge = e0;
8900 if (false_controlled_edge)
8901 *false_controlled_edge = e1;
8902
8903 return true;
8904 }
8905
8906
8907
8908 /* Emit return warnings. */
8909
8910 namespace {
8911
8912 const pass_data pass_data_warn_function_return =
8913 {
8914 GIMPLE_PASS, /* type */
8915 "*warn_function_return", /* name */
8916 OPTGROUP_NONE, /* optinfo_flags */
8917 TV_NONE, /* tv_id */
8918 PROP_cfg, /* properties_required */
8919 0, /* properties_provided */
8920 0, /* properties_destroyed */
8921 0, /* todo_flags_start */
8922 0, /* todo_flags_finish */
8923 };
8924
8925 class pass_warn_function_return : public gimple_opt_pass
8926 {
8927 public:
8928 pass_warn_function_return (gcc::context *ctxt)
8929 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8930 {}
8931
8932 /* opt_pass methods: */
8933 virtual unsigned int execute (function *);
8934
8935 }; // class pass_warn_function_return
8936
8937 unsigned int
8938 pass_warn_function_return::execute (function *fun)
8939 {
8940 source_location location;
8941 gimple *last;
8942 edge e;
8943 edge_iterator ei;
8944
8945 if (!targetm.warn_func_return (fun->decl))
8946 return 0;
8947
8948 /* If we have a path to EXIT, then we do return. */
8949 if (TREE_THIS_VOLATILE (fun->decl)
8950 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8951 {
8952 location = UNKNOWN_LOCATION;
8953 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8954 {
8955 last = last_stmt (e->src);
8956 if ((gimple_code (last) == GIMPLE_RETURN
8957 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8958 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8959 break;
8960 }
8961 if (location == UNKNOWN_LOCATION)
8962 location = cfun->function_end_locus;
8963 warning_at (location, 0, "%<noreturn%> function does return");
8964 }
8965
8966 /* If we see "return;" in some basic block, then we do reach the end
8967 without returning a value. */
8968 else if (warn_return_type
8969 && !TREE_NO_WARNING (fun->decl)
8970 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8971 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8972 {
8973 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8974 {
8975 gimple *last = last_stmt (e->src);
8976 greturn *return_stmt = dyn_cast <greturn *> (last);
8977 if (return_stmt
8978 && gimple_return_retval (return_stmt) == NULL
8979 && !gimple_no_warning_p (last))
8980 {
8981 location = gimple_location (last);
8982 if (location == UNKNOWN_LOCATION)
8983 location = fun->function_end_locus;
8984 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8985 TREE_NO_WARNING (fun->decl) = 1;
8986 break;
8987 }
8988 }
8989 }
8990 return 0;
8991 }
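
/* Examples of code diagnosed by the pass above (illustrative sketches):

     __attribute__ ((noreturn)) void f (void) { }

   warns "'noreturn' function does return", since a path reaches EXIT;

     int g (int i) { if (i) return 1; }

   warns "control reaches end of non-void function", via the implicit
   plain "return;" on the fall-through path.  */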
8992
8993 } // anon namespace
8994
8995 gimple_opt_pass *
8996 make_pass_warn_function_return (gcc::context *ctxt)
8997 {
8998 return new pass_warn_function_return (ctxt);
8999 }
9000
9001 /* Walk a gimplified function and warn about calls whose return value is
9002 ignored when the callee's type has attribute ((warn_unused_result))
9003 set. This is done before inlining, so we don't have to worry about that. */
9004
9005 static void
9006 do_warn_unused_result (gimple_seq seq)
9007 {
9008 tree fdecl, ftype;
9009 gimple_stmt_iterator i;
9010
9011 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9012 {
9013 gimple *g = gsi_stmt (i);
9014
9015 switch (gimple_code (g))
9016 {
9017 case GIMPLE_BIND:
9018 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9019 break;
9020 case GIMPLE_TRY:
9021 do_warn_unused_result (gimple_try_eval (g));
9022 do_warn_unused_result (gimple_try_cleanup (g));
9023 break;
9024 case GIMPLE_CATCH:
9025 do_warn_unused_result (gimple_catch_handler (
9026 as_a <gcatch *> (g)));
9027 break;
9028 case GIMPLE_EH_FILTER:
9029 do_warn_unused_result (gimple_eh_filter_failure (g));
9030 break;
9031
9032 case GIMPLE_CALL:
9033 if (gimple_call_lhs (g))
9034 break;
9035 if (gimple_call_internal_p (g))
9036 break;
9037
9038 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9039 LHS. All calls whose value is ignored should be
9040 represented like this. Look for the attribute. */
9041 fdecl = gimple_call_fndecl (g);
9042 ftype = gimple_call_fntype (g);
9043
9044 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9045 {
9046 location_t loc = gimple_location (g);
9047
9048 if (fdecl)
9049 warning_at (loc, OPT_Wunused_result,
9050 "ignoring return value of %qD, "
9051 "declared with attribute warn_unused_result",
9052 fdecl);
9053 else
9054 warning_at (loc, OPT_Wunused_result,
9055 "ignoring return value of function "
9056 "declared with attribute warn_unused_result");
9057 }
9058 break;
9059
9060 default:
9061 /* Not a container, not a call, or a call whose value is used. */
9062 break;
9063 }
9064 }
9065 }
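
/* For example (an illustrative sketch):

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   here the call to f () is a naked GIMPLE_CALL with no LHS, so the
   walk above reports it with -Wunused-result.  */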
9066
9067 namespace {
9068
9069 const pass_data pass_data_warn_unused_result =
9070 {
9071 GIMPLE_PASS, /* type */
9072 "*warn_unused_result", /* name */
9073 OPTGROUP_NONE, /* optinfo_flags */
9074 TV_NONE, /* tv_id */
9075 PROP_gimple_any, /* properties_required */
9076 0, /* properties_provided */
9077 0, /* properties_destroyed */
9078 0, /* todo_flags_start */
9079 0, /* todo_flags_finish */
9080 };
9081
9082 class pass_warn_unused_result : public gimple_opt_pass
9083 {
9084 public:
9085 pass_warn_unused_result (gcc::context *ctxt)
9086 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9087 {}
9088
9089 /* opt_pass methods: */
9090 virtual bool gate (function *) { return flag_warn_unused_result; }
9091 virtual unsigned int execute (function *)
9092 {
9093 do_warn_unused_result (gimple_body (current_function_decl));
9094 return 0;
9095 }
9096
9097 }; // class pass_warn_unused_result
9098
9099 } // anon namespace
9100
9101 gimple_opt_pass *
9102 make_pass_warn_unused_result (gcc::context *ctxt)
9103 {
9104 return new pass_warn_unused_result (ctxt);
9105 }
9106
9107 /* IPA passes, compilation of earlier functions or inlining
9108 might have changed some properties, such as marking functions
9109 nothrow, pure, const or noreturn.
9110 Remove redundant edges and basic blocks, and create new ones if necessary.
9111
9112 This pass can't be executed as a standalone pass from the pass manager,
9113 because in between inlining and this fixup verify_flow_info would fail. */
9114
9115 unsigned int
9116 execute_fixup_cfg (void)
9117 {
9118 basic_block bb;
9119 gimple_stmt_iterator gsi;
9120 int todo = 0;
9121 edge e;
9122 edge_iterator ei;
9123 cgraph_node *node = cgraph_node::get (current_function_decl);
9124 profile_count num = node->count;
9125 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9126 bool scale = num.initialized_p ()
9127 && (den > 0 || num == profile_count::zero ())
9128 && !(num == den);
9129
9130 if (scale)
9131 {
9132 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9133 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9134 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9135
9136 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
9137 e->count = e->count.apply_scale (num, den);
9138 }
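
  /* E.g. (illustrative): if the cgraph node records NUM = 200
     executions while the entry block still carries DEN = 100, every
     scaled count above and below is multiplied by 200/100 = 2.  */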
9139
9140 FOR_EACH_BB_FN (bb, cfun)
9141 {
9142 if (scale)
9143 bb->count = bb->count.apply_scale (num, den);
9144 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9145 {
9146 gimple *stmt = gsi_stmt (gsi);
9147 tree decl = is_gimple_call (stmt)
9148 ? gimple_call_fndecl (stmt)
9149 : NULL;
9150 if (decl)
9151 {
9152 int flags = gimple_call_flags (stmt);
9153 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9154 {
9155 if (gimple_purge_dead_abnormal_call_edges (bb))
9156 todo |= TODO_cleanup_cfg;
9157
9158 if (gimple_in_ssa_p (cfun))
9159 {
9160 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9161 update_stmt (stmt);
9162 }
9163 }
9164
9165 if (flags & ECF_NORETURN
9166 && fixup_noreturn_call (stmt))
9167 todo |= TODO_cleanup_cfg;
9168 }
9169
9170 /* Remove stores to variables we marked write-only.
9171 Keep the access when the store has side effects, i.e. when the
9172 source is volatile. */
9173 if (gimple_store_p (stmt)
9174 && !gimple_has_side_effects (stmt))
9175 {
9176 tree lhs = get_base_address (gimple_get_lhs (stmt));
9177
9178 if (VAR_P (lhs)
9179 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9180 && varpool_node::get (lhs)->writeonly)
9181 {
9182 unlink_stmt_vdef (stmt);
9183 gsi_remove (&gsi, true);
9184 release_defs (stmt);
9185 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9186 continue;
9187 }
9188 }
9189 /* For calls we can simply remove the LHS when it is known
9190 to be write-only. */
9191 if (is_gimple_call (stmt)
9192 && gimple_get_lhs (stmt))
9193 {
9194 tree lhs = get_base_address (gimple_get_lhs (stmt));
9195
9196 if (VAR_P (lhs)
9197 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9198 && varpool_node::get (lhs)->writeonly)
9199 {
9200 gimple_call_set_lhs (stmt, NULL);
9201 update_stmt (stmt);
9202 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9203 }
9204 }
9205
9206 if (maybe_clean_eh_stmt (stmt)
9207 && gimple_purge_dead_eh_edges (bb))
9208 todo |= TODO_cleanup_cfg;
9209 gsi_next (&gsi);
9210 }
9211
9212 if (scale)
9213 FOR_EACH_EDGE (e, ei, bb->succs)
9214 e->count = e->count.apply_scale (num, den);
9215
9216 /* If we have a basic block with no successors that does not
9217 end with a control statement or a noreturn call, end it with
9218 a call to __builtin_unreachable. This situation can occur
9219 when inlining a noreturn call that does in fact return. */
9220 if (EDGE_COUNT (bb->succs) == 0)
9221 {
9222 gimple *stmt = last_stmt (bb);
9223 if (!stmt
9224 || (!is_ctrl_stmt (stmt)
9225 && (!is_gimple_call (stmt)
9226 || !gimple_call_noreturn_p (stmt))))
9227 {
9228 if (stmt && is_gimple_call (stmt))
9229 gimple_call_set_ctrl_altering (stmt, false);
9230 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9231 stmt = gimple_build_call (fndecl, 0);
9232 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9233 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9234 if (!cfun->after_inlining)
9235 {
9236 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9237 int freq
9238 = compute_call_stmt_bb_frequency (current_function_decl,
9239 bb);
9240 node->create_edge (cgraph_node::get_create (fndecl),
9241 call_stmt, bb->count, freq);
9242 }
9243 }
9244 }
9245 }
9246 if (scale)
9247 compute_function_frequency ();
9248
9249 if (current_loops
9250 && (todo & TODO_cleanup_cfg))
9251 loops_state_set (LOOPS_NEED_FIXUP);
9252
9253 return todo;
9254 }
9255
9256 namespace {
9257
9258 const pass_data pass_data_fixup_cfg =
9259 {
9260 GIMPLE_PASS, /* type */
9261 "fixup_cfg", /* name */
9262 OPTGROUP_NONE, /* optinfo_flags */
9263 TV_NONE, /* tv_id */
9264 PROP_cfg, /* properties_required */
9265 0, /* properties_provided */
9266 0, /* properties_destroyed */
9267 0, /* todo_flags_start */
9268 0, /* todo_flags_finish */
9269 };
9270
9271 class pass_fixup_cfg : public gimple_opt_pass
9272 {
9273 public:
9274 pass_fixup_cfg (gcc::context *ctxt)
9275 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9276 {}
9277
9278 /* opt_pass methods: */
9279 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9280 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9281
9282 }; // class pass_fixup_cfg
9283
9284 } // anon namespace
9285
9286 gimple_opt_pass *
9287 make_pass_fixup_cfg (gcc::context *ctxt)
9288 {
9289 return new pass_fixup_cfg (ctxt);
9290 }
9291
9292 /* Garbage collection support for edge_def. */
9293
9294 extern void gt_ggc_mx (tree&);
9295 extern void gt_ggc_mx (gimple *&);
9296 extern void gt_ggc_mx (rtx&);
9297 extern void gt_ggc_mx (basic_block&);
9298
9299 static void
9300 gt_ggc_mx (rtx_insn *& x)
9301 {
9302 if (x)
9303 gt_ggc_mx_rtx_def ((void *) x);
9304 }
9305
9306 void
9307 gt_ggc_mx (edge_def *e)
9308 {
9309 tree block = LOCATION_BLOCK (e->goto_locus);
9310 gt_ggc_mx (e->src);
9311 gt_ggc_mx (e->dest);
9312 if (current_ir_type () == IR_GIMPLE)
9313 gt_ggc_mx (e->insns.g);
9314 else
9315 gt_ggc_mx (e->insns.r);
9316 gt_ggc_mx (block);
9317 }
9318
9319 /* PCH support for edge_def. */
9320
9321 extern void gt_pch_nx (tree&);
9322 extern void gt_pch_nx (gimple *&);
9323 extern void gt_pch_nx (rtx&);
9324 extern void gt_pch_nx (basic_block&);
9325
9326 static void
9327 gt_pch_nx (rtx_insn *& x)
9328 {
9329 if (x)
9330 gt_pch_nx_rtx_def ((void *) x);
9331 }
9332
9333 void
9334 gt_pch_nx (edge_def *e)
9335 {
9336 tree block = LOCATION_BLOCK (e->goto_locus);
9337 gt_pch_nx (e->src);
9338 gt_pch_nx (e->dest);
9339 if (current_ir_type () == IR_GIMPLE)
9340 gt_pch_nx (e->insns.g);
9341 else
9342 gt_pch_nx (e->insns.r);
9343 gt_pch_nx (block);
9344 }
9345
9346 void
9347 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9348 {
9349 tree block = LOCATION_BLOCK (e->goto_locus);
9350 op (&(e->src), cookie);
9351 op (&(e->dest), cookie);
9352 if (current_ir_type () == IR_GIMPLE)
9353 op (&(e->insns.g), cookie);
9354 else
9355 op (&(e->insns.r), cookie);
9356 op (&(block), cookie);
9357 }
9358
9359 #if CHECKING_P
9360
9361 namespace selftest {
9362
9363 /* Helper function for CFG selftests: create a dummy function decl
9364 and push it as cfun. */
9365
9366 static tree
9367 push_fndecl (const char *name)
9368 {
9369 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9370 /* FIXME: this uses input_location: */
9371 tree fndecl = build_fn_decl (name, fn_type);
9372 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9373 NULL_TREE, integer_type_node);
9374 DECL_RESULT (fndecl) = retval;
9375 push_struct_function (fndecl);
9376 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9377 ASSERT_TRUE (fun != NULL);
9378 init_empty_tree_cfg_for_function (fun);
9379 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9380 ASSERT_EQ (0, n_edges_for_fn (fun));
9381 return fndecl;
9382 }
9383
9384 /* These tests directly create CFGs.
9385 Compare with the static fns within tree-cfg.c:
9386 - build_gimple_cfg
9387 - make_blocks: calls create_basic_block (seq, bb);
9388 - make_edges. */
9389
9390 /* Verify a simple cfg of the form:
9391 ENTRY -> A -> B -> C -> EXIT. */
9392
9393 static void
9394 test_linear_chain ()
9395 {
9396 gimple_register_cfg_hooks ();
9397
9398 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9399 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9400
9401 /* Create some empty blocks. */
9402 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9403 basic_block bb_b = create_empty_bb (bb_a);
9404 basic_block bb_c = create_empty_bb (bb_b);
9405
9406 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9407 ASSERT_EQ (0, n_edges_for_fn (fun));
9408
9409 /* Create some edges: a simple linear chain of BBs. */
9410 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9411 make_edge (bb_a, bb_b, 0);
9412 make_edge (bb_b, bb_c, 0);
9413 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9414
9415 /* Verify the edges. */
9416 ASSERT_EQ (4, n_edges_for_fn (fun));
9417 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9418 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9419 ASSERT_EQ (1, bb_a->preds->length ());
9420 ASSERT_EQ (1, bb_a->succs->length ());
9421 ASSERT_EQ (1, bb_b->preds->length ());
9422 ASSERT_EQ (1, bb_b->succs->length ());
9423 ASSERT_EQ (1, bb_c->preds->length ());
9424 ASSERT_EQ (1, bb_c->succs->length ());
9425 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9426 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9427
9428 /* Verify the dominance information.
9429 Each BB in our simple chain should be dominated by the one before
9430 it. */
9431 calculate_dominance_info (CDI_DOMINATORS);
9432 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9433 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9434 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9435 ASSERT_EQ (1, dom_by_b.length ());
9436 ASSERT_EQ (bb_c, dom_by_b[0]);
9437 free_dominance_info (CDI_DOMINATORS);
9438 dom_by_b.release ();
9439
9440 /* Similarly for post-dominance: each BB in our chain is post-dominated
9441 by the one after it. */
9442 calculate_dominance_info (CDI_POST_DOMINATORS);
9443 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9444 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9445 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9446 ASSERT_EQ (1, postdom_by_b.length ());
9447 ASSERT_EQ (bb_a, postdom_by_b[0]);
9448 free_dominance_info (CDI_POST_DOMINATORS);
9449 postdom_by_b.release ();
9450
9451 pop_cfun ();
9452 }
9453
9454 /* Verify a simple CFG of the form:
9455 ENTRY
9456 |
9457 A
9458 / \
9459 /t \f
9460 B C
9461 \ /
9462 \ /
9463 D
9464 |
9465 EXIT. */
9466
9467 static void
9468 test_diamond ()
9469 {
9470 gimple_register_cfg_hooks ();
9471
9472 tree fndecl = push_fndecl ("cfg_test_diamond");
9473 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9474
9475 /* Create some empty blocks. */
9476 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9477 basic_block bb_b = create_empty_bb (bb_a);
9478 basic_block bb_c = create_empty_bb (bb_a);
9479 basic_block bb_d = create_empty_bb (bb_b);
9480
9481 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9482 ASSERT_EQ (0, n_edges_for_fn (fun));
9483
9484 /* Create the edges. */
9485 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9486 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9487 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9488 make_edge (bb_b, bb_d, 0);
9489 make_edge (bb_c, bb_d, 0);
9490 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9491
9492 /* Verify the edges. */
9493 ASSERT_EQ (6, n_edges_for_fn (fun));
9494 ASSERT_EQ (1, bb_a->preds->length ());
9495 ASSERT_EQ (2, bb_a->succs->length ());
9496 ASSERT_EQ (1, bb_b->preds->length ());
9497 ASSERT_EQ (1, bb_b->succs->length ());
9498 ASSERT_EQ (1, bb_c->preds->length ());
9499 ASSERT_EQ (1, bb_c->succs->length ());
9500 ASSERT_EQ (2, bb_d->preds->length ());
9501 ASSERT_EQ (1, bb_d->succs->length ());
9502
9503 /* Verify the dominance information. */
9504 calculate_dominance_info (CDI_DOMINATORS);
9505 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9506 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9507 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9508 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9509 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9510 dom_by_a.release ();
9511 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9512 ASSERT_EQ (0, dom_by_b.length ());
9513 dom_by_b.release ();
9514 free_dominance_info (CDI_DOMINATORS);
9515
9516 /* Similarly for post-dominance. */
9517 calculate_dominance_info (CDI_POST_DOMINATORS);
9518 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9519 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9520 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9521 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9522 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9523 postdom_by_d.release ();
9524 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9525 ASSERT_EQ (0, postdom_by_b.length ());
9526 postdom_by_b.release ();
9527 free_dominance_info (CDI_POST_DOMINATORS);
9528
9529 pop_cfun ();
9530 }
9531
9532 /* Verify that we can handle a CFG containing a "complete" aka
9533 fully-connected subgraph (where each of A, B, C, D below has edges
9534 pointing to every other node, and also to itself).
9535 e.g.:
9536 ENTRY EXIT
9537 | ^
9538 | /
9539 | /
9540 | /
9541 V/
9542 A<--->B
9543 ^^ ^^
9544 | \ / |
9545 | X |
9546 | / \ |
9547 VV VV
9548 C<--->D
9549 */
9550
9551 static void
9552 test_fully_connected ()
9553 {
9554 gimple_register_cfg_hooks ();
9555
9556 tree fndecl = push_fndecl ("cfg_fully_connected");
9557 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9558
9559 const int n = 4;
9560
9561 /* Create some empty blocks. */
9562 auto_vec <basic_block> subgraph_nodes;
9563 for (int i = 0; i < n; i++)
9564 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9565
9566 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9567 ASSERT_EQ (0, n_edges_for_fn (fun));
9568
9569 /* Create the edges. */
9570 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9571 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9572 for (int i = 0; i < n; i++)
9573 for (int j = 0; j < n; j++)
9574 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9575
9576 /* Verify the edges. */
9577 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9578 /* The first one is linked to ENTRY/EXIT as well as itself and
9579 everything else. */
9580 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9581 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9582 /* The other ones in the subgraph are linked to everything in
9583 the subgraph (including themselves). */
9584 for (int i = 1; i < n; i++)
9585 {
9586 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9587 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9588 }
9589
9590 /* Verify the dominance information. */
9591 calculate_dominance_info (CDI_DOMINATORS);
9592 /* The initial block in the subgraph should be dominated by ENTRY. */
9593 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9594 get_immediate_dominator (CDI_DOMINATORS,
9595 subgraph_nodes[0]));
9596 /* Every other block in the subgraph should be dominated by the
9597 initial block. */
9598 for (int i = 1; i < n; i++)
9599 ASSERT_EQ (subgraph_nodes[0],
9600 get_immediate_dominator (CDI_DOMINATORS,
9601 subgraph_nodes[i]));
9602 free_dominance_info (CDI_DOMINATORS);
9603
9604 /* Similarly for post-dominance. */
9605 calculate_dominance_info (CDI_POST_DOMINATORS);
9606 /* The initial block in the subgraph should be postdominated by EXIT. */
9607 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9608 get_immediate_dominator (CDI_POST_DOMINATORS,
9609 subgraph_nodes[0]));
9610 /* Every other block in the subgraph should be postdominated by the
9611 initial block, since that leads to EXIT. */
9612 for (int i = 1; i < n; i++)
9613 ASSERT_EQ (subgraph_nodes[0],
9614 get_immediate_dominator (CDI_POST_DOMINATORS,
9615 subgraph_nodes[i]));
9616 free_dominance_info (CDI_POST_DOMINATORS);
9617
9618 pop_cfun ();
9619 }
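
/* A possible additional selftest, following the pattern above (a
   sketch only; deliberately not wired into tree_cfg_c_tests): verify
   a block with an edge to itself, one of the cases in the TODO at the
   end of this file.  */

static void ATTRIBUTE_UNUSED
test_self_loop ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_self_loop");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create a single block A with a self-loop.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_a, 0);
  make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges: the self-loop counts once as a pred and once
     as a succ of A.  */
  ASSERT_EQ (3, n_edges_for_fn (fun));
  ASSERT_EQ (2, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());

  /* The self-loop must not disturb dominance: A is still immediately
     dominated by ENTRY.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS, bb_a));
  free_dominance_info (CDI_DOMINATORS);

  pop_cfun ();
}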
9620
9621 /* Run all of the selftests within this file. */
9622
9623 void
9624 tree_cfg_c_tests ()
9625 {
9626 test_linear_chain ();
9627 test_diamond ();
9628 test_fully_connected ();
9629 }
9630
9631 } // namespace selftest
9632
9633 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9634 - loop
9635 - nested loops
9636 - switch statement (a block with many out-edges)
9637 - something that jumps to itself
9638 - etc */
9639
9640 #endif /* CHECKING_P */