/* Control flow functions for trees.
   Copyright (C) 2001-2020 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
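
/* As an illustrative sketch: for

     switch (x) { case 1: case 2: goto L; default: goto D; }

   both case labels land in the block holding L, so the single edge to
   that block maps to a chain of two CASE_LABEL_EXPRs linked through
   their CASE_CHAIN fields (see get_cases_for_edge below).  */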

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

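/* The discriminator table itself, recording for each source line the
   last discriminator assigned for it.  */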
static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity, true);

  /* Build a mapping of labels to their associated blocks.  */
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity, true);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

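/* Likewise, but for the current function CFUN.  */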
void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_unroll_kind:
          loop->unroll
            = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
          cfun->has_unroll = true;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        case annot_expr_parallel_kind:
          loop->can_be_parallel = true;
          loop->safelen = INT_MAX;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  class loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);

      /* Push the global flag_finite_loops state down to individual loops.  */
      loop->finite_p = flag_finite_loops;
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_unroll_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
            case annot_expr_parallel_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}

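/* Run the CFG construction pass: build the CFG for the current
   function's GIMPLE body, clean up the fresh CFG, and initialize the
   loop structure so loop annotations can be propagated to the loops.  */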
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable (), possibly preceded by labels,
   debug statements and clobbers.  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
         optimize away those calls, but rather turn them into
         __ubsan_handle_builtin_unreachable () or __builtin_trap ()
         later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
          && !is_gimple_debug (stmt)
          && !gimple_clobber_p (stmt))
        return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
         as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
          && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
         stmt is before nondebug stmts.  Once stmt reaches a nondebug
         nonlabel, prev_stmt will be set to it, so that
         stmt_starts_bb_p will know to start a new block if a label is
         found.  However, if stmt was a label after debug stmts only,
         keep the label in prev_stmt even if we find further debug
         stmts, for there may be other labels after them, and they
         should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
        prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, bb);
          start_new_block = false;
          prev_stmt = NULL;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple *s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
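  /* For instance (an illustrative sketch): the marker in
     "# DEBUG BEGIN_STMT; L:" is moved to yield "L:; # DEBUG BEGIN_STMT",
     so that the label can start its block.  */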
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
        {
          gimple *stmt = gsi_stmt (i);

          /* If this is the first label we encounter (latest in SEQ)
             before nondebug stmts, record its position.  */
          if (is_a <glabel *> (stmt))
            {
              if (gsi_end_p (label))
                label = i;
              continue;
            }

          /* Without a recorded label position to move debug stmts to,
             there's nothing to do.  */
          if (gsi_end_p (label))
            continue;

          /* Move the debug stmt at I after LABEL.  */
          if (is_gimple_debug (stmt))
            {
              gcc_assert (gimple_debug_nonbind_marker_p (stmt));
              /* As STMT is removed, I advances to the stmt after
                 STMT, so the gsi_prev in the for "increment"
                 expression gets us to the stmt we're to visit after
                 STMT.  LABEL, however, would advance to the moved
                 stmt if we passed it to gsi_move_after, so pass it a
                 copy instead, so as to keep LABEL pointing to the
                 LABEL.  */
              gimple_stmt_iterator copy = label;
              gsi_move_after (&i, &copy);
              continue;
            }

          /* There aren't any (more?) debug stmts before label, so
             there isn't anything else to move after it.  */
          label = gsi_none ();
        }
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           last_basic_block_for_fn (cfun) + 1);

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple *g = gsi_stmt (gsi);
        if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
          return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block containing
   an ABNORMAL_DISPATCHER internal call if needed, and create abnormal
   edges from BBS to it and from it to FOR_BB if COMPUTED_GOTO is
   false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple *factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple *factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple *last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple *assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                  1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
        ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
        edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
        e->goto_locus = gimple_location (last);
        fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (stmt_can_make_abnormal_goto (last))
        ret = 2;

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
        {
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
          fallthru = false;
        }
      /* Some calls are known not to return.  */
      else
        fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
         control-altering.  */
      if (is_ctrl_altering_stmt (last))
        make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
        tree label1 = gimple_transaction_label_norm (txn);
        tree label2 = gimple_transaction_label_uninst (txn);

        if (label1)
          make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
        if (label2)
          make_edge (bb, label_to_block (cfun, label2),
                     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

        tree label3 = gimple_transaction_label_over (txn);
        if (gimple_transaction_subcode (txn)
            & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
          make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

        fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph by creating the edges between them.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
        ab_edge_goto.safe_push (bb);
      else if (mer == 2)
        ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
        bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     Non-local gotos and abnormal edges from calls (to calls that
     return twice, or to forced labels) are factored too: all abnormal
     edges from the calls go to a common artificial basic block with an
     ABNORMAL_DISPATCHER internal call, and abnormal edges lead from
     that basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
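  /* For example (a sketch, using C-level syntax for the GIMPLE forms):
     two computed gotos "goto *p;" and "goto *q;" become
     "gotovar = p; goto DISPATCHER;" and "gotovar = q; goto DISPATCHER;",
     with a single factored block "DISPATCHER: goto *gotovar;" carrying
     one outgoing edge per possible destination label.  */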
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple *call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further bbs
   as needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->count ().initialized_p ())
            cnt += e->count ();
          else
            all = false;
        }
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
        bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LINE.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling: e.g. two blocks that both map to source line 42 can
   be told apart in the profile by their discriminators 1 and 2.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.
   FROM is the expansion of LOCUS1.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
          && to.file != NULL
          && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to basic blocks: whenever a successor block
   starts or ends on the same source line as BB's last statement, give
   one of the two blocks a fresh discriminator for that line.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple *first = first_non_label_stmt (e->dest);
          gimple *last = last_stmt (e->dest);
          if ((first && same_line_p (locus, &locus_e,
                                     gimple_location (first)))
              || (last && same_line_p (locus, &locus_e,
                                       gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator
                  = next_discriminator_for_locus (locus_e.line);
              else
                e->dest->discriminator
                  = next_discriminator_for_locus (locus_e.line);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table as we delete the
   edge-to-cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple *stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (as_a <gswitch *> (stmt));
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi
        = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
};

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label, label_record *label_for_bb)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (label_record *label_for_bb)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad, label_for_bb);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab, label_for_bb);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab, label_for_bb);
        break;
      }
}


/* Clean up redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Clean up all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_record *label_for_bb = XCNEWVEC (struct label_record,
                                         last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            gcond *cond_stmt = as_a <gcond *> (stmt);
            label = gimple_cond_true_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_cond_set_true_label (cond_stmt, new_label);
              }

            label = gimple_cond_false_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_cond_set_false_label (cond_stmt, new_label);
              }
          }
          break;

        case GIMPLE_SWITCH:
          {
            gswitch *switch_stmt = as_a <gswitch *> (stmt);
            size_t i, n = gimple_switch_num_labels (switch_stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (switch_stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            gasm *asm_stmt = as_a <gasm *> (stmt);
            int i, n = gimple_asm_nlabels (asm_stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (asm_stmt, i);
                tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              ggoto *goto_stmt = as_a <ggoto *> (stmt);
              label = gimple_goto_dest (goto_stmt);
              new_label = main_block_label (label, label_for_bb);
              if (new_label != label)
                gimple_goto_set_dest (goto_stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *txn = as_a <gtransaction *> (stmt);

            label = gimple_transaction_label_norm (txn);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_transaction_set_label_norm (txn, new_label);
              }

            label = gimple_transaction_label_uninst (txn);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_transaction_set_label_uninst (txn, new_label);
              }

            label = gimple_transaction_label_over (txn);
            if (label)
              {
                new_label = main_block_label (label, label_for_bb);
                if (new_label != label)
                  gimple_transaction_set_label_over (txn, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh (label_for_bb);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;
  hash_set<tree> *removed_labels = NULL;

  default_bb = gimple_switch_default_bb (cfun, stmt);

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (cfun, CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
         whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL
          || base_bb == default_bb
          || (removed_labels
              && removed_labels->contains (CASE_LABEL (base_case))))
        {
          i++;
          continue;
        }

      base_high = CASE_HIGH (base_case)
                  ? CASE_HIGH (base_case)
                  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (next_index < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, next_index);
          basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
          wide_int bhp1 = wi::to_wide (base_high) + 1;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && (removed_labels == NULL
                  || !removed_labels->contains (CASE_LABEL (merge_case)))
              && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
            {
              base_high
                = (CASE_HIGH (merge_case)
                   ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
              CASE_HIGH (base_case) = base_high;
              next_index++;
            }
          else
            break;
        }

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
          && gimple_seq_unreachable_p (bb_seq (base_bb))
          /* Don't optimize this if __builtin_unreachable () is the
             implicitly added one by the C++ FE too early, before
             -Wreturn-type can be diagnosed.  We'll optimize it later
             during switchconv pass or any other cfg cleanup.  */
          && (gimple_in_ssa_p (cfun)
              || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
                  != BUILTINS_LOCATION)))
        {
          edge base_edge = find_edge (gimple_bb (stmt), base_bb);
          if (base_edge != NULL)
            {
              for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
                   !gsi_end_p (gsi); gsi_next (&gsi))
                if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
                  {
                    if (FORCED_LABEL (gimple_label_label (stmt))
                        || DECL_NONLOCAL (gimple_label_label (stmt)))
                      {
                        /* Forced/non-local labels aren't going to be removed,
                           but they will be moved to some neighbouring basic
                           block.  If some later case label refers to one of
                           those labels, we should throw that case away rather
                           than keeping it around and referring to some random
                           other basic block without an edge to it.  */
                        if (removed_labels == NULL)
                          removed_labels = new hash_set<tree>;
                        removed_labels->add (gimple_label_label (stmt));
                      }
                  }
                else
                  break;
              remove_edge_and_dominated_blocks (base_edge);
            }
          i = next_index;
          continue;
        }

      if (new_size < i)
        gimple_switch_set_label (stmt, new_size,
                                 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  delete removed_labels;
  return new_size < old_size;
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

bool
group_case_labels (void)
{
  basic_block bb;
  bool changed = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
    }

  return changed;
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

1882 /* If A ends by a statement causing exceptions or something similar, we
1883 cannot merge the blocks. */
1884 stmt = last_stmt (a);
1885 if (stmt && stmt_ends_bb_p (stmt))
1886 return false;
1887
1888 /* Do not allow a block with only a non-local label to be merged. */
1889 if (stmt)
1890 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1891 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1892 return false;
1893
1894 /* Examine the labels at the beginning of B. */
1895 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1896 gsi_next (&gsi))
1897 {
1898 tree lab;
1899 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1900 if (!label_stmt)
1901 break;
1902 lab = gimple_label_label (label_stmt);
1903
1904 /* Do not remove user forced labels or for -O0 any user labels. */
1905 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1906 return false;
1907 }
1908
1909 /* Protect simple loop latches. We only want to avoid merging
1910 the latch with the loop header or with a block in another
1911 loop in this case. */
1912 if (current_loops
1913 && b->loop_father->latch == b
1914 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1915 && (b->loop_father->header == a
1916 || b->loop_father != a->loop_father))
1917 return false;
1918
1919 /* It must be possible to eliminate all phi nodes in B. If ssa form
1920 is not up-to-date and a name-mapping is registered, we cannot eliminate
1921 any phis. Symbols marked for renaming are never a problem though. */
1922 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1923 gsi_next (&gsi))
1924 {
1925 gphi *phi = gsi.phi ();
1926 /* Technically only new names matter. */
1927 if (name_registered_for_update_p (PHI_RESULT (phi)))
1928 return false;
1929 }
1930
1931 /* When not optimizing, don't merge if we'd lose goto_locus. */
1932 if (!optimize
1933 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1934 {
1935 location_t goto_locus = single_succ_edge (a)->goto_locus;
1936 gimple_stmt_iterator prev, next;
1937 prev = gsi_last_nondebug_bb (a);
1938 next = gsi_after_labels (b);
1939 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1940 gsi_next_nondebug (&next);
1941 if ((gsi_end_p (prev)
1942 || gimple_location (gsi_stmt (prev)) != goto_locus)
1943 && (gsi_end_p (next)
1944 || gimple_location (gsi_stmt (next)) != goto_locus))
1945 return false;
1946 }
1947
1948 return true;
1949 }
1950
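/* A minimal usage sketch (hypothetical caller code): the predicate is
   checked before the actual merge, which is performed through the CFG
   hooks, e.g.

     if (gimple_can_merge_blocks_p (a, b))
       merge_blocks (a, b);

   gimple_merge_blocks below assumes the predicate already holds; see
   the asserts on the fallthru edge there.  */
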
1951 /* Replaces all uses of NAME by VAL. */
1952
1953 void
1954 replace_uses_by (tree name, tree val)
1955 {
1956 imm_use_iterator imm_iter;
1957 use_operand_p use;
1958 gimple *stmt;
1959 edge e;
1960
1961 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1962 {
1963 /* Mark the block if we change the last stmt in it. */
1964 if (cfgcleanup_altered_bbs
1965 && stmt_ends_bb_p (stmt))
1966 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1967
1968 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1969 {
1970 replace_exp (use, val);
1971
1972 if (gimple_code (stmt) == GIMPLE_PHI)
1973 {
1974 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1975 PHI_ARG_INDEX_FROM_USE (use));
1976 if (e->flags & EDGE_ABNORMAL
1977 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1978 {
1979 /* This can only occur for virtual operands, since
1980 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1981 would prevent replacement. */
1982 gcc_checking_assert (virtual_operand_p (name));
1983 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1984 }
1985 }
1986 }
1987
1988 if (gimple_code (stmt) != GIMPLE_PHI)
1989 {
1990 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1991 gimple *orig_stmt = stmt;
1992 size_t i;
1993
1994 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1995 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1996 only change something from non-invariant to invariant, and only
1997 when propagating constants. */
1998 if (is_gimple_min_invariant (val))
1999 for (i = 0; i < gimple_num_ops (stmt); i++)
2000 {
2001 tree op = gimple_op (stmt, i);
2002 /* Operands may be empty here. For example, the labels
2003 of a GIMPLE_COND are nulled out following the creation
2004 of the corresponding CFG edges. */
2005 if (op && TREE_CODE (op) == ADDR_EXPR)
2006 recompute_tree_invariant_for_addr_expr (op);
2007 }
2008
2009 if (fold_stmt (&gsi))
2010 stmt = gsi_stmt (gsi);
2011
2012 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2013 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2014
2015 update_stmt (stmt);
2016 }
2017 }
2018
2019 gcc_checking_assert (has_zero_uses (name));
2020
2021 /* Also update the trees stored in loop structures. */
2022 if (current_loops)
2023 {
2024 class loop *loop;
2025
2026 FOR_EACH_LOOP (loop, 0)
2027 {
2028 substitute_in_loop_info (loop, name, val);
2029 }
2030 }
2031 }
2032
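/* A typical use, sketched on a hypothetical degenerate PHI: for

     x_2 = PHI <x_1>

   callers propagate the single argument with

     replace_uses_by (x_2, x_1);

   after which x_2 has zero uses (asserted above) and the PHI node can
   be removed, as done in gimple_merge_blocks below.  */
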
2033 /* Merge block B into block A. */
2034
2035 static void
2036 gimple_merge_blocks (basic_block a, basic_block b)
2037 {
2038 gimple_stmt_iterator last, gsi;
2039 gphi_iterator psi;
2040
2041 if (dump_file)
2042 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2043
2044 /* Remove all single-valued PHI nodes from block B of the form
2045 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2046 gsi = gsi_last_bb (a);
2047 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2048 {
2049 gimple *phi = gsi_stmt (psi);
2050 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2051 gimple *copy;
2052 bool may_replace_uses = (virtual_operand_p (def)
2053 || may_propagate_copy (def, use));
2054
2055 /* In case we maintain loop closed ssa form, do not propagate arguments
2056 of loop exit phi nodes. */
2057 if (current_loops
2058 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2059 && !virtual_operand_p (def)
2060 && TREE_CODE (use) == SSA_NAME
2061 && a->loop_father != b->loop_father)
2062 may_replace_uses = false;
2063
2064 if (!may_replace_uses)
2065 {
2066 gcc_assert (!virtual_operand_p (def));
2067
2068 /* Note that just emitting the copies is fine -- there is no problem
2069 with ordering of phi nodes. This is because A is the single
2070 predecessor of B, therefore results of the phi nodes cannot
2071 appear as arguments of the phi nodes. */
2072 copy = gimple_build_assign (def, use);
2073 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2074 remove_phi_node (&psi, false);
2075 }
2076 else
2077 {
2078 /* If we deal with a PHI for virtual operands, we can simply
2079 propagate these without fussing with folding or updating
2080 the stmt. */
2081 if (virtual_operand_p (def))
2082 {
2083 imm_use_iterator iter;
2084 use_operand_p use_p;
2085 gimple *stmt;
2086
2087 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2088 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2089 SET_USE (use_p, use);
2090
2091 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2092 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2093 }
2094 else
2095 replace_uses_by (def, use);
2096
2097 remove_phi_node (&psi, true);
2098 }
2099 }
2100
2101 /* Ensure that B follows A. */
2102 move_block_after (b, a);
2103
2104 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2105 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2106
2107 /* Remove labels from B and set gimple_bb to A for other statements. */
2108 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2109 {
2110 gimple *stmt = gsi_stmt (gsi);
2111 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2112 {
2113 tree label = gimple_label_label (label_stmt);
2114 int lp_nr;
2115
2116 gsi_remove (&gsi, false);
2117
2118 /* Now that we can thread computed gotos, we might have
2119 a situation where we have a forced label in block B.
2120 However, the label at the start of block B might still be
2121 used in other ways (think about the runtime checking for
2122 Fortran assigned gotos). So we cannot just delete the
2123 label. Instead we move the label to the start of block A. */
2124 if (FORCED_LABEL (label))
2125 {
2126 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2127 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2128 }
2129 /* Other user labels are kept around in the form of a debug stmt. */
2130 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2131 {
2132 gimple *dbg = gimple_build_debug_bind (label,
2133 integer_zero_node,
2134 stmt);
2135 gimple_debug_bind_reset_value (dbg);
2136 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2137 }
2138
2139 lp_nr = EH_LANDING_PAD_NR (label);
2140 if (lp_nr)
2141 {
2142 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2143 lp->post_landing_pad = NULL;
2144 }
2145 }
2146 else
2147 {
2148 gimple_set_bb (stmt, a);
2149 gsi_next (&gsi);
2150 }
2151 }
2152
2153 /* When merging two BBs, if their counts are different, the larger count
2154 is selected as the new bb count. This is to handle inconsistent
2155 profiles. */
2156 if (a->loop_father == b->loop_father)
2157 {
2158 a->count = a->count.merge (b->count);
2159 }
2160
2161 /* Merge the sequences. */
2162 last = gsi_last_bb (a);
2163 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2164 set_bb_seq (b, NULL);
2165
2166 if (cfgcleanup_altered_bbs)
2167 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2168 }
2169
2170
2171 /* Of the two successors of BB, return the one that is not reachable
2172 by a complex edge, if there is one. Otherwise, return BB. We use
2173 this in optimizations that use post-dominators for their heuristics,
2174 to catch the cases in C++ where function calls are involved. */
2175
2176 basic_block
2177 single_noncomplex_succ (basic_block bb)
2178 {
2179 edge e0, e1;
2180 if (EDGE_COUNT (bb->succs) != 2)
2181 return bb;
2182
2183 e0 = EDGE_SUCC (bb, 0);
2184 e1 = EDGE_SUCC (bb, 1);
2185 if (e0->flags & EDGE_COMPLEX)
2186 return e1->dest;
2187 if (e1->flags & EDGE_COMPLEX)
2188 return e0->dest;
2189
2190 return bb;
2191 }
2192
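/* E.g. (hypothetical C++ situation): a block ending in a call that may
   throw has a normal fallthru successor plus an EH successor.  The EH
   edge is EDGE_COMPLEX, so this returns the fallthru destination,
   which is the block the post-dominator heuristics want to look at.  */
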
2193 /* CALL is a GIMPLE_CALL. Set the current_function_calls_* flags. */
2194
2195 void
2196 notice_special_calls (gcall *call)
2197 {
2198 int flags = gimple_call_flags (call);
2199
2200 if (flags & ECF_MAY_BE_ALLOCA)
2201 cfun->calls_alloca = true;
2202 if (flags & ECF_RETURNS_TWICE)
2203 cfun->calls_setjmp = true;
2204 }
2205
2206
2207 /* Clear flags set by notice_special_calls. Used by dead code removal
2208 to update the flags. */
2209
2210 void
2211 clear_special_calls (void)
2212 {
2213 cfun->calls_alloca = false;
2214 cfun->calls_setjmp = false;
2215 }
2216
2217 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2218
2219 static void
2220 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2221 {
2222 /* Since this block is no longer reachable, we can just delete all
2223 of its PHI nodes. */
2224 remove_phi_nodes (bb);
2225
2226 /* Remove edges to BB's successors. */
2227 while (EDGE_COUNT (bb->succs) > 0)
2228 remove_edge (EDGE_SUCC (bb, 0));
2229 }
2230
2231
2232 /* Remove statements of basic block BB. */
2233
2234 static void
2235 remove_bb (basic_block bb)
2236 {
2237 gimple_stmt_iterator i;
2238
2239 if (dump_file)
2240 {
2241 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2242 if (dump_flags & TDF_DETAILS)
2243 {
2244 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2245 fprintf (dump_file, "\n");
2246 }
2247 }
2248
2249 if (current_loops)
2250 {
2251 class loop *loop = bb->loop_father;
2252
2253 /* If a loop gets removed, clean up the information associated
2254 with it. */
2255 if (loop->latch == bb
2256 || loop->header == bb)
2257 free_numbers_of_iterations_estimates (loop);
2258 }
2259
2260 /* Remove all the instructions in the block. */
2261 if (bb_seq (bb) != NULL)
2262 {
2263 /* Walk backwards so as to get a chance to substitute all
2264 released DEFs into debug stmts. See
2265 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2266 details. */
2267 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2268 {
2269 gimple *stmt = gsi_stmt (i);
2270 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2271 if (label_stmt
2272 && (FORCED_LABEL (gimple_label_label (label_stmt))
2273 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2274 {
2275 basic_block new_bb;
2276 gimple_stmt_iterator new_gsi;
2277
2278 /* A non-reachable non-local label may still be referenced.
2279 But it no longer needs to carry the extra semantics of
2280 non-locality. */
2281 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2282 {
2283 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2284 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2285 }
2286
2287 new_bb = bb->prev_bb;
2288 /* Don't move any labels into ENTRY block. */
2289 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2290 {
2291 new_bb = single_succ (new_bb);
2292 gcc_assert (new_bb != bb);
2293 }
2294 new_gsi = gsi_after_labels (new_bb);
2295 gsi_remove (&i, false);
2296 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2297 }
2298 else
2299 {
2300 /* Release SSA definitions. */
2301 release_defs (stmt);
2302 gsi_remove (&i, true);
2303 }
2304
2305 if (gsi_end_p (i))
2306 i = gsi_last_bb (bb);
2307 else
2308 gsi_prev (&i);
2309 }
2310 }
2311
2312 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2313 bb->il.gimple.seq = NULL;
2314 bb->il.gimple.phi_nodes = NULL;
2315 }
2316
2317
2318 /* Given a basic block BB and a value VAL for use in the final statement
2319 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2320 the edge that will be taken out of the block.
2321 If VAL is NULL_TREE, then the current value of the final statement's
2322 predicate or index is used.
2323 If the value does not match a unique edge, NULL is returned. */
2324
2325 edge
2326 find_taken_edge (basic_block bb, tree val)
2327 {
2328 gimple *stmt;
2329
2330 stmt = last_stmt (bb);
2331
2332 /* Handle ENTRY and EXIT. */
2333 if (!stmt)
2334 return NULL;
2335
2336 if (gimple_code (stmt) == GIMPLE_COND)
2337 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2338
2339 if (gimple_code (stmt) == GIMPLE_SWITCH)
2340 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2341
2342 if (computed_goto_p (stmt))
2343 {
2344 /* Only optimize if the argument is a label; if the argument is
2345 not a label then we cannot construct a proper CFG.
2346
2347 It may be the case that we only need to allow the LABEL_REF to
2348 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2349 appear inside a LABEL_EXPR just to be safe. */
2350 if (val
2351 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2352 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2353 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2354 }
2355
2356 /* Otherwise we only know the taken successor edge if it's unique. */
2357 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2358 }
2359
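/* A small usage sketch (hypothetical): for a block BB whose last
   statement is "if (x_1 > 0) goto <T>; else goto <F>;",

     edge e = find_taken_edge (bb, integer_one_node);

   returns the EDGE_TRUE_VALUE successor, while passing NULL_TREE only
   succeeds if the predicate already folds to a constant.  */
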
2360 /* Given a constant value VAL and the basic block BB ending in a
2361 computed GOTO (a GOTO_EXPR), determine which outgoing edge will be
2362 taken out of the block. Return NULL if the edge cannot be determined. */
2363
2364 static edge
2365 find_taken_edge_computed_goto (basic_block bb, tree val)
2366 {
2367 basic_block dest;
2368 edge e = NULL;
2369
2370 dest = label_to_block (cfun, val);
2371 if (dest)
2372 e = find_edge (bb, dest);
2373
2374 /* It's possible for find_edge to return NULL here on invalid code
2375 that abuses the labels-as-values extension (e.g. code that attempts to
2376 jump *between* functions via stored labels-as-values; PR 84136).
2377 If so, then we simply return that NULL for the edge.
2378 We don't currently have a way of detecting such invalid code, so we
2379 can't assert that it was the case when a NULL edge occurs here. */
2380
2381 return e;
2382 }
2383
2384 /* Given COND_STMT and a constant value VAL for use as the predicate,
2385 determine which of the two edges will be taken out of
2386 the statement's block. Return NULL if either edge may be taken.
2387 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2388 is used. */
2389
2390 static edge
2391 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2392 {
2393 edge true_edge, false_edge;
2394
2395 if (val == NULL_TREE)
2396 {
2397 /* Use the current value of the predicate. */
2398 if (gimple_cond_true_p (cond_stmt))
2399 val = integer_one_node;
2400 else if (gimple_cond_false_p (cond_stmt))
2401 val = integer_zero_node;
2402 else
2403 return NULL;
2404 }
2405 else if (TREE_CODE (val) != INTEGER_CST)
2406 return NULL;
2407
2408 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2409 &true_edge, &false_edge);
2410
2411 return (integer_zerop (val) ? false_edge : true_edge);
2412 }
2413
2414 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2415 which edge will be taken out of the statement's block. Return NULL if any
2416 edge may be taken.
2417 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2418 is used. */
2419
2420 edge
2421 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2422 {
2423 basic_block dest_bb;
2424 edge e;
2425 tree taken_case;
2426
2427 if (gimple_switch_num_labels (switch_stmt) == 1)
2428 taken_case = gimple_switch_default_label (switch_stmt);
2429 else
2430 {
2431 if (val == NULL_TREE)
2432 val = gimple_switch_index (switch_stmt);
2433 if (TREE_CODE (val) != INTEGER_CST)
2434 return NULL;
2435 else
2436 taken_case = find_case_label_for_value (switch_stmt, val);
2437 }
2438 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2439
2440 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2441 gcc_assert (e);
2442 return e;
2443 }
2444
2445
2446 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2447 We can make optimal use here of the fact that the case labels are
2448 sorted: We can do a binary search for a case matching VAL. */
2449
2450 tree
2451 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2452 {
2453 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2454 tree default_case = gimple_switch_default_label (switch_stmt);
2455
2456 for (low = 0, high = n; high - low > 1; )
2457 {
2458 size_t i = (high + low) / 2;
2459 tree t = gimple_switch_label (switch_stmt, i);
2460 int cmp;
2461
2462 /* Cache the result of comparing CASE_LOW and val. */
2463 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2464
2465 if (cmp > 0)
2466 high = i;
2467 else
2468 low = i;
2469
2470 if (CASE_HIGH (t) == NULL)
2471 {
2472 /* A single-valued case label. */
2473 if (cmp == 0)
2474 return t;
2475 }
2476 else
2477 {
2478 /* A case range. We can only handle integer ranges. */
2479 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2480 return t;
2481 }
2482 }
2483
2484 return default_case;
2485 }
2486
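/* For example (hypothetical sorted case vector, default at index 0):

     <default, case 1, case 5 ... 9, case 12>

   looking up VAL == 7 converges on the range label "case 5 ... 9"
   since CASE_LOW (5) <= 7 and CASE_HIGH (9) >= 7, while VAL == 3
   matches no label and the default case is returned.  */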
2487
2488 /* Dump a basic block on stderr. */
2489
2490 void
2491 gimple_debug_bb (basic_block bb)
2492 {
2493 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2494 }
2495
2496
2497 /* Dump basic block with index N on stderr. */
2498
2499 basic_block
2500 gimple_debug_bb_n (int n)
2501 {
2502 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2503 return BASIC_BLOCK_FOR_FN (cfun, n);
2504 }
2505
2506
2507 /* Dump the CFG on stderr.
2508
2509 FLAGS are the same as those used by the tree dumping functions
2510 (see TDF_* in dumpfile.h). */
2511
2512 void
2513 gimple_debug_cfg (dump_flags_t flags)
2514 {
2515 gimple_dump_cfg (stderr, flags);
2516 }
2517
2518
2519 /* Dump the program showing basic block boundaries on the given FILE.
2520
2521 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2522 tree.h). */
2523
2524 void
2525 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2526 {
2527 if (flags & TDF_DETAILS)
2528 {
2529 dump_function_header (file, current_function_decl, flags);
2530 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2531 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2532 last_basic_block_for_fn (cfun));
2533
2534 brief_dump_cfg (file, flags);
2535 fprintf (file, "\n");
2536 }
2537
2538 if (flags & TDF_STATS)
2539 dump_cfg_stats (file);
2540
2541 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2542 }
2543
2544
2545 /* Dump CFG statistics on FILE. */
2546
2547 void
2548 dump_cfg_stats (FILE *file)
2549 {
2550 static long max_num_merged_labels = 0;
2551 unsigned long size, total = 0;
2552 long num_edges;
2553 basic_block bb;
2554 const char * const fmt_str = "%-30s%-13s%12s\n";
2555 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2556 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2557 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2558 const char *funcname = current_function_name ();
2559
2560 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2561
2562 fprintf (file, "---------------------------------------------------------\n");
2563 fprintf (file, fmt_str, "", " Number of ", "Memory");
2564 fprintf (file, fmt_str, "", " instances ", "used ");
2565 fprintf (file, "---------------------------------------------------------\n");
2566
2567 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2568 total += size;
2569 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2570 SIZE_AMOUNT (size));
2571
2572 num_edges = 0;
2573 FOR_EACH_BB_FN (bb, cfun)
2574 num_edges += EDGE_COUNT (bb->succs);
2575 size = num_edges * sizeof (class edge_def);
2576 total += size;
2577 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2578
2579 fprintf (file, "---------------------------------------------------------\n");
2580 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2581 SIZE_AMOUNT (total));
2582 fprintf (file, "---------------------------------------------------------\n");
2583 fprintf (file, "\n");
2584
2585 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2586 max_num_merged_labels = cfg_stats.num_merged_labels;
2587
2588 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2589 cfg_stats.num_merged_labels, max_num_merged_labels);
2590
2591 fprintf (file, "\n");
2592 }
2593
2594
2595 /* Dump CFG statistics on stderr. Keep extern so that it's always
2596 linked in the final executable. */
2597
2598 DEBUG_FUNCTION void
2599 debug_cfg_stats (void)
2600 {
2601 dump_cfg_stats (stderr);
2602 }
2603
2604 /*---------------------------------------------------------------------------
2605 Miscellaneous helpers
2606 ---------------------------------------------------------------------------*/
2607
2608 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2609 flow. Transfers of control flow associated with EH are excluded. */
2610
2611 static bool
2612 call_can_make_abnormal_goto (gimple *t)
2613 {
2614 /* If the function has no non-local labels, then a call cannot make an
2615 abnormal transfer of control. */
2616 if (!cfun->has_nonlocal_label
2617 && !cfun->calls_setjmp)
2618 return false;
2619
2620 /* Likewise if the call has no side effects. */
2621 if (!gimple_has_side_effects (t))
2622 return false;
2623
2624 /* Likewise if the called function is leaf. */
2625 if (gimple_call_flags (t) & ECF_LEAF)
2626 return false;
2627
2628 return true;
2629 }
2630
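/* As a hypothetical C-level illustration:

     if (setjmp (buf)) ...;
     g ();

   here the call to g may transfer control back to the setjmp receiver
   via longjmp, so it can make an abnormal goto unless it is known to
   be side-effect free or ECF_LEAF.  */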
2631
2632 /* Return true if T can make an abnormal transfer of control flow.
2633 Transfers of control flow associated with EH are excluded. */
2634
2635 bool
2636 stmt_can_make_abnormal_goto (gimple *t)
2637 {
2638 if (computed_goto_p (t))
2639 return true;
2640 if (is_gimple_call (t))
2641 return call_can_make_abnormal_goto (t);
2642 return false;
2643 }
2644
2645
2646 /* Return true if T represents a stmt that always transfers control. */
2647
2648 bool
2649 is_ctrl_stmt (gimple *t)
2650 {
2651 switch (gimple_code (t))
2652 {
2653 case GIMPLE_COND:
2654 case GIMPLE_SWITCH:
2655 case GIMPLE_GOTO:
2656 case GIMPLE_RETURN:
2657 case GIMPLE_RESX:
2658 return true;
2659 default:
2660 return false;
2661 }
2662 }
2663
2664
2665 /* Return true if T is a statement that may alter the flow of control
2666 (e.g., a call to a non-returning function). */
2667
2668 bool
2669 is_ctrl_altering_stmt (gimple *t)
2670 {
2671 gcc_assert (t);
2672
2673 switch (gimple_code (t))
2674 {
2675 case GIMPLE_CALL:
2676 /* The per-stmt call flag indicates whether the call could alter
2677 control flow. */
2678 if (gimple_call_ctrl_altering_p (t))
2679 return true;
2680 break;
2681
2682 case GIMPLE_EH_DISPATCH:
2683 /* EH_DISPATCH branches to the individual catch handlers at
2684 this level of a try or allowed-exceptions region. It can
2685 fallthru to the next statement as well. */
2686 return true;
2687
2688 case GIMPLE_ASM:
2689 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2690 return true;
2691 break;
2692
2693 CASE_GIMPLE_OMP:
2694 /* OpenMP directives alter control flow. */
2695 return true;
2696
2697 case GIMPLE_TRANSACTION:
2698 /* A transaction start alters control flow. */
2699 return true;
2700
2701 default:
2702 break;
2703 }
2704
2705 /* If a statement can throw, it alters control flow. */
2706 return stmt_can_throw_internal (cfun, t);
2707 }
2708
2709
2710 /* Return true if T is a simple local goto. */
2711
2712 bool
2713 simple_goto_p (gimple *t)
2714 {
2715 return (gimple_code (t) == GIMPLE_GOTO
2716 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2717 }
2718
2719
2720 /* Return true if STMT should start a new basic block. PREV_STMT is
2721 the statement preceding STMT. It is used when STMT is a label or a
2722 case label. Labels should only start a new basic block if their
2723 previous statement wasn't a label. Otherwise, a sequence of labels
2724 would generate unnecessary basic blocks that contain only a single
2725 label. */
2726
2727 static inline bool
2728 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2729 {
2730 if (stmt == NULL)
2731 return false;
2732
2733 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2734 any nondebug stmts in the block. We don't want to start another
2735 block in this case: the debug stmt will already have started the
2736 one STMT would start if we weren't outputting debug stmts. */
2737 if (prev_stmt && is_gimple_debug (prev_stmt))
2738 return false;
2739
2740 /* Labels start a new basic block only if the preceding statement
2741 wasn't a label of the same type. This prevents the creation of
2742 consecutive blocks that have nothing but a single label. */
2743 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2744 {
2745 /* Nonlocal and computed GOTO targets always start a new block. */
2746 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2747 || FORCED_LABEL (gimple_label_label (label_stmt)))
2748 return true;
2749
2750 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2751 {
2752 if (DECL_NONLOCAL (gimple_label_label (plabel))
2753 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2754 return true;
2755
2756 cfg_stats.num_merged_labels++;
2757 return false;
2758 }
2759 else
2760 return true;
2761 }
2762 else if (gimple_code (stmt) == GIMPLE_CALL)
2763 {
2764 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2765 /* setjmp acts similar to a nonlocal GOTO target and thus should
2766 start a new block. */
2767 return true;
2768 if (gimple_call_internal_p (stmt, IFN_PHI)
2769 && prev_stmt
2770 && gimple_code (prev_stmt) != GIMPLE_LABEL
2771 && (gimple_code (prev_stmt) != GIMPLE_CALL
2772 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2773 /* PHI nodes start a new block unless preceded by a label
2774 or another PHI. */
2775 return true;
2776 }
2777
2778 return false;
2779 }
2780
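/* Illustrative sketch (hypothetical GIMPLE): in a run of labels

     L1:
     L2:
       x_1 = ...;

   L2 does not open a new basic block when L1 is an artificial, local
   label (and L2 is not forced or nonlocal); the merge is counted in
   cfg_stats.num_merged_labels.  */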
2781
2782 /* Return true if T should end a basic block. */
2783
2784 bool
2785 stmt_ends_bb_p (gimple *t)
2786 {
2787 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2788 }
2789
2790 /* Remove block annotations and other data structures. */
2791
2792 void
2793 delete_tree_cfg_annotations (struct function *fn)
2794 {
2795 vec_free (label_to_block_map_for_fn (fn));
2796 }
2797
2798 /* Return the virtual PHI node in BB, or NULL if there is none. */
2799
2800 gphi *
2801 get_virtual_phi (basic_block bb)
2802 {
2803 for (gphi_iterator gsi = gsi_start_phis (bb);
2804 !gsi_end_p (gsi);
2805 gsi_next (&gsi))
2806 {
2807 gphi *phi = gsi.phi ();
2808
2809 if (virtual_operand_p (PHI_RESULT (phi)))
2810 return phi;
2811 }
2812
2813 return NULL;
2814 }
2815
2816 /* Return the first statement in basic block BB. */
2817
2818 gimple *
2819 first_stmt (basic_block bb)
2820 {
2821 gimple_stmt_iterator i = gsi_start_bb (bb);
2822 gimple *stmt = NULL;
2823
2824 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2825 {
2826 gsi_next (&i);
2827 stmt = NULL;
2828 }
2829 return stmt;
2830 }
2831
2832 /* Return the first non-label statement in basic block BB. */
2833
2834 static gimple *
2835 first_non_label_stmt (basic_block bb)
2836 {
2837 gimple_stmt_iterator i = gsi_start_bb (bb);
2838 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2839 gsi_next (&i);
2840 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2841 }
2842
2843 /* Return the last statement in basic block BB. */
2844
2845 gimple *
2846 last_stmt (basic_block bb)
2847 {
2848 gimple_stmt_iterator i = gsi_last_bb (bb);
2849 gimple *stmt = NULL;
2850
2851 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2852 {
2853 gsi_prev (&i);
2854 stmt = NULL;
2855 }
2856 return stmt;
2857 }
2858
2859 /* Return the last statement of an otherwise empty block. Return NULL
2860 if the block is totally empty, or if it contains more than one
2861 statement. */
2862
2863 gimple *
2864 last_and_only_stmt (basic_block bb)
2865 {
2866 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2867 gimple *last, *prev;
2868
2869 if (gsi_end_p (i))
2870 return NULL;
2871
2872 last = gsi_stmt (i);
2873 gsi_prev_nondebug (&i);
2874 if (gsi_end_p (i))
2875 return last;
2876
2877 /* Empty statements should no longer appear in the instruction stream.
2878 Everything that might have appeared before should be deleted by
2879 remove_useless_stmts, and the optimizers should just gsi_remove
2880 instead of smashing with build_empty_stmt.
2881
2882 Thus the only thing that should appear here in a block containing
2883 one executable statement is a label. */
2884 prev = gsi_stmt (i);
2885 if (gimple_code (prev) == GIMPLE_LABEL)
2886 return last;
2887 else
2888 return NULL;
2889 }
2890
2891 /* Returns the basic block after which the new basic block created
2892 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2893 near its "logical" location. This is of most help to humans looking
2894 at debugging dumps. */
2895
2896 basic_block
2897 split_edge_bb_loc (edge edge_in)
2898 {
2899 basic_block dest = edge_in->dest;
2900 basic_block dest_prev = dest->prev_bb;
2901
2902 if (dest_prev)
2903 {
2904 edge e = find_edge (dest_prev, dest);
2905 if (e && !(e->flags & EDGE_COMPLEX))
2906 return edge_in->src;
2907 }
2908 return dest_prev;
2909 }
2910
2911 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2912 Abort on abnormal edges. */
2913
2914 static basic_block
2915 gimple_split_edge (edge edge_in)
2916 {
2917 basic_block new_bb, after_bb, dest;
2918 edge new_edge, e;
2919
2920 /* Abnormal edges cannot be split. */
2921 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2922
2923 dest = edge_in->dest;
2924
2925 after_bb = split_edge_bb_loc (edge_in);
2926
2927 new_bb = create_empty_bb (after_bb);
2928 new_bb->count = edge_in->count ();
2929
2930 /* We want to avoid re-allocating PHIs when we first
2931 add the fallthru edge from new_bb to dest but we also
2932 want to avoid changing PHI argument order when
2933 first redirecting edge_in away from dest. The former
2934 avoids changing PHI argument order by adding them last, and then
2935 the redirection swaps them back into place by means of an
2936 unordered remove.
2937 So hack around things by temporarily removing all PHIs
2938 from the destination during the edge redirection and then
2939 making sure the edges stay in order. */
2940 gimple_seq saved_phis = phi_nodes (dest);
2941 unsigned old_dest_idx = edge_in->dest_idx;
2942 set_phi_nodes (dest, NULL);
2943 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2944 e = redirect_edge_and_branch (edge_in, new_bb);
2945 gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
2946 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
2947 dest->il.gimple.phi_nodes = saved_phis;
2948
2949 return new_bb;
2950 }
2951
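/* A small sketch (hypothetical CFG): splitting the critical edge E
   from a two-successor block S to a two-predecessor block D,

     basic_block n = gimple_split_edge (e);

   yields S -> n -> D where n is empty, E is redirected to n, and the
   PHI arguments in D previously associated with E now flow in over
   n's single fallthru edge, in the same argument position.  */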
2952
2953 /* Verify properties of the address expression T whose base should be
2954 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2955
2956 static bool
2957 verify_address (tree t, bool verify_addressable)
2958 {
2959 bool old_constant;
2960 bool old_side_effects;
2961 bool new_constant;
2962 bool new_side_effects;
2963
2964 old_constant = TREE_CONSTANT (t);
2965 old_side_effects = TREE_SIDE_EFFECTS (t);
2966
2967 recompute_tree_invariant_for_addr_expr (t);
2968 new_side_effects = TREE_SIDE_EFFECTS (t);
2969 new_constant = TREE_CONSTANT (t);
2970
2971 if (old_constant != new_constant)
2972 {
2973 error ("constant not recomputed when %<ADDR_EXPR%> changed");
2974 return true;
2975 }
2976 if (old_side_effects != new_side_effects)
2977 {
2978 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
2979 return true;
2980 }
2981
2982 tree base = TREE_OPERAND (t, 0);
2983 while (handled_component_p (base))
2984 base = TREE_OPERAND (base, 0);
2985
2986 if (!(VAR_P (base)
2987 || TREE_CODE (base) == PARM_DECL
2988 || TREE_CODE (base) == RESULT_DECL))
2989 return false;
2990
2991 if (verify_addressable && !TREE_ADDRESSABLE (base))
2992 {
2993 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
2994 return true;
2995 }
2996
2997 return false;
2998 }
2999
3000
3001 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3002 Returns true if there is an error, otherwise false. */
3003
3004 static bool
3005 verify_types_in_gimple_min_lval (tree expr)
3006 {
3007 tree op;
3008
3009 if (is_gimple_id (expr))
3010 return false;
3011
3012 if (TREE_CODE (expr) != TARGET_MEM_REF
3013 && TREE_CODE (expr) != MEM_REF)
3014 {
3015 error ("invalid expression for min lvalue");
3016 return true;
3017 }
3018
3019 /* TARGET_MEM_REFs are strange beasts. */
3020 if (TREE_CODE (expr) == TARGET_MEM_REF)
3021 return false;
3022
3023 op = TREE_OPERAND (expr, 0);
3024 if (!is_gimple_val (op))
3025 {
3026 error ("invalid operand in indirect reference");
3027 debug_generic_stmt (op);
3028 return true;
3029 }
3030 /* Memory references now generally can involve a value conversion. */
3031
3032 return false;
3033 }
3034
3035 /* Verify if EXPR is a valid GIMPLE reference expression. If
3036 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3037 if there is an error, otherwise false. */
3038
3039 static bool
3040 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3041 {
3042 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3043
3044 if (TREE_CODE (expr) == REALPART_EXPR
3045 || TREE_CODE (expr) == IMAGPART_EXPR
3046 || TREE_CODE (expr) == BIT_FIELD_REF)
3047 {
3048 tree op = TREE_OPERAND (expr, 0);
3049 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3050 {
3051 error ("non-scalar %qs", code_name);
3052 return true;
3053 }
3054
3055 if (TREE_CODE (expr) == BIT_FIELD_REF)
3056 {
3057 tree t1 = TREE_OPERAND (expr, 1);
3058 tree t2 = TREE_OPERAND (expr, 2);
3059 poly_uint64 size, bitpos;
3060 if (!poly_int_tree_p (t1, &size)
3061 || !poly_int_tree_p (t2, &bitpos)
3062 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3063 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3064 {
3065 error ("invalid position or size operand to %qs", code_name);
3066 return true;
3067 }
3068 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3069 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3070 {
3071 error ("integral result type precision does not match "
3072 "field size of %qs", code_name);
3073 return true;
3074 }
3075 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3076 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3077 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3078 size))
3079 {
3080 error ("mode size of non-integral result does not "
3081 "match field size of %qs",
3082 code_name);
3083 return true;
3084 }
3085 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3086 && !type_has_mode_precision_p (TREE_TYPE (op)))
3087 {
3088 error ("%qs of non-mode-precision operand", code_name);
3089 return true;
3090 }
3091 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3092 && maybe_gt (size + bitpos,
3093 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3094 {
3095 error ("position plus size exceeds size of referenced object in "
3096 "%qs", code_name);
3097 return true;
3098 }
3099 }
3100
3101 if ((TREE_CODE (expr) == REALPART_EXPR
3102 || TREE_CODE (expr) == IMAGPART_EXPR)
3103 && !useless_type_conversion_p (TREE_TYPE (expr),
3104 TREE_TYPE (TREE_TYPE (op))))
3105 {
3106 error ("type mismatch in %qs reference", code_name);
3107 debug_generic_stmt (TREE_TYPE (expr));
3108 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3109 return true;
3110 }
3111 expr = op;
3112 }
3113
3114 while (handled_component_p (expr))
3115 {
3116 code_name = get_tree_code_name (TREE_CODE (expr));
3117
3118 if (TREE_CODE (expr) == REALPART_EXPR
3119 || TREE_CODE (expr) == IMAGPART_EXPR
3120 || TREE_CODE (expr) == BIT_FIELD_REF)
3121 {
3122 error ("non-top-level %qs", code_name);
3123 return true;
3124 }
3125
3126 tree op = TREE_OPERAND (expr, 0);
3127
3128 if (TREE_CODE (expr) == ARRAY_REF
3129 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3130 {
3131 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3132 || (TREE_OPERAND (expr, 2)
3133 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3134 || (TREE_OPERAND (expr, 3)
3135 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3136 {
3137 error ("invalid operands to %qs", code_name);
3138 debug_generic_stmt (expr);
3139 return true;
3140 }
3141 }
3142
3143 /* Verify if the reference array element types are compatible. */
3144 if (TREE_CODE (expr) == ARRAY_REF
3145 && !useless_type_conversion_p (TREE_TYPE (expr),
3146 TREE_TYPE (TREE_TYPE (op))))
3147 {
3148 error ("type mismatch in %qs", code_name);
3149 debug_generic_stmt (TREE_TYPE (expr));
3150 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3151 return true;
3152 }
3153 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3154 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3155 TREE_TYPE (TREE_TYPE (op))))
3156 {
3157 error ("type mismatch in %qs", code_name);
3158 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3159 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3160 return true;
3161 }
3162
3163 if (TREE_CODE (expr) == COMPONENT_REF)
3164 {
3165 if (TREE_OPERAND (expr, 2)
3166 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3167 {
3168 error ("invalid %qs offset operator", code_name);
3169 return true;
3170 }
3171 if (!useless_type_conversion_p (TREE_TYPE (expr),
3172 TREE_TYPE (TREE_OPERAND (expr, 1))))
3173 {
3174 error ("type mismatch in %qs", code_name);
3175 debug_generic_stmt (TREE_TYPE (expr));
3176 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3177 return true;
3178 }
3179 }
3180
3181 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3182 {
3183 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3184 that their operand is not an SSA name or an invariant when
3185 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3186 bug). Otherwise there is nothing to verify, gross mismatches at
3187 most invoke undefined behavior. */
3188 if (require_lvalue
3189 && (TREE_CODE (op) == SSA_NAME
3190 || is_gimple_min_invariant (op)))
3191 {
3192 error ("conversion of %qs on the left hand side of %qs",
3193 get_tree_code_name (TREE_CODE (op)), code_name);
3194 debug_generic_stmt (expr);
3195 return true;
3196 }
3197 else if (TREE_CODE (op) == SSA_NAME
3198 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3199 {
3200 error ("conversion of register to a different size in %qs",
3201 code_name);
3202 debug_generic_stmt (expr);
3203 return true;
3204 }
3205 else if (!handled_component_p (op))
3206 return false;
3207 }
3208
3209 expr = op;
3210 }
3211
3212 code_name = get_tree_code_name (TREE_CODE (expr));
3213
3214 if (TREE_CODE (expr) == MEM_REF)
3215 {
3216 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3217 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3218 && verify_address (TREE_OPERAND (expr, 0), false)))
3219 {
3220 error ("invalid address operand in %qs", code_name);
3221 debug_generic_stmt (expr);
3222 return true;
3223 }
3224 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3225 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3226 {
3227 error ("invalid offset operand in %qs", code_name);
3228 debug_generic_stmt (expr);
3229 return true;
3230 }
3231 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3232 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3233 {
3234 error ("invalid clique in %qs", code_name);
3235 debug_generic_stmt (expr);
3236 return true;
3237 }
3238 }
3239 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3240 {
3241 if (!TMR_BASE (expr)
3242 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3243 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3244 && verify_address (TMR_BASE (expr), false)))
3245 {
3246 error ("invalid address operand in %qs", code_name);
3247 return true;
3248 }
3249 if (!TMR_OFFSET (expr)
3250 || !poly_int_tree_p (TMR_OFFSET (expr))
3251 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3252 {
3253 error ("invalid offset operand in %qs", code_name);
3254 debug_generic_stmt (expr);
3255 return true;
3256 }
3257 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3258 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3259 {
3260 error ("invalid clique in %qs", code_name);
3261 debug_generic_stmt (expr);
3262 return true;
3263 }
3264 }
3265 else if (TREE_CODE (expr) == INDIRECT_REF)
3266 {
3267 error ("%qs in gimple IL", code_name);
3268 debug_generic_stmt (expr);
3269 return true;
3270 }
3271
3272 return ((require_lvalue || !is_gimple_min_invariant (expr))
3273 && verify_types_in_gimple_min_lval (expr));
3274 }
3275
3276 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3277 list of pointer-to types that is trivially convertible to DEST. */
3278
3279 static bool
3280 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3281 {
3282 tree src;
3283
3284 if (!TYPE_POINTER_TO (src_obj))
3285 return true;
3286
3287 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3288 if (useless_type_conversion_p (dest, src))
3289 return true;
3290
3291 return false;
3292 }
3293
3294 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3295 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3296
3297 static bool
3298 valid_fixed_convert_types_p (tree type1, tree type2)
3299 {
3300 return (FIXED_POINT_TYPE_P (type1)
3301 && (INTEGRAL_TYPE_P (type2)
3302 || SCALAR_FLOAT_TYPE_P (type2)
3303 || FIXED_POINT_TYPE_P (type2)));
3304 }
3305
3306 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3307 is a problem, otherwise false. */
3308
3309 static bool
3310 verify_gimple_call (gcall *stmt)
3311 {
3312 tree fn = gimple_call_fn (stmt);
3313 tree fntype, fndecl;
3314 unsigned i;
3315
3316 if (gimple_call_internal_p (stmt))
3317 {
3318 if (fn)
3319 {
3320 error ("gimple call has two targets");
3321 debug_generic_stmt (fn);
3322 return true;
3323 }
3324 }
3325 else
3326 {
3327 if (!fn)
3328 {
3329 error ("gimple call has no target");
3330 return true;
3331 }
3332 }
3333
3334 if (fn && !is_gimple_call_addr (fn))
3335 {
3336 error ("invalid function in gimple call");
3337 debug_generic_stmt (fn);
3338 return true;
3339 }
3340
3341 if (fn
3342 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3343 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3344 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3345 {
3346 error ("non-function in gimple call");
3347 return true;
3348 }
3349
3350 fndecl = gimple_call_fndecl (stmt);
3351 if (fndecl
3352 && TREE_CODE (fndecl) == FUNCTION_DECL
3353 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3354 && !DECL_PURE_P (fndecl)
3355 && !TREE_READONLY (fndecl))
3356 {
3357 error ("invalid pure const state for function");
3358 return true;
3359 }
3360
3361 tree lhs = gimple_call_lhs (stmt);
3362 if (lhs
3363 && (!is_gimple_lvalue (lhs)
3364 || verify_types_in_gimple_reference (lhs, true)))
3365 {
3366 error ("invalid LHS in gimple call");
3367 return true;
3368 }
3369
3370 if (gimple_call_ctrl_altering_p (stmt)
3371 && gimple_call_noreturn_p (stmt)
3372 && should_remove_lhs_p (lhs))
3373 {
3374 error ("LHS in %<noreturn%> call");
3375 return true;
3376 }
3377
3378 fntype = gimple_call_fntype (stmt);
3379 if (fntype
3380 && lhs
3381 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3382 /* ??? At least C++ misses conversions at assignments from
3383 void * call results.
3384 For now simply allow arbitrary pointer type conversions. */
3385 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3386 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3387 {
3388 error ("invalid conversion in gimple call");
3389 debug_generic_stmt (TREE_TYPE (lhs));
3390 debug_generic_stmt (TREE_TYPE (fntype));
3391 return true;
3392 }
3393
3394 if (gimple_call_chain (stmt)
3395 && !is_gimple_val (gimple_call_chain (stmt)))
3396 {
3397 error ("invalid static chain in gimple call");
3398 debug_generic_stmt (gimple_call_chain (stmt));
3399 return true;
3400 }
3401
3402 /* If there is a static chain argument, the call should either be
3403 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3404 if (gimple_call_chain (stmt)
3405 && fndecl
3406 && !DECL_STATIC_CHAIN (fndecl))
3407 {
3408 error ("static chain with function that doesn%'t use one");
3409 return true;
3410 }
3411
3412 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3413 {
3414 switch (DECL_FUNCTION_CODE (fndecl))
3415 {
3416 case BUILT_IN_UNREACHABLE:
3417 case BUILT_IN_TRAP:
3418 if (gimple_call_num_args (stmt) > 0)
3419 {
3420 /* Built-in unreachable with parameters might not be caught by
3421 undefined behavior sanitizer. Front ends check that users do not
3422 call them that way, but we also produce calls to
3423 __builtin_unreachable internally, for example when IPA figures
3424 out a call cannot happen in a legal program. In such cases,
3425 we must make sure arguments are stripped off. */
3426 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3427 "with arguments");
3428 return true;
3429 }
3430 break;
3431 default:
3432 break;
3433 }
3434 }
3435
3436 /* ??? The C frontend passes unpromoted arguments in case it
3437 didn't see a function declaration before the call. So for now
3438 leave the call arguments mostly unverified. Once we gimplify
3439 unit-at-a-time we have a chance to fix this. */
3440
3441 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3442 {
3443 tree arg = gimple_call_arg (stmt, i);
3444 if ((is_gimple_reg_type (TREE_TYPE (arg))
3445 && !is_gimple_val (arg))
3446 || (!is_gimple_reg_type (TREE_TYPE (arg))
3447 && !is_gimple_lvalue (arg)))
3448 {
3449 error ("invalid argument to gimple call");
3450 debug_generic_expr (arg);
3451 return true;
3452 }
3453 }
3454
3455 return false;
3456 }
3457
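/* For instance (hypothetical invalid IL), given a function declared
   noreturn, the checks above reject

     x_1 = my_noreturn_fn ();

   with "LHS in noreturn call" once the call is marked ctrl-altering,
   since such an LHS can never be given a value.  */
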
3458 /* Verifies the gimple comparison with the result type TYPE, the
3459 operands OP0 and OP1, and the comparison code CODE. */
3460
3461 static bool
3462 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3463 {
3464 tree op0_type = TREE_TYPE (op0);
3465 tree op1_type = TREE_TYPE (op1);
3466
3467 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3468 {
3469 error ("invalid operands in gimple comparison");
3470 return true;
3471 }
3472
3473 /* For comparisons we do not have the operation's type as the
3474 effective type the comparison is carried out in. Instead
3475 we require that either the first operand is trivially
3476 convertible into the second, or the other way around. */
3477 if (!useless_type_conversion_p (op0_type, op1_type)
3478 && !useless_type_conversion_p (op1_type, op0_type))
3479 {
3480 error ("mismatching comparison operand types");
3481 debug_generic_expr (op0_type);
3482 debug_generic_expr (op1_type);
3483 return true;
3484 }
3485
3486 /* The resulting type of a comparison may be an effective boolean type. */
3487 if (INTEGRAL_TYPE_P (type)
3488 && (TREE_CODE (type) == BOOLEAN_TYPE
3489 || TYPE_PRECISION (type) == 1))
3490 {
3491 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3492 || TREE_CODE (op1_type) == VECTOR_TYPE)
3493 && code != EQ_EXPR && code != NE_EXPR
3494 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3495 && !VECTOR_INTEGER_TYPE_P (op0_type))
3496 {
3497 error ("unsupported operation or type for vector comparison"
3498 " returning a boolean");
3499 debug_generic_expr (op0_type);
3500 debug_generic_expr (op1_type);
3501 return true;
3502 }
3503 }
3504 /* Or a boolean vector type with the same element count
3505 as the comparison operand types. */
3506 else if (TREE_CODE (type) == VECTOR_TYPE
3507 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3508 {
3509 if (TREE_CODE (op0_type) != VECTOR_TYPE
3510 || TREE_CODE (op1_type) != VECTOR_TYPE)
3511 {
3512 error ("non-vector operands in vector comparison");
3513 debug_generic_expr (op0_type);
3514 debug_generic_expr (op1_type);
3515 return true;
3516 }
3517
3518 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3519 TYPE_VECTOR_SUBPARTS (op0_type)))
3520 {
3521 error ("invalid vector comparison resulting type");
3522 debug_generic_expr (type);
3523 return true;
3524 }
3525 }
3526 else
3527 {
3528 error ("bogus comparison result type");
3529 debug_generic_expr (type);
3530 return true;
3531 }
3532
3533 return false;
3534 }
3535
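/* For example (hypothetical GIMPLE), a scalar comparison

     _1 = a_2 < b_3;

   may produce an effective boolean (BOOLEAN_TYPE or a 1-bit integral
   type).  An ordered comparison of two V4SF vectors, however, must
   produce a boolean vector with 4 elements; only EQ_EXPR/NE_EXPR (or
   integer/boolean vector operands) may yield a scalar boolean.  */
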
3536 /* Verify a gimple assignment statement STMT with an unary rhs.
3537 Returns true if anything is wrong. */
3538
3539 static bool
3540 verify_gimple_assign_unary (gassign *stmt)
3541 {
3542 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3543 tree lhs = gimple_assign_lhs (stmt);
3544 tree lhs_type = TREE_TYPE (lhs);
3545 tree rhs1 = gimple_assign_rhs1 (stmt);
3546 tree rhs1_type = TREE_TYPE (rhs1);
3547
3548 if (!is_gimple_reg (lhs))
3549 {
3550 error ("non-register as LHS of unary operation");
3551 return true;
3552 }
3553
3554 if (!is_gimple_val (rhs1))
3555 {
3556 error ("invalid operand in unary operation");
3557 return true;
3558 }
3559
3560 const char* const code_name = get_tree_code_name (rhs_code);
3561
3562 /* First handle conversions. */
3563 switch (rhs_code)
3564 {
3565 CASE_CONVERT:
3566 {
3567 /* Allow conversions between vectors with the same number of elements,
3568 provided that the conversion is OK for the element types too. */
3569 if (VECTOR_TYPE_P (lhs_type)
3570 && VECTOR_TYPE_P (rhs1_type)
3571 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3572 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3573 {
3574 lhs_type = TREE_TYPE (lhs_type);
3575 rhs1_type = TREE_TYPE (rhs1_type);
3576 }
3577 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3578 {
3579 error ("invalid vector types in nop conversion");
3580 debug_generic_expr (lhs_type);
3581 debug_generic_expr (rhs1_type);
3582 return true;
3583 }
3584
3585 /* Allow conversions from pointer type to integral type only if
3586 there is no sign or zero extension involved.
3587 For targets where the precision of ptrofftype doesn't match that
3588 of pointers we allow conversions to types where
3589 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3590 if ((POINTER_TYPE_P (lhs_type)
3591 && INTEGRAL_TYPE_P (rhs1_type))
3592 || (POINTER_TYPE_P (rhs1_type)
3593 && INTEGRAL_TYPE_P (lhs_type)
3594 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3595 #if defined(POINTERS_EXTEND_UNSIGNED)
3596 || (TYPE_MODE (rhs1_type) == ptr_mode
3597 && (TYPE_PRECISION (lhs_type)
3598 == BITS_PER_WORD /* word_mode */
3599 || (TYPE_PRECISION (lhs_type)
3600 == GET_MODE_PRECISION (Pmode))))
3601 #endif
3602 )))
3603 return false;
3604
3605 /* Allow conversion from integral to offset type and vice versa. */
3606 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3607 && INTEGRAL_TYPE_P (rhs1_type))
3608 || (INTEGRAL_TYPE_P (lhs_type)
3609 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3610 return false;
3611
3612 /* Otherwise assert we are converting between types of the
3613 same kind. */
3614 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3615 {
3616 error ("invalid types in nop conversion");
3617 debug_generic_expr (lhs_type);
3618 debug_generic_expr (rhs1_type);
3619 return true;
3620 }
3621
3622 return false;
3623 }
3624
3625 case ADDR_SPACE_CONVERT_EXPR:
3626 {
3627 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3628 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3629 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3630 {
3631 error ("invalid types in address space conversion");
3632 debug_generic_expr (lhs_type);
3633 debug_generic_expr (rhs1_type);
3634 return true;
3635 }
3636
3637 return false;
3638 }
3639
3640 case FIXED_CONVERT_EXPR:
3641 {
3642 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3643 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3644 {
3645 error ("invalid types in fixed-point conversion");
3646 debug_generic_expr (lhs_type);
3647 debug_generic_expr (rhs1_type);
3648 return true;
3649 }
3650
3651 return false;
3652 }
3653
3654 case FLOAT_EXPR:
3655 {
3656 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3657 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3658 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3659 {
3660 error ("invalid types in conversion to floating-point");
3661 debug_generic_expr (lhs_type);
3662 debug_generic_expr (rhs1_type);
3663 return true;
3664 }
3665
3666 return false;
3667 }
3668
3669 case FIX_TRUNC_EXPR:
3670 {
3671 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3672 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3673 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3674 {
3675 error ("invalid types in conversion to integer");
3676 debug_generic_expr (lhs_type);
3677 debug_generic_expr (rhs1_type);
3678 return true;
3679 }
3680
3681 return false;
3682 }
3683
3684 case VEC_UNPACK_HI_EXPR:
3685 case VEC_UNPACK_LO_EXPR:
3686 case VEC_UNPACK_FLOAT_HI_EXPR:
3687 case VEC_UNPACK_FLOAT_LO_EXPR:
3688 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3689 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3690 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3691 || TREE_CODE (lhs_type) != VECTOR_TYPE
3692 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3693 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3694 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3695 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3696 || ((rhs_code == VEC_UNPACK_HI_EXPR
3697 || rhs_code == VEC_UNPACK_LO_EXPR)
3698 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3699 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3700 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3701 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3702 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3703 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3704 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3705 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3706 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3707 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3708 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3709 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3710 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3711 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3712 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3713 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3714 {
3715 error ("type mismatch in %qs expression", code_name);
3716 debug_generic_expr (lhs_type);
3717 debug_generic_expr (rhs1_type);
3718 return true;
3719 }
3720
3721 return false;
3722
3723 case NEGATE_EXPR:
3724 case ABS_EXPR:
3725 case BIT_NOT_EXPR:
3726 case PAREN_EXPR:
3727 case CONJ_EXPR:
3728 break;
3729
3730 case ABSU_EXPR:
3731 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3732 || !TYPE_UNSIGNED (lhs_type)
3733 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3734 || TYPE_UNSIGNED (rhs1_type)
3735 || element_precision (lhs_type) != element_precision (rhs1_type))
3736 {
3737 error ("invalid types for %qs", code_name);
3738 debug_generic_expr (lhs_type);
3739 debug_generic_expr (rhs1_type);
3740 return true;
3741 }
3742 return false;
3743
3744 case VEC_DUPLICATE_EXPR:
3745 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3746 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3747 {
3748 error ("%qs should be from a scalar to a like vector", code_name);
3749 debug_generic_expr (lhs_type);
3750 debug_generic_expr (rhs1_type);
3751 return true;
3752 }
3753 return false;
3754
3755 default:
3756 gcc_unreachable ();
3757 }
3758
3759 /* For the remaining codes assert there is no conversion involved. */
3760 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3761 {
3762 error ("non-trivial conversion in unary operation");
3763 debug_generic_expr (lhs_type);
3764 debug_generic_expr (rhs1_type);
3765 return true;
3766 }
3767
3768 return false;
3769 }
3770
3771 /* Verify a gimple assignment statement STMT with a binary rhs.
3772 Returns true if anything is wrong. */
3773
3774 static bool
3775 verify_gimple_assign_binary (gassign *stmt)
3776 {
3777 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3778 tree lhs = gimple_assign_lhs (stmt);
3779 tree lhs_type = TREE_TYPE (lhs);
3780 tree rhs1 = gimple_assign_rhs1 (stmt);
3781 tree rhs1_type = TREE_TYPE (rhs1);
3782 tree rhs2 = gimple_assign_rhs2 (stmt);
3783 tree rhs2_type = TREE_TYPE (rhs2);
3784
3785 if (!is_gimple_reg (lhs))
3786 {
3787 error ("non-register as LHS of binary operation");
3788 return true;
3789 }
3790
3791 if (!is_gimple_val (rhs1)
3792 || !is_gimple_val (rhs2))
3793 {
3794 error ("invalid operands in binary operation");
3795 return true;
3796 }
3797
3798 const char* const code_name = get_tree_code_name (rhs_code);
3799
3800 /* First handle operations that involve different types. */
3801 switch (rhs_code)
3802 {
3803 case COMPLEX_EXPR:
3804 {
3805 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3806 || !(INTEGRAL_TYPE_P (rhs1_type)
3807 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3808 || !(INTEGRAL_TYPE_P (rhs2_type)
3809 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3810 {
3811 error ("type mismatch in %qs", code_name);
3812 debug_generic_expr (lhs_type);
3813 debug_generic_expr (rhs1_type);
3814 debug_generic_expr (rhs2_type);
3815 return true;
3816 }
3817
3818 return false;
3819 }
3820
3821 case LSHIFT_EXPR:
3822 case RSHIFT_EXPR:
3823 case LROTATE_EXPR:
3824 case RROTATE_EXPR:
3825 {
3826 /* Shifts and rotates are ok on integral types, fixed-point
3827 types and integer vector types. */
3828 if ((!INTEGRAL_TYPE_P (rhs1_type)
3829 && !FIXED_POINT_TYPE_P (rhs1_type)
3830 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3831 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3832 || (!INTEGRAL_TYPE_P (rhs2_type)
3833 /* Vector shifts of vectors are also ok. */
3834 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3835 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3836 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3837 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3838 || !useless_type_conversion_p (lhs_type, rhs1_type))
3839 {
3840 error ("type mismatch in %qs", code_name);
3841 debug_generic_expr (lhs_type);
3842 debug_generic_expr (rhs1_type);
3843 debug_generic_expr (rhs2_type);
3844 return true;
3845 }
3846
3847 return false;
3848 }
3849
3850 case WIDEN_LSHIFT_EXPR:
3851 {
3852 if (!INTEGRAL_TYPE_P (lhs_type)
3853 || !INTEGRAL_TYPE_P (rhs1_type)
3854 || TREE_CODE (rhs2) != INTEGER_CST
3855 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3856 {
3857 error ("type mismatch in %qs", code_name);
3858 debug_generic_expr (lhs_type);
3859 debug_generic_expr (rhs1_type);
3860 debug_generic_expr (rhs2_type);
3861 return true;
3862 }
3863
3864 return false;
3865 }
3866
3867 case VEC_WIDEN_LSHIFT_HI_EXPR:
3868 case VEC_WIDEN_LSHIFT_LO_EXPR:
3869 {
3870 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3871 || TREE_CODE (lhs_type) != VECTOR_TYPE
3872 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3873 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3874 || TREE_CODE (rhs2) != INTEGER_CST
3875 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3876 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3877 {
3878 error ("type mismatch in %qs", code_name);
3879 debug_generic_expr (lhs_type);
3880 debug_generic_expr (rhs1_type);
3881 debug_generic_expr (rhs2_type);
3882 return true;
3883 }
3884
3885 return false;
3886 }
3887
3888 case PLUS_EXPR:
3889 case MINUS_EXPR:
3890 {
3891 tree lhs_etype = lhs_type;
3892 tree rhs1_etype = rhs1_type;
3893 tree rhs2_etype = rhs2_type;
3894 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3895 {
3896 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3897 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3898 {
3899 error ("invalid non-vector operands to %qs", code_name);
3900 return true;
3901 }
3902 lhs_etype = TREE_TYPE (lhs_type);
3903 rhs1_etype = TREE_TYPE (rhs1_type);
3904 rhs2_etype = TREE_TYPE (rhs2_type);
3905 }
3906 if (POINTER_TYPE_P (lhs_etype)
3907 || POINTER_TYPE_P (rhs1_etype)
3908 || POINTER_TYPE_P (rhs2_etype))
3909 {
3910 error ("invalid (pointer) operands %qs", code_name);
3911 return true;
3912 }
3913
3914 /* Continue with generic binary expression handling. */
3915 break;
3916 }
3917
3918 case POINTER_PLUS_EXPR:
3919 {
3920 if (!POINTER_TYPE_P (rhs1_type)
3921 || !useless_type_conversion_p (lhs_type, rhs1_type)
3922 || !ptrofftype_p (rhs2_type))
3923 {
3924 error ("type mismatch in %qs", code_name);
3925 debug_generic_stmt (lhs_type);
3926 debug_generic_stmt (rhs1_type);
3927 debug_generic_stmt (rhs2_type);
3928 return true;
3929 }
3930
3931 return false;
3932 }
3933
3934 case POINTER_DIFF_EXPR:
3935 {
3936 if (!POINTER_TYPE_P (rhs1_type)
3937 || !POINTER_TYPE_P (rhs2_type)
3938 /* Because we special-case pointers to void we allow difference
3939 of arbitrary pointers with the same mode. */
3940 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3941 || TREE_CODE (lhs_type) != INTEGER_TYPE
3942 || TYPE_UNSIGNED (lhs_type)
3943 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3944 {
3945 error ("type mismatch in %qs", code_name);
3946 debug_generic_stmt (lhs_type);
3947 debug_generic_stmt (rhs1_type);
3948 debug_generic_stmt (rhs2_type);
3949 return true;
3950 }
3951
3952 return false;
3953 }
3954
3955 case TRUTH_ANDIF_EXPR:
3956 case TRUTH_ORIF_EXPR:
3957 case TRUTH_AND_EXPR:
3958 case TRUTH_OR_EXPR:
3959 case TRUTH_XOR_EXPR:
3960
3961 gcc_unreachable ();
3962
3963 case LT_EXPR:
3964 case LE_EXPR:
3965 case GT_EXPR:
3966 case GE_EXPR:
3967 case EQ_EXPR:
3968 case NE_EXPR:
3969 case UNORDERED_EXPR:
3970 case ORDERED_EXPR:
3971 case UNLT_EXPR:
3972 case UNLE_EXPR:
3973 case UNGT_EXPR:
3974 case UNGE_EXPR:
3975 case UNEQ_EXPR:
3976 case LTGT_EXPR:
3977 /* Comparisons are also binary, but the result type is not
3978 connected to the operand types. */
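      /* Editorial illustration (hypothetical SSA names): a comparison like
	   _1 = a_2 < b_3;
	 may have a boolean LHS over integer operands, so only the two
	 operand types are checked against each other, delegated below.  */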
3979 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3980
3981 case WIDEN_MULT_EXPR:
3982 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3983 return true;
3984 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3985 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3986
3987 case WIDEN_SUM_EXPR:
3988 {
3989 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3990 || TREE_CODE (lhs_type) != VECTOR_TYPE)
3991 && ((!INTEGRAL_TYPE_P (rhs1_type)
3992 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3993 || (!INTEGRAL_TYPE_P (lhs_type)
3994 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3995 || !useless_type_conversion_p (lhs_type, rhs2_type)
3996 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3997 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3998 {
3999 error ("type mismatch in %qs", code_name);
4000 debug_generic_expr (lhs_type);
4001 debug_generic_expr (rhs1_type);
4002 debug_generic_expr (rhs2_type);
4003 return true;
4004 }
4005 return false;
4006 }
4007
4008 case VEC_WIDEN_MULT_HI_EXPR:
4009 case VEC_WIDEN_MULT_LO_EXPR:
4010 case VEC_WIDEN_MULT_EVEN_EXPR:
4011 case VEC_WIDEN_MULT_ODD_EXPR:
4012 {
4013 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4014 || TREE_CODE (lhs_type) != VECTOR_TYPE
4015 || !types_compatible_p (rhs1_type, rhs2_type)
4016 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4017 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4018 {
4019 error ("type mismatch in %qs", code_name);
4020 debug_generic_expr (lhs_type);
4021 debug_generic_expr (rhs1_type);
4022 debug_generic_expr (rhs2_type);
4023 return true;
4024 }
4025 return false;
4026 }
4027
4028 case VEC_PACK_TRUNC_EXPR:
4029 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concatenate
4030 vector boolean types. */
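    /* Editorial illustration, assuming hypothetical mask types: packing
       two 4-element boolean vectors into one 8-element boolean vector,
	 mask_3 = VEC_PACK_TRUNC_EXPR <mask_1, mask_2>;
       is accepted here even though nothing is actually truncated.  */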
4031 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4032 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4033 && types_compatible_p (rhs1_type, rhs2_type)
4034 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4035 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4036 return false;
4037
4038 /* Fallthru. */
4039 case VEC_PACK_SAT_EXPR:
4040 case VEC_PACK_FIX_TRUNC_EXPR:
4041 {
4042 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4043 || TREE_CODE (lhs_type) != VECTOR_TYPE
4044 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4045 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4046 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4047 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4048 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4049 || !types_compatible_p (rhs1_type, rhs2_type)
4050 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4051 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4052 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4053 TYPE_VECTOR_SUBPARTS (lhs_type)))
4054 {
4055 error ("type mismatch in %qs", code_name);
4056 debug_generic_expr (lhs_type);
4057 debug_generic_expr (rhs1_type);
4058 debug_generic_expr (rhs2_type);
4059 return true;
4060 }
4061
4062 return false;
4063 }
4064
4065 case VEC_PACK_FLOAT_EXPR:
4066 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4067 || TREE_CODE (lhs_type) != VECTOR_TYPE
4068 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4069 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4070 || !types_compatible_p (rhs1_type, rhs2_type)
4071 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4072 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4073 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4074 TYPE_VECTOR_SUBPARTS (lhs_type)))
4075 {
4076 error ("type mismatch in %qs", code_name);
4077 debug_generic_expr (lhs_type);
4078 debug_generic_expr (rhs1_type);
4079 debug_generic_expr (rhs2_type);
4080 return true;
4081 }
4082
4083 return false;
4084
4085 case MULT_EXPR:
4086 case MULT_HIGHPART_EXPR:
4087 case TRUNC_DIV_EXPR:
4088 case CEIL_DIV_EXPR:
4089 case FLOOR_DIV_EXPR:
4090 case ROUND_DIV_EXPR:
4091 case TRUNC_MOD_EXPR:
4092 case CEIL_MOD_EXPR:
4093 case FLOOR_MOD_EXPR:
4094 case ROUND_MOD_EXPR:
4095 case RDIV_EXPR:
4096 case EXACT_DIV_EXPR:
4097 case MIN_EXPR:
4098 case MAX_EXPR:
4099 case BIT_IOR_EXPR:
4100 case BIT_XOR_EXPR:
4101 case BIT_AND_EXPR:
4102 /* Continue with generic binary expression handling. */
4103 break;
4104
4105 case VEC_SERIES_EXPR:
4106 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4107 {
4108 error ("type mismatch in %qs", code_name);
4109 debug_generic_expr (rhs1_type);
4110 debug_generic_expr (rhs2_type);
4111 return true;
4112 }
4113 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4114 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4115 {
4116 error ("vector type expected in %qs", code_name);
4117 debug_generic_expr (lhs_type);
4118 return true;
4119 }
4120 return false;
4121
4122 default:
4123 gcc_unreachable ();
4124 }
4125
4126 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4127 || !useless_type_conversion_p (lhs_type, rhs2_type))
4128 {
4129 error ("type mismatch in binary expression");
4130 debug_generic_stmt (lhs_type);
4131 debug_generic_stmt (rhs1_type);
4132 debug_generic_stmt (rhs2_type);
4133 return true;
4134 }
4135
4136 return false;
4137 }
4138
4139 /* Verify a gimple assignment statement STMT with a ternary rhs.
4140 Returns true if anything is wrong. */
4141
4142 static bool
4143 verify_gimple_assign_ternary (gassign *stmt)
4144 {
4145 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4146 tree lhs = gimple_assign_lhs (stmt);
4147 tree lhs_type = TREE_TYPE (lhs);
4148 tree rhs1 = gimple_assign_rhs1 (stmt);
4149 tree rhs1_type = TREE_TYPE (rhs1);
4150 tree rhs2 = gimple_assign_rhs2 (stmt);
4151 tree rhs2_type = TREE_TYPE (rhs2);
4152 tree rhs3 = gimple_assign_rhs3 (stmt);
4153 tree rhs3_type = TREE_TYPE (rhs3);
4154
4155 if (!is_gimple_reg (lhs))
4156 {
4157 error ("non-register as LHS of ternary operation");
4158 return true;
4159 }
4160
4161 if ((rhs_code == COND_EXPR
4162 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4163 || !is_gimple_val (rhs2)
4164 || !is_gimple_val (rhs3))
4165 {
4166 error ("invalid operands in ternary operation");
4167 return true;
4168 }
4169
4170 const char* const code_name = get_tree_code_name (rhs_code);
4171
4172 /* First handle operations that involve different types. */
4173 switch (rhs_code)
4174 {
4175 case WIDEN_MULT_PLUS_EXPR:
4176 case WIDEN_MULT_MINUS_EXPR:
4177 if ((!INTEGRAL_TYPE_P (rhs1_type)
4178 && !FIXED_POINT_TYPE_P (rhs1_type))
4179 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4180 || !useless_type_conversion_p (lhs_type, rhs3_type)
4181 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4182 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4183 {
4184 error ("type mismatch in %qs", code_name);
4185 debug_generic_expr (lhs_type);
4186 debug_generic_expr (rhs1_type);
4187 debug_generic_expr (rhs2_type);
4188 debug_generic_expr (rhs3_type);
4189 return true;
4190 }
4191 break;
4192
4193 case VEC_COND_EXPR:
4194 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4195 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4196 TYPE_VECTOR_SUBPARTS (lhs_type)))
4197 {
4198 error ("the first argument of a %qs must be of a "
4199 "boolean vector type of the same number of elements "
4200 "as the result", code_name);
4201 debug_generic_expr (lhs_type);
4202 debug_generic_expr (rhs1_type);
4203 return true;
4204 }
4205 /* Fallthrough. */
4206 case COND_EXPR:
4207 if (!is_gimple_val (rhs1)
4208 && verify_gimple_comparison (TREE_TYPE (rhs1),
4209 TREE_OPERAND (rhs1, 0),
4210 TREE_OPERAND (rhs1, 1),
4211 TREE_CODE (rhs1)))
4212 return true;
4213 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4214 || !useless_type_conversion_p (lhs_type, rhs3_type))
4215 {
4216 error ("type mismatch in %qs", code_name);
4217 debug_generic_expr (lhs_type);
4218 debug_generic_expr (rhs2_type);
4219 debug_generic_expr (rhs3_type);
4220 return true;
4221 }
4222 break;
4223
4224 case VEC_PERM_EXPR:
4225 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4226 || !useless_type_conversion_p (lhs_type, rhs2_type))
4227 {
4228 error ("type mismatch in %qs", code_name);
4229 debug_generic_expr (lhs_type);
4230 debug_generic_expr (rhs1_type);
4231 debug_generic_expr (rhs2_type);
4232 debug_generic_expr (rhs3_type);
4233 return true;
4234 }
4235
4236 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4237 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4238 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4239 {
4240 error ("vector types expected in %qs", code_name);
4241 debug_generic_expr (lhs_type);
4242 debug_generic_expr (rhs1_type);
4243 debug_generic_expr (rhs2_type);
4244 debug_generic_expr (rhs3_type);
4245 return true;
4246 }
4247
4248 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4249 TYPE_VECTOR_SUBPARTS (rhs2_type))
4250 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4251 TYPE_VECTOR_SUBPARTS (rhs3_type))
4252 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4253 TYPE_VECTOR_SUBPARTS (lhs_type)))
4254 {
4255 error ("vectors with different element number found in %qs",
4256 code_name);
4257 debug_generic_expr (lhs_type);
4258 debug_generic_expr (rhs1_type);
4259 debug_generic_expr (rhs2_type);
4260 debug_generic_expr (rhs3_type);
4261 return true;
4262 }
4263
4264 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4265 || (TREE_CODE (rhs3) != VECTOR_CST
4266 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4267 (TREE_TYPE (rhs3_type)))
4268 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4269 (TREE_TYPE (rhs1_type))))))
4270 {
4271 error ("invalid mask type in %qs", code_name);
4272 debug_generic_expr (lhs_type);
4273 debug_generic_expr (rhs1_type);
4274 debug_generic_expr (rhs2_type);
4275 debug_generic_expr (rhs3_type);
4276 return true;
4277 }
4278
4279 return false;
4280
4281 case SAD_EXPR:
4282 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4283 || !useless_type_conversion_p (lhs_type, rhs3_type)
4284 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4285 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4286 {
4287 error ("type mismatch in %qs", code_name);
4288 debug_generic_expr (lhs_type);
4289 debug_generic_expr (rhs1_type);
4290 debug_generic_expr (rhs2_type);
4291 debug_generic_expr (rhs3_type);
4292 return true;
4293 }
4294
4295 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4296 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4297 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4298 {
4299 error ("vector types expected in %qs", code_name);
4300 debug_generic_expr (lhs_type);
4301 debug_generic_expr (rhs1_type);
4302 debug_generic_expr (rhs2_type);
4303 debug_generic_expr (rhs3_type);
4304 return true;
4305 }
4306
4307 return false;
4308
4309 case BIT_INSERT_EXPR:
4310 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4311 {
4312 error ("type mismatch in %qs", code_name);
4313 debug_generic_expr (lhs_type);
4314 debug_generic_expr (rhs1_type);
4315 return true;
4316 }
4317 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4318 && INTEGRAL_TYPE_P (rhs2_type))
4319 /* Vector element insert. */
4320 || (VECTOR_TYPE_P (rhs1_type)
4321 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4322 /* Aligned sub-vector insert. */
4323 || (VECTOR_TYPE_P (rhs1_type)
4324 && VECTOR_TYPE_P (rhs2_type)
4325 && types_compatible_p (TREE_TYPE (rhs1_type),
4326 TREE_TYPE (rhs2_type))
4327 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4328 TYPE_VECTOR_SUBPARTS (rhs2_type))
4329 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
4330 {
4331 error ("not allowed type combination in %qs", code_name);
4332 debug_generic_expr (rhs1_type);
4333 debug_generic_expr (rhs2_type);
4334 return true;
4335 }
4336 if (! tree_fits_uhwi_p (rhs3)
4337 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4338 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4339 {
4340 error ("invalid position or size in %qs", code_name);
4341 return true;
4342 }
4343 if (INTEGRAL_TYPE_P (rhs1_type)
4344 && !type_has_mode_precision_p (rhs1_type))
4345 {
4346 error ("%qs into non-mode-precision operand", code_name);
4347 return true;
4348 }
4349 if (INTEGRAL_TYPE_P (rhs1_type))
4350 {
4351 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4352 if (bitpos >= TYPE_PRECISION (rhs1_type)
4353 || (bitpos + TYPE_PRECISION (rhs2_type)
4354 > TYPE_PRECISION (rhs1_type)))
4355 {
4356 error ("insertion out of range in %qs", code_name);
4357 return true;
4358 }
4359 }
4360 else if (VECTOR_TYPE_P (rhs1_type))
4361 {
4362 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4363 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4364 if (bitpos % bitsize != 0)
4365 {
4366 error ("%qs not at element boundary", code_name);
4367 return true;
4368 }
4369 }
4370 return false;
4371
4372 case DOT_PROD_EXPR:
4373 {
4374 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4375 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4376 && ((!INTEGRAL_TYPE_P (rhs1_type)
4377 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4378 || (!INTEGRAL_TYPE_P (lhs_type)
4379 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4380 || !types_compatible_p (rhs1_type, rhs2_type)
4381 || !useless_type_conversion_p (lhs_type, rhs3_type)
4382 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4383 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4384 {
4385 error ("type mismatch in %qs", code_name);
4386 debug_generic_expr (lhs_type);
4387 debug_generic_expr (rhs1_type);
4388 debug_generic_expr (rhs2_type);
4389 return true;
4390 }
4391 return false;
4392 }
4393
4394 case REALIGN_LOAD_EXPR:
4395 /* FIXME. */
4396 return false;
4397
4398 default:
4399 gcc_unreachable ();
4400 }
4401 return false;
4402 }
4403
4404 /* Verify a gimple assignment statement STMT with a single rhs.
4405 Returns true if anything is wrong. */
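/* Editorial sketch (hypothetical GIMPLE): a store of a register-type
   value into memory must have a register RHS, so
     a.x = b.y;
   is invalid for scalar fields and has to be split into
     _1 = b.y;
     a.x = _1;
   while a whole-aggregate copy such as a = b is fine because its type
   is not a register type.  */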
4406
4407 static bool
4408 verify_gimple_assign_single (gassign *stmt)
4409 {
4410 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4411 tree lhs = gimple_assign_lhs (stmt);
4412 tree lhs_type = TREE_TYPE (lhs);
4413 tree rhs1 = gimple_assign_rhs1 (stmt);
4414 tree rhs1_type = TREE_TYPE (rhs1);
4415 bool res = false;
4416
4417 const char* const code_name = get_tree_code_name (rhs_code);
4418
4419 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4420 {
4421 error ("non-trivial conversion in %qs", code_name);
4422 debug_generic_expr (lhs_type);
4423 debug_generic_expr (rhs1_type);
4424 return true;
4425 }
4426
4427 if (gimple_clobber_p (stmt)
4428 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4429 {
4430 error ("%qs LHS in clobber statement",
4431 get_tree_code_name (TREE_CODE (lhs)));
4432 debug_generic_expr (lhs);
4433 return true;
4434 }
4435
4436 if (handled_component_p (lhs)
4437 || TREE_CODE (lhs) == MEM_REF
4438 || TREE_CODE (lhs) == TARGET_MEM_REF)
4439 res |= verify_types_in_gimple_reference (lhs, true);
4440
4441 /* Special codes we cannot handle via their class. */
4442 switch (rhs_code)
4443 {
4444 case ADDR_EXPR:
4445 {
4446 tree op = TREE_OPERAND (rhs1, 0);
4447 if (!is_gimple_addressable (op))
4448 {
4449 error ("invalid operand in %qs", code_name);
4450 return true;
4451 }
4452
4453 /* Technically there is no longer a need for matching types, but
4454 gimple hygiene asks for this check. In LTO we can end up
4455 combining incompatible units and thus end up with addresses
4456 of globals that change their type to a common one. */
4457 if (!in_lto_p
4458 && !types_compatible_p (TREE_TYPE (op),
4459 TREE_TYPE (TREE_TYPE (rhs1)))
4460 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4461 TREE_TYPE (op)))
4462 {
4463 error ("type mismatch in %qs", code_name);
4464 debug_generic_stmt (TREE_TYPE (rhs1));
4465 debug_generic_stmt (TREE_TYPE (op));
4466 return true;
4467 }
4468
4469 return (verify_address (rhs1, true)
4470 || verify_types_in_gimple_reference (op, true));
4471 }
4472
4473 /* tcc_reference */
4474 case INDIRECT_REF:
4475 error ("%qs in gimple IL", code_name);
4476 return true;
4477
4478 case COMPONENT_REF:
4479 case BIT_FIELD_REF:
4480 case ARRAY_REF:
4481 case ARRAY_RANGE_REF:
4482 case VIEW_CONVERT_EXPR:
4483 case REALPART_EXPR:
4484 case IMAGPART_EXPR:
4485 case TARGET_MEM_REF:
4486 case MEM_REF:
4487 if (!is_gimple_reg (lhs)
4488 && is_gimple_reg_type (TREE_TYPE (lhs)))
4489 {
4490 error ("invalid RHS for gimple memory store: %qs", code_name);
4491 debug_generic_stmt (lhs);
4492 debug_generic_stmt (rhs1);
4493 return true;
4494 }
4495 return res || verify_types_in_gimple_reference (rhs1, false);
4496
4497 /* tcc_constant */
4498 case SSA_NAME:
4499 case INTEGER_CST:
4500 case REAL_CST:
4501 case FIXED_CST:
4502 case COMPLEX_CST:
4503 case VECTOR_CST:
4504 case STRING_CST:
4505 return res;
4506
4507 /* tcc_declaration */
4508 case CONST_DECL:
4509 return res;
4510 case VAR_DECL:
4511 case PARM_DECL:
4512 if (!is_gimple_reg (lhs)
4513 && !is_gimple_reg (rhs1)
4514 && is_gimple_reg_type (TREE_TYPE (lhs)))
4515 {
4516 error ("invalid RHS for gimple memory store: %qs", code_name);
4517 debug_generic_stmt (lhs);
4518 debug_generic_stmt (rhs1);
4519 return true;
4520 }
4521 return res;
4522
4523 case CONSTRUCTOR:
4524 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4525 {
4526 unsigned int i;
4527 tree elt_i, elt_v, elt_t = NULL_TREE;
4528
4529 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4530 return res;
4531 /* For vector CONSTRUCTORs we require that either it is an empty
4532 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4533 (then the element count must be correct to cover the whole
4534 outer vector and the index must be NULL on all elements), or it
4535 is a CONSTRUCTOR of scalar elements, where as an exception we
4536 allow a smaller number of elements (assuming zero filling) and
4537 consecutive indexes as compared to NULL indexes (such
4538 CONSTRUCTORs can appear in the IL from FEs). */
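	  /* Editorial examples (hypothetical SSA names) for a V4SI result:
	       { _1, _2, _3, _4 }    scalar elements covering the vector;
	       { _1, _2 }            scalar elements, zero-filled tail;
	       { v2si_1, v2si_2 }    two V2SI halves, indexes all NULL.  */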
4539 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4540 {
4541 if (elt_t == NULL_TREE)
4542 {
4543 elt_t = TREE_TYPE (elt_v);
4544 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4545 {
4546 tree elt_t = TREE_TYPE (elt_v);
4547 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4548 TREE_TYPE (elt_t)))
4549 {
4550 error ("incorrect type of vector %qs elements",
4551 code_name);
4552 debug_generic_stmt (rhs1);
4553 return true;
4554 }
4555 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4556 * TYPE_VECTOR_SUBPARTS (elt_t),
4557 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4558 {
4559 error ("incorrect number of vector %qs elements",
4560 code_name);
4561 debug_generic_stmt (rhs1);
4562 return true;
4563 }
4564 }
4565 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4566 elt_t))
4567 {
4568 error ("incorrect type of vector %qs elements",
4569 code_name);
4570 debug_generic_stmt (rhs1);
4571 return true;
4572 }
4573 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4574 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4575 {
4576 error ("incorrect number of vector %qs elements",
4577 code_name);
4578 debug_generic_stmt (rhs1);
4579 return true;
4580 }
4581 }
4582 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4583 {
4584 error ("incorrect type of vector CONSTRUCTOR elements");
4585 debug_generic_stmt (rhs1);
4586 return true;
4587 }
4588 if (elt_i != NULL_TREE
4589 && (TREE_CODE (elt_t) == VECTOR_TYPE
4590 || TREE_CODE (elt_i) != INTEGER_CST
4591 || compare_tree_int (elt_i, i) != 0))
4592 {
4593 error ("vector %qs with non-NULL element index",
4594 code_name);
4595 debug_generic_stmt (rhs1);
4596 return true;
4597 }
4598 if (!is_gimple_val (elt_v))
4599 {
4600 error ("vector %qs element is not a GIMPLE value",
4601 code_name);
4602 debug_generic_stmt (rhs1);
4603 return true;
4604 }
4605 }
4606 }
4607 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4608 {
4609 error ("non-vector %qs with elements", code_name);
4610 debug_generic_stmt (rhs1);
4611 return true;
4612 }
4613 return res;
4614
4615 case ASSERT_EXPR:
4616 /* FIXME. */
4617 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4618 if (rhs1 == boolean_false_node)
4619 {
4620 error ("%qs with an always-false condition", code_name);
4621 debug_generic_stmt (rhs1);
4622 return true;
4623 }
4624 break;
4625
4626 case OBJ_TYPE_REF:
4627 case WITH_SIZE_EXPR:
4628 /* FIXME. */
4629 return res;
4630
4631 default:;
4632 }
4633
4634 return res;
4635 }
4636
4637 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4638 is a problem, otherwise false. */
4639
4640 static bool
4641 verify_gimple_assign (gassign *stmt)
4642 {
4643 switch (gimple_assign_rhs_class (stmt))
4644 {
4645 case GIMPLE_SINGLE_RHS:
4646 return verify_gimple_assign_single (stmt);
4647
4648 case GIMPLE_UNARY_RHS:
4649 return verify_gimple_assign_unary (stmt);
4650
4651 case GIMPLE_BINARY_RHS:
4652 return verify_gimple_assign_binary (stmt);
4653
4654 case GIMPLE_TERNARY_RHS:
4655 return verify_gimple_assign_ternary (stmt);
4656
4657 default:
4658 gcc_unreachable ();
4659 }
4660 }
4661
4662 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4663 is a problem, otherwise false. */
4664
4665 static bool
4666 verify_gimple_return (greturn *stmt)
4667 {
4668 tree op = gimple_return_retval (stmt);
4669 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4670
4671 /* We cannot test for present return values as we do not fix up missing
4672 return values from the original source. */
4673 if (op == NULL)
4674 return false;
4675
4676 if (!is_gimple_val (op)
4677 && TREE_CODE (op) != RESULT_DECL)
4678 {
4679 error ("invalid operand in return statement");
4680 debug_generic_stmt (op);
4681 return true;
4682 }
4683
4684 if ((TREE_CODE (op) == RESULT_DECL
4685 && DECL_BY_REFERENCE (op))
4686 || (TREE_CODE (op) == SSA_NAME
4687 && SSA_NAME_VAR (op)
4688 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4689 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4690 op = TREE_TYPE (op);
4691
4692 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4693 {
4694 error ("invalid conversion in return statement");
4695 debug_generic_stmt (restype);
4696 debug_generic_stmt (TREE_TYPE (op));
4697 return true;
4698 }
4699
4700 return false;
4701 }
4702
4703
4704 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4705 is a problem, otherwise false. */
4706
4707 static bool
4708 verify_gimple_goto (ggoto *stmt)
4709 {
4710 tree dest = gimple_goto_dest (stmt);
4711
4712 /* ??? We have two canonical forms of direct goto destinations, a
4713 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
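  /* Editorial illustration (hypothetical names): a direct goto carries a
     bare LABEL_DECL destination, e.g.
       goto <D.1234>;
     while a computed goto carries a pointer-typed GIMPLE value, e.g. one
     loaded from a label address taken with &&lab.  */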
4714 if (TREE_CODE (dest) != LABEL_DECL
4715 && (!is_gimple_val (dest)
4716 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4717 {
4718 error ("goto destination is neither a label nor a pointer");
4719 return true;
4720 }
4721
4722 return false;
4723 }
4724
4725 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4726 is a problem, otherwise false. */
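/* Editorial sketch (hypothetical labels) of a switch this accepts:

     switch (i_1) <default: L0, case 1: L1, case 3 ... 5: L2>

   the default label comes first with no CASE_LOW, and the remaining
   labels are sorted in increasing order and share a single type.  */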
4727
4728 static bool
4729 verify_gimple_switch (gswitch *stmt)
4730 {
4731 unsigned int i, n;
4732 tree elt, prev_upper_bound = NULL_TREE;
4733 tree index_type, elt_type = NULL_TREE;
4734
4735 if (!is_gimple_val (gimple_switch_index (stmt)))
4736 {
4737 error ("invalid operand to switch statement");
4738 debug_generic_stmt (gimple_switch_index (stmt));
4739 return true;
4740 }
4741
4742 index_type = TREE_TYPE (gimple_switch_index (stmt));
4743 if (! INTEGRAL_TYPE_P (index_type))
4744 {
4745 error ("non-integral type switch statement");
4746 debug_generic_expr (index_type);
4747 return true;
4748 }
4749
4750 elt = gimple_switch_label (stmt, 0);
4751 if (CASE_LOW (elt) != NULL_TREE
4752 || CASE_HIGH (elt) != NULL_TREE
4753 || CASE_CHAIN (elt) != NULL_TREE)
4754 {
4755 error ("invalid default case label in switch statement");
4756 debug_generic_expr (elt);
4757 return true;
4758 }
4759
4760 n = gimple_switch_num_labels (stmt);
4761 for (i = 1; i < n; i++)
4762 {
4763 elt = gimple_switch_label (stmt, i);
4764
4765 if (CASE_CHAIN (elt))
4766 {
4767 error ("invalid %<CASE_CHAIN%>");
4768 debug_generic_expr (elt);
4769 return true;
4770 }
4771 if (! CASE_LOW (elt))
4772 {
4773 error ("invalid case label in switch statement");
4774 debug_generic_expr (elt);
4775 return true;
4776 }
4777 if (CASE_HIGH (elt)
4778 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4779 {
4780 error ("invalid case range in switch statement");
4781 debug_generic_expr (elt);
4782 return true;
4783 }
4784
4785 if (! elt_type)
4786 {
4787 elt_type = TREE_TYPE (CASE_LOW (elt));
4788 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4789 {
4790 error ("type precision mismatch in switch statement");
4791 return true;
4792 }
4793 }
4794 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4795 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4796 {
4797 error ("type mismatch for case label in switch statement");
4798 debug_generic_expr (elt);
4799 return true;
4800 }
4801
4802 if (prev_upper_bound)
4803 {
4804 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4805 {
4806 error ("case labels not sorted in switch statement");
4807 return true;
4808 }
4809 }
4810
4811 prev_upper_bound = CASE_HIGH (elt);
4812 if (! prev_upper_bound)
4813 prev_upper_bound = CASE_LOW (elt);
4814 }
4815
4816 return false;
4817 }
4818
4819 /* Verify a gimple debug statement STMT.
4820 Returns true if anything is wrong. */
4821
4822 static bool
4823 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4824 {
4825 /* There isn't much that could be wrong in a gimple debug stmt. A
4826 gimple debug bind stmt, for example, maps a tree (usually a
4827 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4828 or member of an aggregate type) to another tree that can be an
4829 arbitrary expression. These stmts expand into debug insns and
4830 are converted to debug notes by var-tracking.c. */
4831 return false;
4832 }
4833
4834 /* Verify a gimple label statement STMT.
4835 Returns true if anything is wrong. */
4836
4837 static bool
4838 verify_gimple_label (glabel *stmt)
4839 {
4840 tree decl = gimple_label_label (stmt);
4841 int uid;
4842 bool err = false;
4843
4844 if (TREE_CODE (decl) != LABEL_DECL)
4845 return true;
4846 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4847 && DECL_CONTEXT (decl) != current_function_decl)
4848 {
4849 error ("label context is not the current function declaration");
4850 err |= true;
4851 }
4852
4853 uid = LABEL_DECL_UID (decl);
4854 if (cfun->cfg
4855 && (uid == -1
4856 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4857 {
4858 error ("incorrect entry in %<label_to_block_map%>");
4859 err |= true;
4860 }
4861
4862 uid = EH_LANDING_PAD_NR (decl);
4863 if (uid)
4864 {
4865 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4866 if (decl != lp->post_landing_pad)
4867 {
4868 error ("incorrect setting of landing pad number");
4869 err |= true;
4870 }
4871 }
4872
4873 return err;
4874 }
4875
4876 /* Verify a gimple cond statement STMT.
4877 Returns true if anything is wrong. */
4878
4879 static bool
4880 verify_gimple_cond (gcond *stmt)
4881 {
4882 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4883 {
4884 error ("invalid comparison code in gimple cond");
4885 return true;
4886 }
4887 if (!(!gimple_cond_true_label (stmt)
4888 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4889 || !(!gimple_cond_false_label (stmt)
4890 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4891 {
4892 error ("invalid labels in gimple cond");
4893 return true;
4894 }
4895
4896 return verify_gimple_comparison (boolean_type_node,
4897 gimple_cond_lhs (stmt),
4898 gimple_cond_rhs (stmt),
4899 gimple_cond_code (stmt));
4900 }
4901
4902 /* Verify the GIMPLE statement STMT. Returns true if there is an
4903 error, otherwise false. */
4904
4905 static bool
4906 verify_gimple_stmt (gimple *stmt)
4907 {
4908 switch (gimple_code (stmt))
4909 {
4910 case GIMPLE_ASSIGN:
4911 return verify_gimple_assign (as_a <gassign *> (stmt));
4912
4913 case GIMPLE_LABEL:
4914 return verify_gimple_label (as_a <glabel *> (stmt));
4915
4916 case GIMPLE_CALL:
4917 return verify_gimple_call (as_a <gcall *> (stmt));
4918
4919 case GIMPLE_COND:
4920 return verify_gimple_cond (as_a <gcond *> (stmt));
4921
4922 case GIMPLE_GOTO:
4923 return verify_gimple_goto (as_a <ggoto *> (stmt));
4924
4925 case GIMPLE_SWITCH:
4926 return verify_gimple_switch (as_a <gswitch *> (stmt));
4927
4928 case GIMPLE_RETURN:
4929 return verify_gimple_return (as_a <greturn *> (stmt));
4930
4931 case GIMPLE_ASM:
4932 return false;
4933
4934 case GIMPLE_TRANSACTION:
4935 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4936
4937 /* Tuples that do not have tree operands. */
4938 case GIMPLE_NOP:
4939 case GIMPLE_PREDICT:
4940 case GIMPLE_RESX:
4941 case GIMPLE_EH_DISPATCH:
4942 case GIMPLE_EH_MUST_NOT_THROW:
4943 return false;
4944
4945 CASE_GIMPLE_OMP:
4946 /* OpenMP directives are validated by the FE and never operated
4947 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4948 non-gimple expressions when the main index variable has had
4949 its address taken. This does not affect the loop itself
4950 because the header of a GIMPLE_OMP_FOR is merely used to determine
4951 how to set up the parallel iteration. */
4952 return false;
4953
4954 case GIMPLE_DEBUG:
4955 return verify_gimple_debug (stmt);
4956
4957 default:
4958 gcc_unreachable ();
4959 }
4960 }
4961
4962 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4963 and false otherwise. */
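/* Editorial sketch (hypothetical SSA names): a well-formed scalar PHI

     x_3 = PHI <x_1(2), x_2(4)>

   has an SSA name as its result and a GIMPLE value for every argument;
   a virtual PHI must instead use the virtual operand for both the
   result and all arguments.  */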
4964
4965 static bool
4966 verify_gimple_phi (gphi *phi)
4967 {
4968 bool err = false;
4969 unsigned i;
4970 tree phi_result = gimple_phi_result (phi);
4971 bool virtual_p;
4972
4973 if (!phi_result)
4974 {
4975 error ("invalid %<PHI%> result");
4976 return true;
4977 }
4978
4979 virtual_p = virtual_operand_p (phi_result);
4980 if (TREE_CODE (phi_result) != SSA_NAME
4981 || (virtual_p
4982 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4983 {
4984 error ("invalid %<PHI%> result");
4985 err = true;
4986 }
4987
4988 for (i = 0; i < gimple_phi_num_args (phi); i++)
4989 {
4990 tree t = gimple_phi_arg_def (phi, i);
4991
4992 if (!t)
4993 {
4994 error ("missing %<PHI%> def");
4995 err |= true;
4996 continue;
4997 }
4998 /* Addressable variables do have SSA_NAMEs but they
4999 are not considered gimple values. */
5000 else if ((TREE_CODE (t) == SSA_NAME
5001 && virtual_p != virtual_operand_p (t))
5002 || (virtual_p
5003 && (TREE_CODE (t) != SSA_NAME
5004 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5005 || (!virtual_p
5006 && !is_gimple_val (t)))
5007 {
5008 error ("invalid %<PHI%> argument");
5009 debug_generic_expr (t);
5010 err |= true;
5011 }
5012 #ifdef ENABLE_TYPES_CHECKING
5013 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5014 {
5015 error ("incompatible types in %<PHI%> argument %u", i);
5016 debug_generic_stmt (TREE_TYPE (phi_result));
5017 debug_generic_stmt (TREE_TYPE (t));
5018 err |= true;
5019 }
5020 #endif
5021 }
5022
5023 return err;
5024 }
5025
5026 /* Verify the GIMPLE statements inside the sequence STMTS. */
5027
5028 static bool
5029 verify_gimple_in_seq_2 (gimple_seq stmts)
5030 {
5031 gimple_stmt_iterator ittr;
5032 bool err = false;
5033
5034 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5035 {
5036 gimple *stmt = gsi_stmt (ittr);
5037
5038 switch (gimple_code (stmt))
5039 {
5040 case GIMPLE_BIND:
5041 err |= verify_gimple_in_seq_2 (
5042 gimple_bind_body (as_a <gbind *> (stmt)));
5043 break;
5044
5045 case GIMPLE_TRY:
5046 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5047 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5048 break;
5049
5050 case GIMPLE_EH_FILTER:
5051 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5052 break;
5053
5054 case GIMPLE_EH_ELSE:
5055 {
5056 geh_else *eh_else = as_a <geh_else *> (stmt);
5057 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5058 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5059 }
5060 break;
5061
5062 case GIMPLE_CATCH:
5063 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5064 as_a <gcatch *> (stmt)));
5065 break;
5066
5067 case GIMPLE_TRANSACTION:
5068 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5069 break;
5070
5071 default:
5072 {
5073 bool err2 = verify_gimple_stmt (stmt);
5074 if (err2)
5075 debug_gimple_stmt (stmt);
5076 err |= err2;
5077 }
5078 }
5079 }
5080
5081 return err;
5082 }
5083
5084 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5085 is a problem, otherwise false. */
5086
5087 static bool
5088 verify_gimple_transaction (gtransaction *stmt)
5089 {
5090 tree lab;
5091
5092 lab = gimple_transaction_label_norm (stmt);
5093 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5094 return true;
5095 lab = gimple_transaction_label_uninst (stmt);
5096 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5097 return true;
5098 lab = gimple_transaction_label_over (stmt);
5099 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5100 return true;
5101
5102 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5103 }
5104
5105
5106 /* Verify the GIMPLE statements inside the statement list STMTS. */
5107
5108 DEBUG_FUNCTION void
5109 verify_gimple_in_seq (gimple_seq stmts)
5110 {
5111 timevar_push (TV_TREE_STMT_VERIFY);
5112 if (verify_gimple_in_seq_2 (stmts))
5113 internal_error ("%<verify_gimple%> failed");
5114 timevar_pop (TV_TREE_STMT_VERIFY);
5115 }
5116
5117 /* Return true when the tree node T can be shared. */
5118
5119 static bool
5120 tree_node_can_be_shared (tree t)
5121 {
5122 if (IS_TYPE_OR_DECL_P (t)
5123 || TREE_CODE (t) == SSA_NAME
5124 || TREE_CODE (t) == IDENTIFIER_NODE
5125 || TREE_CODE (t) == CASE_LABEL_EXPR
5126 || is_gimple_min_invariant (t))
5127 return true;
5128
5129 if (t == error_mark_node)
5130 return true;
5131
5132 return false;
5133 }
5134
5135 /* Called via walk_tree. Verify tree sharing. */
5136
5137 static tree
5138 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5139 {
5140 hash_set<void *> *visited = (hash_set<void *> *) data;
5141
5142 if (tree_node_can_be_shared (*tp))
5143 {
5144 *walk_subtrees = false;
5145 return NULL;
5146 }
5147
5148 if (visited->add (*tp))
5149 return *tp;
5150
5151 return NULL;
5152 }
5153
5154 /* Called via walk_gimple_stmt. Verify tree sharing. */
5155
5156 static tree
5157 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5158 {
5159 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5160 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5161 }
5162
5163 static bool eh_error_found;
5164 bool
5165 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5166 hash_set<gimple *> *visited)
5167 {
5168 if (!visited->contains (stmt))
5169 {
5170 error ("dead statement in EH table");
5171 debug_gimple_stmt (stmt);
5172 eh_error_found = true;
5173 }
5174 return true;
5175 }
5176
5177 /* Verify that the block of location LOC is in BLOCKS. */
5178
5179 static bool
5180 verify_location (hash_set<tree> *blocks, location_t loc)
5181 {
5182 tree block = LOCATION_BLOCK (loc);
5183 if (block != NULL_TREE
5184 && !blocks->contains (block))
5185 {
5186 error ("location references block not in block tree");
5187 return true;
5188 }
5189 if (block != NULL_TREE)
5190 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5191 return false;
5192 }
5193
5194 /* Called via walk_tree. Verify that expressions have no blocks. */
5195
5196 static tree
5197 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5198 {
5199 if (!EXPR_P (*tp))
5200 {
5201 *walk_subtrees = false;
5202 return NULL;
5203 }
5204
5205 location_t loc = EXPR_LOCATION (*tp);
5206 if (LOCATION_BLOCK (loc) != NULL)
5207 return *tp;
5208
5209 return NULL;
5210 }
5211
5212 /* Called via walk_tree. Verify locations of expressions. */
5213
5214 static tree
5215 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5216 {
5217 hash_set<tree> *blocks = (hash_set<tree> *) data;
5218 tree t = *tp;
5219
5220 /* ??? This doesn't really belong here but there's no good place to
5221 stick this remainder of old verify_expr. */
5222 /* ??? This barfs on debug stmts which contain binds to vars with
5223 different function context. */
5224 #if 0
5225 if (VAR_P (t)
5226 || TREE_CODE (t) == PARM_DECL
5227 || TREE_CODE (t) == RESULT_DECL)
5228 {
5229 tree context = decl_function_context (t);
5230 if (context != cfun->decl
5231 && !SCOPE_FILE_SCOPE_P (context)
5232 && !TREE_STATIC (t)
5233 && !DECL_EXTERNAL (t))
5234 {
5235 error ("local declaration from a different function");
5236 return t;
5237 }
5238 }
5239 #endif
5240
5241 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5242 {
5243 tree x = DECL_DEBUG_EXPR (t);
5244 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5245 if (addr)
5246 return addr;
5247 }
5248 if ((VAR_P (t)
5249 || TREE_CODE (t) == PARM_DECL
5250 || TREE_CODE (t) == RESULT_DECL)
5251 && DECL_HAS_VALUE_EXPR_P (t))
5252 {
5253 tree x = DECL_VALUE_EXPR (t);
5254 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5255 if (addr)
5256 return addr;
5257 }
5258
5259 if (!EXPR_P (t))
5260 {
5261 *walk_subtrees = false;
5262 return NULL;
5263 }
5264
5265 location_t loc = EXPR_LOCATION (t);
5266 if (verify_location (blocks, loc))
5267 return t;
5268
5269 return NULL;
5270 }
5271
5272 /* Called via walk_gimple_op. Verify locations of expressions. */
5273
5274 static tree
5275 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5276 {
5277 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5278 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5279 }
5280
5281 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5282
5283 static void
5284 collect_subblocks (hash_set<tree> *blocks, tree block)
5285 {
5286 tree t;
5287 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5288 {
5289 blocks->add (t);
5290 collect_subblocks (blocks, t);
5291 }
5292 }
5293
5294 /* Disable warnings about missing quoting in GCC diagnostics for
5295 the verification errors. Their format strings don't follow
5296 GCC diagnostic conventions and trigger an ICE in the end. */
5297 #if __GNUC__ >= 10
5298 # pragma GCC diagnostic push
5299 # pragma GCC diagnostic ignored "-Wformat-diag"
5300 #endif
5301
5302 /* Verify the GIMPLE statements in the CFG of FN. */
5303
5304 DEBUG_FUNCTION void
5305 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5306 {
5307 basic_block bb;
5308 bool err = false;
5309
5310 timevar_push (TV_TREE_STMT_VERIFY);
5311 hash_set<void *> visited;
5312 hash_set<gimple *> visited_throwing_stmts;
5313
5314 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5315 hash_set<tree> blocks;
5316 if (DECL_INITIAL (fn->decl))
5317 {
5318 blocks.add (DECL_INITIAL (fn->decl));
5319 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5320 }
5321
5322 FOR_EACH_BB_FN (bb, fn)
5323 {
5324 gimple_stmt_iterator gsi;
5325 edge_iterator ei;
5326 edge e;
5327
5328 for (gphi_iterator gpi = gsi_start_phis (bb);
5329 !gsi_end_p (gpi);
5330 gsi_next (&gpi))
5331 {
5332 gphi *phi = gpi.phi ();
5333 bool err2 = false;
5334 unsigned i;
5335
5336 if (gimple_bb (phi) != bb)
5337 {
5338 error ("gimple_bb (phi) is set to a wrong basic block");
5339 err2 = true;
5340 }
5341
5342 err2 |= verify_gimple_phi (phi);
5343
5344 /* Only PHI arguments have locations. */
5345 if (gimple_location (phi) != UNKNOWN_LOCATION)
5346 {
5347 error ("PHI node with location");
5348 err2 = true;
5349 }
5350
5351 for (i = 0; i < gimple_phi_num_args (phi); i++)
5352 {
5353 tree arg = gimple_phi_arg_def (phi, i);
5354 tree addr = walk_tree (&arg, verify_node_sharing_1,
5355 &visited, NULL);
5356 if (addr)
5357 {
5358 error ("incorrect sharing of tree nodes");
5359 debug_generic_expr (addr);
5360 err2 |= true;
5361 }
5362 location_t loc = gimple_phi_arg_location (phi, i);
5363 if (virtual_operand_p (gimple_phi_result (phi))
5364 && loc != UNKNOWN_LOCATION)
5365 {
5366 error ("virtual PHI with argument locations");
5367 err2 = true;
5368 }
5369 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5370 if (addr)
5371 {
5372 debug_generic_expr (addr);
5373 err2 = true;
5374 }
5375 err2 |= verify_location (&blocks, loc);
5376 }
5377
5378 if (err2)
5379 debug_gimple_stmt (phi);
5380 err |= err2;
5381 }
5382
5383 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5384 {
5385 gimple *stmt = gsi_stmt (gsi);
5386 bool err2 = false;
5387 struct walk_stmt_info wi;
5388 tree addr;
5389 int lp_nr;
5390
5391 if (gimple_bb (stmt) != bb)
5392 {
5393 error ("gimple_bb (stmt) is set to a wrong basic block");
5394 err2 = true;
5395 }
5396
5397 err2 |= verify_gimple_stmt (stmt);
5398 err2 |= verify_location (&blocks, gimple_location (stmt));
5399
5400 memset (&wi, 0, sizeof (wi));
5401 wi.info = (void *) &visited;
5402 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5403 if (addr)
5404 {
5405 error ("incorrect sharing of tree nodes");
5406 debug_generic_expr (addr);
5407 err2 |= true;
5408 }
5409
5410 memset (&wi, 0, sizeof (wi));
5411 wi.info = (void *) &blocks;
5412 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5413 if (addr)
5414 {
5415 debug_generic_expr (addr);
5416 err2 |= true;
5417 }
5418
5419 /* If the statement is marked as part of an EH region, then it is
5420 expected that the statement could throw. Verify that when we
5421 have optimizations that simplify statements such that we prove
5422 they cannot throw, we also update the other data structures
5423 to match. */
5424 lp_nr = lookup_stmt_eh_lp (stmt);
5425 if (lp_nr != 0)
5426 visited_throwing_stmts.add (stmt);
5427 if (lp_nr > 0)
5428 {
5429 if (!stmt_could_throw_p (cfun, stmt))
5430 {
5431 if (verify_nothrow)
5432 {
5433 error ("statement marked for throw, but doesn%'t");
5434 err2 |= true;
5435 }
5436 }
5437 else if (!gsi_one_before_end_p (gsi))
5438 {
5439 error ("statement marked for throw in middle of block");
5440 err2 |= true;
5441 }
5442 }
5443
5444 if (err2)
5445 debug_gimple_stmt (stmt);
5446 err |= err2;
5447 }
5448
5449 FOR_EACH_EDGE (e, ei, bb->succs)
5450 if (e->goto_locus != UNKNOWN_LOCATION)
5451 err |= verify_location (&blocks, e->goto_locus);
5452 }
5453
5454 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5455 eh_error_found = false;
5456 if (eh_table)
5457 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5458 (&visited_throwing_stmts);
5459
5460 if (err || eh_error_found)
5461 internal_error ("verify_gimple failed");
5462
5463 verify_histograms ();
5464 timevar_pop (TV_TREE_STMT_VERIFY);
5465 }
5466
5467
5468 /* Verifies that the flow information is OK. */
5469
5470 static int
5471 gimple_verify_flow_info (void)
5472 {
5473 int err = 0;
5474 basic_block bb;
5475 gimple_stmt_iterator gsi;
5476 gimple *stmt;
5477 edge e;
5478 edge_iterator ei;
5479
5480 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5481 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5482 {
5483 error ("ENTRY_BLOCK has IL associated with it");
5484 err = 1;
5485 }
5486
5487 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5488 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5489 {
5490 error ("EXIT_BLOCK has IL associated with it");
5491 err = 1;
5492 }
5493
5494 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5495 if (e->flags & EDGE_FALLTHRU)
5496 {
5497 error ("fallthru to exit from bb %d", e->src->index);
5498 err = 1;
5499 }
5500
5501 FOR_EACH_BB_FN (bb, cfun)
5502 {
5503 bool found_ctrl_stmt = false;
5504
5505 stmt = NULL;
5506
5507 /* Skip labels on the start of basic block. */
5508 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5509 {
5510 tree label;
5511 gimple *prev_stmt = stmt;
5512
5513 stmt = gsi_stmt (gsi);
5514
5515 if (gimple_code (stmt) != GIMPLE_LABEL)
5516 break;
5517
5518 label = gimple_label_label (as_a <glabel *> (stmt));
5519 if (prev_stmt && DECL_NONLOCAL (label))
5520 {
5521 error ("nonlocal label ");
5522 print_generic_expr (stderr, label);
5523 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5524 bb->index);
5525 err = 1;
5526 }
5527
5528 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5529 {
5530 error ("EH landing pad label ");
5531 print_generic_expr (stderr, label);
5532 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5533 bb->index);
5534 err = 1;
5535 }
5536
5537 if (label_to_block (cfun, label) != bb)
5538 {
5539 error ("label ");
5540 print_generic_expr (stderr, label);
5541 fprintf (stderr, " to block does not match in bb %d",
5542 bb->index);
5543 err = 1;
5544 }
5545
5546 if (decl_function_context (label) != current_function_decl)
5547 {
5548 error ("label ");
5549 print_generic_expr (stderr, label);
5550 fprintf (stderr, " has incorrect context in bb %d",
5551 bb->index);
5552 err = 1;
5553 }
5554 }
5555
5556 /* Verify that the body of basic block BB is free of control flow. */
5557 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5558 {
5559 gimple *stmt = gsi_stmt (gsi);
5560
5561 if (found_ctrl_stmt)
5562 {
5563 error ("control flow in the middle of basic block %d",
5564 bb->index);
5565 err = 1;
5566 }
5567
5568 if (stmt_ends_bb_p (stmt))
5569 found_ctrl_stmt = true;
5570
5571 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5572 {
5573 error ("label ");
5574 print_generic_expr (stderr, gimple_label_label (label_stmt));
5575 fprintf (stderr, " in the middle of basic block %d", bb->index);
5576 err = 1;
5577 }
5578 }
5579
5580 gsi = gsi_last_nondebug_bb (bb);
5581 if (gsi_end_p (gsi))
5582 continue;
5583
5584 stmt = gsi_stmt (gsi);
5585
5586 if (gimple_code (stmt) == GIMPLE_LABEL)
5587 continue;
5588
5589 err |= verify_eh_edges (stmt);
5590
5591 if (is_ctrl_stmt (stmt))
5592 {
5593 FOR_EACH_EDGE (e, ei, bb->succs)
5594 if (e->flags & EDGE_FALLTHRU)
5595 {
5596 error ("fallthru edge after a control statement in bb %d",
5597 bb->index);
5598 err = 1;
5599 }
5600 }
5601
5602 if (gimple_code (stmt) != GIMPLE_COND)
5603 {
5604 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5605 after anything other than a GIMPLE_COND statement. */
5606 FOR_EACH_EDGE (e, ei, bb->succs)
5607 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5608 {
5609 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5610 bb->index);
5611 err = 1;
5612 }
5613 }
5614
5615 switch (gimple_code (stmt))
5616 {
5617 case GIMPLE_COND:
5618 {
5619 edge true_edge;
5620 edge false_edge;
5621
5622 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5623
5624 if (!true_edge
5625 || !false_edge
5626 || !(true_edge->flags & EDGE_TRUE_VALUE)
5627 || !(false_edge->flags & EDGE_FALSE_VALUE)
5628 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5629 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5630 || EDGE_COUNT (bb->succs) >= 3)
5631 {
5632 error ("wrong outgoing edge flags at end of bb %d",
5633 bb->index);
5634 err = 1;
5635 }
5636 }
5637 break;
5638
5639 case GIMPLE_GOTO:
5640 if (simple_goto_p (stmt))
5641 {
5642 error ("explicit goto at end of bb %d", bb->index);
5643 err = 1;
5644 }
5645 else
5646 {
5647 /* FIXME. We should double check that the labels in the
5648 destination blocks have their address taken. */
5649 FOR_EACH_EDGE (e, ei, bb->succs)
5650 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5651 | EDGE_FALSE_VALUE))
5652 || !(e->flags & EDGE_ABNORMAL))
5653 {
5654 error ("wrong outgoing edge flags at end of bb %d",
5655 bb->index);
5656 err = 1;
5657 }
5658 }
5659 break;
5660
5661 case GIMPLE_CALL:
5662 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5663 break;
5664 /* fallthru */
5665 case GIMPLE_RETURN:
5666 if (!single_succ_p (bb)
5667 || (single_succ_edge (bb)->flags
5668 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5669 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5670 {
5671 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5672 err = 1;
5673 }
5674 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5675 {
5676 error ("return edge does not point to exit in bb %d",
5677 bb->index);
5678 err = 1;
5679 }
5680 break;
5681
5682 case GIMPLE_SWITCH:
5683 {
5684 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5685 tree prev;
5686 edge e;
5687 size_t i, n;
5688
5689 n = gimple_switch_num_labels (switch_stmt);
5690
5691 /* Mark all the destination basic blocks. */
5692 for (i = 0; i < n; ++i)
5693 {
5694 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5695 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5696 label_bb->aux = (void *)1;
5697 }
5698
5699 /* Verify that the case labels are sorted. */
5700 prev = gimple_switch_label (switch_stmt, 0);
5701 for (i = 1; i < n; ++i)
5702 {
5703 tree c = gimple_switch_label (switch_stmt, i);
5704 if (!CASE_LOW (c))
5705 {
5706 error ("found default case not at the start of "
5707 "case vector");
5708 err = 1;
5709 continue;
5710 }
5711 if (CASE_LOW (prev)
5712 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5713 {
5714 error ("case labels not sorted: ");
5715 print_generic_expr (stderr, prev);
5716 fprintf (stderr, " is greater than ");
5717 print_generic_expr (stderr, c);
5718 fprintf (stderr, " but comes before it.\n");
5719 err = 1;
5720 }
5721 prev = c;
5722 }
5723 /* VRP will remove the default case if it can prove it will
5724 never be executed. So do not verify there always exists
5725 a default case here. */
5726
5727 FOR_EACH_EDGE (e, ei, bb->succs)
5728 {
5729 if (!e->dest->aux)
5730 {
5731 error ("extra outgoing edge %d->%d",
5732 bb->index, e->dest->index);
5733 err = 1;
5734 }
5735
5736 e->dest->aux = (void *)2;
5737 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5738 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5739 {
5740 error ("wrong outgoing edge flags at end of bb %d",
5741 bb->index);
5742 err = 1;
5743 }
5744 }
5745
5746 /* Check that we have all of them. */
5747 for (i = 0; i < n; ++i)
5748 {
5749 basic_block label_bb = gimple_switch_label_bb (cfun,
5750 switch_stmt, i);
5751
5752 if (label_bb->aux != (void *)2)
5753 {
5754 error ("missing edge %i->%i", bb->index, label_bb->index);
5755 err = 1;
5756 }
5757 }
5758
5759 FOR_EACH_EDGE (e, ei, bb->succs)
5760 e->dest->aux = (void *)0;
5761 }
5762 break;
5763
5764 case GIMPLE_EH_DISPATCH:
5765 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5766 break;
5767
5768 default:
5769 break;
5770 }
5771 }
5772
5773 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5774 verify_dominators (CDI_DOMINATORS);
5775
5776 return err;
5777 }
5778
5779 #if __GNUC__ >= 10
5780 # pragma GCC diagnostic pop
5781 #endif
5782
5783 /* Updates phi nodes after creating a forwarder block joined
5784 by edge FALLTHRU. */
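/* Editorial sketch (hypothetical SSA names): after a forwarder block
   DUMMY is split off in front of BB, a PHI that used to live in BB,

     x_3 = PHI <x_1(2), x_2(4)>,

   stays in DUMMY with a renamed result, and BB gets a fresh PHI whose
   single argument is that result on the fallthru edge; arguments stored
   on the remaining edges into BB are flushed afterwards.  */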
5785
5786 static void
5787 gimple_make_forwarder_block (edge fallthru)
5788 {
5789 edge e;
5790 edge_iterator ei;
5791 basic_block dummy, bb;
5792 tree var;
5793 gphi_iterator gsi;
5794 bool forward_location_p;
5795
5796 dummy = fallthru->src;
5797 bb = fallthru->dest;
5798
5799 if (single_pred_p (bb))
5800 return;
5801
5802 /* We can forward location info if the dummy block has only one predecessor. */
5803 forward_location_p = single_pred_p (dummy);
5804
5805 /* If we redirected a branch we must create new PHI nodes at the
5806 start of BB. */
5807 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5808 {
5809 gphi *phi, *new_phi;
5810
5811 phi = gsi.phi ();
5812 var = gimple_phi_result (phi);
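      /* The new PHI in BB takes over VAR; the PHI left in DUMMY gets a
	 fresh name, which then reaches BB over the FALLTHRU edge.  */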
5813 new_phi = create_phi_node (var, bb);
5814 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5815 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5816 forward_location_p
5817 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
5818 }
5819
5820 /* Add the arguments we have stored on edges. */
5821 FOR_EACH_EDGE (e, ei, bb->preds)
5822 {
5823 if (e == fallthru)
5824 continue;
5825
5826 flush_pending_stmts (e);
5827 }
5828 }
5829
5830
5831 /* Return a non-special label at the head of basic block BB.
5832 Create one if it doesn't exist. */
5833
5834 tree
5835 gimple_block_label (basic_block bb)
5836 {
5837 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5838 bool first = true;
5839 tree label;
5840 glabel *stmt;
5841
5842 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5843 {
5844 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5845 if (!stmt)
5846 break;
5847 label = gimple_label_label (stmt);
5848 if (!DECL_NONLOCAL (label))
5849 {
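	      /* If this is not the first statement, move the label to the
		 head of the block so the next lookup finds it at once.  */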
5850 if (!first)
5851 gsi_move_before (&i, &s);
5852 return label;
5853 }
5854 }
5855
5856 label = create_artificial_label (UNKNOWN_LOCATION);
5857 stmt = gimple_build_label (label);
5858 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5859 return label;
5860 }
5861
5862
5863 /* Attempt to perform edge redirection by replacing a possibly complex
5864 jump instruction by a goto or by removing the jump completely.
5865 This can apply only if all edges now point to the same block. The
5866 parameters and return values are equivalent to
5867 redirect_edge_and_branch. */
5868
5869 static edge
5870 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5871 {
5872 basic_block src = e->src;
5873 gimple_stmt_iterator i;
5874 gimple *stmt;
5875
5876 /* We can replace or remove a complex jump only when we have exactly
5877 two edges. */
5878 if (EDGE_COUNT (src->succs) != 2
5879 /* Verify that all targets will be TARGET. Specifically, the
5880 edge that is not E must also go to TARGET. */
5881 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5882 return NULL;
5883
5884 i = gsi_last_bb (src);
5885 if (gsi_end_p (i))
5886 return NULL;
5887
5888 stmt = gsi_stmt (i);
5889
5890 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5891 {
5892 gsi_remove (&i, true);
5893 e = ssa_redirect_edge (e, target);
5894 e->flags = EDGE_FALLTHRU;
5895 return e;
5896 }
5897
5898 return NULL;
5899 }
5900
5901
5902 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5903 edge representing the redirected branch. */
5904
5905 static edge
5906 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5907 {
5908 basic_block bb = e->src;
5909 gimple_stmt_iterator gsi;
5910 edge ret;
5911 gimple *stmt;
5912
5913 if (e->flags & EDGE_ABNORMAL)
5914 return NULL;
5915
5916 if (e->dest == dest)
5917 return NULL;
5918
5919 if (e->flags & EDGE_EH)
5920 return redirect_eh_edge (e, dest);
5921
5922 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5923 {
5924 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5925 if (ret)
5926 return ret;
5927 }
5928
5929 gsi = gsi_last_nondebug_bb (bb);
5930 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5931
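  /* GIMPLE_ERROR_MARK serves as a sentinel: with no last statement we
     fall into the default case, which handles plain fallthru edges.  */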
5932 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5933 {
5934 case GIMPLE_COND:
5935 /* For COND_EXPR, we only need to redirect the edge. */
5936 break;
5937
5938 case GIMPLE_GOTO:
5939 /* No non-abnormal edges should lead from a non-simple goto, and
5940 simple ones should be represented implicitly. */
5941 gcc_unreachable ();
5942
5943 case GIMPLE_SWITCH:
5944 {
5945 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5946 tree label = gimple_block_label (dest);
5947 tree cases = get_cases_for_edge (e, switch_stmt);
5948
5949 /* If we have a list of cases associated with E, then use it
5950 as it's a lot faster than walking the entire case vector. */
5951 if (cases)
5952 {
5953 edge e2 = find_edge (e->src, dest);
5954 tree last, first;
5955
5956 first = cases;
5957 while (cases)
5958 {
5959 last = cases;
5960 CASE_LABEL (cases) = label;
5961 cases = CASE_CHAIN (cases);
5962 }
5963
5964 /* If there was already an edge in the CFG, then we need
5965 to move all the cases associated with E to E2. */
5966 if (e2)
5967 {
5968 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5969
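		  /* Splice E's case list into E2's immediately after its
		     head element.  */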
5970 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5971 CASE_CHAIN (cases2) = first;
5972 }
5973 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5974 }
5975 else
5976 {
5977 size_t i, n = gimple_switch_num_labels (switch_stmt);
5978
5979 for (i = 0; i < n; i++)
5980 {
5981 tree elt = gimple_switch_label (switch_stmt, i);
5982 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5983 CASE_LABEL (elt) = label;
5984 }
5985 }
5986 }
5987 break;
5988
5989 case GIMPLE_ASM:
5990 {
5991 gasm *asm_stmt = as_a <gasm *> (stmt);
5992 int i, n = gimple_asm_nlabels (asm_stmt);
5993 tree label = NULL;
5994
5995 for (i = 0; i < n; ++i)
5996 {
5997 tree cons = gimple_asm_label_op (asm_stmt, i);
5998 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
5999 {
6000 if (!label)
6001 label = gimple_block_label (dest);
6002 TREE_VALUE (cons) = label;
6003 }
6004 }
6005
6006 /* If we didn't find any label matching the former edge in the
6007 asm labels, we must be redirecting the fallthrough
6008 edge. */
6009 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6010 }
6011 break;
6012
6013 case GIMPLE_RETURN:
6014 gsi_remove (&gsi, true);
6015 e->flags |= EDGE_FALLTHRU;
6016 break;
6017
6018 case GIMPLE_OMP_RETURN:
6019 case GIMPLE_OMP_CONTINUE:
6020 case GIMPLE_OMP_SECTIONS_SWITCH:
6021 case GIMPLE_OMP_FOR:
6022 /* The edges from OMP constructs can be simply redirected. */
6023 break;
6024
6025 case GIMPLE_EH_DISPATCH:
6026 if (!(e->flags & EDGE_FALLTHRU))
6027 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6028 break;
6029
6030 case GIMPLE_TRANSACTION:
6031 if (e->flags & EDGE_TM_ABORT)
6032 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6033 gimple_block_label (dest));
6034 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6035 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6036 gimple_block_label (dest));
6037 else
6038 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6039 gimple_block_label (dest));
6040 break;
6041
6042 default:
6043 /* Otherwise it must be a fallthru edge, and we don't need to
6044 do anything besides redirecting it. */
6045 gcc_assert (e->flags & EDGE_FALLTHRU);
6046 break;
6047 }
6048
6049   /* Now update the edges in the CFG; ssa_redirect_edge saves any PHI
6050      arguments for the old destination on E, to be flushed later via
6051      flush_pending_stmts by callers that need them.  */
6052   e = ssa_redirect_edge (e, dest);
6053
6054 return e;
6055 }
6056
6057 /* Returns true if it is possible to remove edge E by redirecting
6058 it to the destination of the other edge from E->src. */
6059
6060 static bool
6061 gimple_can_remove_branch_p (const_edge e)
6062 {
6063 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6064 return false;
6065
6066 return true;
6067 }
6068
6069 /* Simple wrapper, as we can always redirect fallthru edges. */
6070
6071 static basic_block
6072 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6073 {
6074 e = gimple_redirect_edge_and_branch (e, dest);
6075 gcc_assert (e);
6076
6077 return NULL;
6078 }
6079
6080
6081 /* Splits basic block BB after statement STMT (but at least after the
6082 labels). If STMT is NULL, BB is split just after the labels. */
6083
6084 static basic_block
6085 gimple_split_block (basic_block bb, void *stmt)
6086 {
6087 gimple_stmt_iterator gsi;
6088 gimple_stmt_iterator gsi_tgt;
6089 gimple_seq list;
6090 basic_block new_bb;
6091 edge e;
6092 edge_iterator ei;
6093
6094 new_bb = create_empty_bb (bb);
6095
6096 /* Redirect the outgoing edges. */
6097 new_bb->succs = bb->succs;
6098 bb->succs = NULL;
6099 FOR_EACH_EDGE (e, ei, new_bb->succs)
6100 e->src = new_bb;
6101
6102 /* Get a stmt iterator pointing to the first stmt to move. */
6103 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6104 gsi = gsi_after_labels (bb);
6105 else
6106 {
6107 gsi = gsi_for_stmt ((gimple *) stmt);
6108 gsi_next (&gsi);
6109 }
6110
6111 /* Move everything from GSI to the new basic block. */
6112 if (gsi_end_p (gsi))
6113 return new_bb;
6114
6115   /* Split the statement list - avoid re-creating containers as this
6116 brings ugly quadratic memory consumption in the inliner.
6117 (We are still quadratic since we need to update stmt BB pointers,
6118 sadly.) */
6119 gsi_split_seq_before (&gsi, &list);
6120 set_bb_seq (new_bb, list);
6121 for (gsi_tgt = gsi_start (list);
6122 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6123 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6124
6125 return new_bb;
6126 }
6127
6128
6129 /* Moves basic block BB after block AFTER. */
6130
6131 static bool
6132 gimple_move_block_after (basic_block bb, basic_block after)
6133 {
6134 if (bb->prev_bb == after)
6135 return true;
6136
6137 unlink_block (bb);
6138 link_block (bb, after);
6139
6140 return true;
6141 }
6142
6143
6144 /* Return TRUE if block BB has no executable statements, otherwise return
6145 FALSE. */
6146
6147 static bool
6148 gimple_empty_block_p (basic_block bb)
6149 {
6150 /* BB must have no executable statements. */
6151 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6152 if (phi_nodes (bb))
6153 return false;
6154 while (!gsi_end_p (gsi))
6155 {
6156 gimple *stmt = gsi_stmt (gsi);
6157 if (is_gimple_debug (stmt))
6158 ;
6159 else if (gimple_code (stmt) == GIMPLE_NOP
6160 || gimple_code (stmt) == GIMPLE_PREDICT)
6161 ;
6162 else
6163 return false;
6164 gsi_next (&gsi);
6165 }
6166 return true;
6167 }
6168
6169
6170 /* Split a basic block if it ends with a conditional branch and if the
6171 other part of the block is not empty. */
6172
6173 static basic_block
6174 gimple_split_block_before_cond_jump (basic_block bb)
6175 {
6176 gimple *last, *split_point;
6177 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6178 if (gsi_end_p (gsi))
6179 return NULL;
6180 last = gsi_stmt (gsi);
6181 if (gimple_code (last) != GIMPLE_COND
6182 && gimple_code (last) != GIMPLE_SWITCH)
6183 return NULL;
6184 gsi_prev (&gsi);
6185 split_point = gsi_stmt (gsi);
6186 return split_block (bb, split_point)->dest;
6187 }
6188
6189
6190 /* Return true if basic block BB can be duplicated.  */
6191
6192 static bool
6193 gimple_can_duplicate_bb_p (const_basic_block bb)
6194 {
6195 gimple *last = last_stmt (CONST_CAST_BB (bb));
6196
6197 /* Do checks that can only fail for the last stmt, to minimize the work in the
6198 stmt loop. */
6199   if (last)
6200     {
6201       /* A transaction is a single entry multiple exit region.  It
6202 	 must be duplicated in its entirety or not at all.  */
6203       if (gimple_code (last) == GIMPLE_TRANSACTION)
6204 	return false;
6205       /* An IFN_UNIQUE call must be duplicated as part of its group,
6206 	 or not at all.  */
6207       if (is_gimple_call (last)
6208 	  && gimple_call_internal_p (last)
6209 	  && gimple_call_internal_unique_p (last))
6210 	return false;
6211     }
6212
6213 for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6214 !gsi_end_p (gsi); gsi_next (&gsi))
6215 {
6216 gimple *g = gsi_stmt (gsi);
6217
6218 /* An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6219 duplicated as part of its group, or not at all.
6220 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6221 group, so the same holds there. */
6222 if (is_gimple_call (g)
6223 && (gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6224 || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6225 || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6226 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6227 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6228 return false;
6229 }
6230
6231 return true;
6232 }
6233
6234 /* Create a duplicate of the basic block BB. NOTE: This does not
6235 preserve SSA form. */
6236
6237 static basic_block
6238 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6239 {
6240 basic_block new_bb;
6241 gimple_stmt_iterator gsi_tgt;
6242
6243 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6244
6245 /* Copy the PHI nodes. We ignore PHI node arguments here because
6246      the incoming edges have not been set up yet.  */
6247 for (gphi_iterator gpi = gsi_start_phis (bb);
6248 !gsi_end_p (gpi);
6249 gsi_next (&gpi))
6250 {
6251 gphi *phi, *copy;
6252 phi = gpi.phi ();
6253 copy = create_phi_node (NULL_TREE, new_bb);
6254 create_new_def_for (gimple_phi_result (phi), copy,
6255 gimple_phi_result_ptr (copy));
6256 gimple_set_uid (copy, gimple_uid (phi));
6257 }
6258
6259 gsi_tgt = gsi_start_bb (new_bb);
6260 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6261 !gsi_end_p (gsi);
6262 gsi_next (&gsi))
6263 {
6264 def_operand_p def_p;
6265 ssa_op_iter op_iter;
6266 tree lhs;
6267 gimple *stmt, *copy;
6268
6269 stmt = gsi_stmt (gsi);
6270 if (gimple_code (stmt) == GIMPLE_LABEL)
6271 continue;
6272
6273 /* Don't duplicate label debug stmts. */
6274 if (gimple_debug_bind_p (stmt)
6275 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6276 == LABEL_DECL)
6277 continue;
6278
6279 /* Create a new copy of STMT and duplicate STMT's virtual
6280 operands. */
6281 copy = gimple_copy (stmt);
6282 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6283
6284 maybe_duplicate_eh_stmt (copy, stmt);
6285 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6286
6287 /* When copying around a stmt writing into a local non-user
6288 aggregate, make sure it won't share stack slot with other
6289 vars. */
6290 lhs = gimple_get_lhs (stmt);
6291 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6292 {
6293 tree base = get_base_address (lhs);
6294 if (base
6295 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6296 && DECL_IGNORED_P (base)
6297 && !TREE_STATIC (base)
6298 && !DECL_EXTERNAL (base)
6299 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6300 DECL_NONSHAREABLE (base) = 1;
6301 }
6302
6303 	  /* If requested, remap dependence info of cliques brought in
6304 via inlining. */
6305 if (id)
6306 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6307 {
6308 tree op = gimple_op (copy, i);
6309 if (!op)
6310 continue;
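	    /* Strip ADDR_EXPR/WITH_SIZE_EXPR wrappers and handled
	       components to reach the base reference that carries the
	       dependence clique.  */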
6311 if (TREE_CODE (op) == ADDR_EXPR
6312 || TREE_CODE (op) == WITH_SIZE_EXPR)
6313 op = TREE_OPERAND (op, 0);
6314 while (handled_component_p (op))
6315 op = TREE_OPERAND (op, 0);
6316 if ((TREE_CODE (op) == MEM_REF
6317 || TREE_CODE (op) == TARGET_MEM_REF)
6318 && MR_DEPENDENCE_CLIQUE (op) > 1
6319 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6320 {
6321 if (!id->dependence_map)
6322 id->dependence_map = new hash_map<dependence_hash,
6323 unsigned short>;
6324 bool existed;
6325 unsigned short &newc = id->dependence_map->get_or_insert
6326 (MR_DEPENDENCE_CLIQUE (op), &existed);
6327 if (!existed)
6328 {
6329 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6330 newc = ++cfun->last_clique;
6331 }
6332 MR_DEPENDENCE_CLIQUE (op) = newc;
6333 }
6334 }
6335
6336 /* Create new names for all the definitions created by COPY and
6337 add replacement mappings for each new name. */
6338 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6339 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6340 }
6341
6342 return new_bb;
6343 }
6344
6345 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6346
6347 static void
6348 add_phi_args_after_copy_edge (edge e_copy)
6349 {
6350 basic_block bb, bb_copy = e_copy->src, dest;
6351 edge e;
6352 edge_iterator ei;
6353 gphi *phi, *phi_copy;
6354 tree def;
6355 gphi_iterator psi, psi_copy;
6356
6357 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6358 return;
6359
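  /* Find the block BB_COPY was duplicated from; outside of a duplicated
     region, BB_COPY serves as its own original.  */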
6360 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6361
6362 if (e_copy->dest->flags & BB_DUPLICATED)
6363 dest = get_bb_original (e_copy->dest);
6364 else
6365 dest = e_copy->dest;
6366
6367 e = find_edge (bb, dest);
6368 if (!e)
6369 {
6370 /* During loop unrolling the target of the latch edge is copied.
6371 	     In this case we are not looking for the edge to DEST, but for
6372 	     the edge to the duplicated block whose original was DEST.  */
6373 FOR_EACH_EDGE (e, ei, bb->succs)
6374 {
6375 if ((e->dest->flags & BB_DUPLICATED)
6376 && get_bb_original (e->dest) == dest)
6377 break;
6378 }
6379
6380 gcc_assert (e != NULL);
6381 }
6382
6383 for (psi = gsi_start_phis (e->dest),
6384 psi_copy = gsi_start_phis (e_copy->dest);
6385 !gsi_end_p (psi);
6386 gsi_next (&psi), gsi_next (&psi_copy))
6387 {
6388 phi = psi.phi ();
6389 phi_copy = psi_copy.phi ();
6390 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6391 add_phi_arg (phi_copy, def, e_copy,
6392 gimple_phi_arg_location_from_edge (phi, e));
6393 }
6394 }
6395
6396
6397 /* Basic block BB_COPY was created by code duplication. Add phi node
6398 arguments for edges going out of BB_COPY. The blocks that were
6399 duplicated have BB_DUPLICATED set. */
6400
6401 void
6402 add_phi_args_after_copy_bb (basic_block bb_copy)
6403 {
6404 edge e_copy;
6405 edge_iterator ei;
6406
6407 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6408 {
6409 add_phi_args_after_copy_edge (e_copy);
6410 }
6411 }
6412
6413 /* Blocks in REGION_COPY array of length N_REGION were created by
6414 duplication of basic blocks. Add phi node arguments for edges
6415    going out of these blocks.  If E_COPY is not NULL, also add
6416    phi node arguments for its destination.  */
6417
6418 void
6419 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6420 edge e_copy)
6421 {
6422 unsigned i;
6423
6424 for (i = 0; i < n_region; i++)
6425 region_copy[i]->flags |= BB_DUPLICATED;
6426
6427 for (i = 0; i < n_region; i++)
6428 add_phi_args_after_copy_bb (region_copy[i]);
6429 if (e_copy)
6430 add_phi_args_after_copy_edge (e_copy);
6431
6432 for (i = 0; i < n_region; i++)
6433 region_copy[i]->flags &= ~BB_DUPLICATED;
6434 }
6435
6436 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6437 important exit edge EXIT. By important we mean that no SSA name defined
6438    inside the region is live over the other exit edges of the region.  All entry
6439 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6440 to the duplicate of the region. Dominance and loop information is
6441 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6442 UPDATE_DOMINANCE is false then we assume that the caller will update the
6443 dominance information after calling this function. The new basic
6444 blocks are stored to REGION_COPY in the same order as they had in REGION,
6445 provided that REGION_COPY is not NULL.
6446 The function returns false if it is unable to copy the region,
6447 true otherwise. */
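
/* For the primary use, loop header copying, the effect is roughly that

     while (cond) { body; }

   becomes

     if (cond) { body; do { body; } while (cond); }

   (an illustrative sketch; the function itself merely duplicates the
   region and redirects ENTRY to the copy).  */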
6448
6449 bool
6450 gimple_duplicate_sese_region (edge entry, edge exit,
6451 basic_block *region, unsigned n_region,
6452 basic_block *region_copy,
6453 bool update_dominance)
6454 {
6455 unsigned i;
6456 bool free_region_copy = false, copying_header = false;
6457 class loop *loop = entry->dest->loop_father;
6458 edge exit_copy;
6459 vec<basic_block> doms = vNULL;
6460 edge redirected;
6461 profile_count total_count = profile_count::uninitialized ();
6462 profile_count entry_count = profile_count::uninitialized ();
6463
6464 if (!can_copy_bbs_p (region, n_region))
6465 return false;
6466
6467 /* Some sanity checking. Note that we do not check for all possible
6468      misuses of the functions.  That is, if you ask to copy something weird,
6469      it will work, but the state of the structures probably will not be
6470 correct. */
6471 for (i = 0; i < n_region; i++)
6472 {
6473 /* We do not handle subloops, i.e. all the blocks must belong to the
6474 same loop. */
6475 if (region[i]->loop_father != loop)
6476 return false;
6477
6478 if (region[i] != entry->dest
6479 && region[i] == loop->header)
6480 return false;
6481 }
6482
6483 /* In case the function is used for loop header copying (which is the primary
6484    use), ensure that EXIT and its copy will be the new latch and entry edges.  */
6485 if (loop->header == entry->dest)
6486 {
6487 copying_header = true;
6488
6489 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6490 return false;
6491
6492 for (i = 0; i < n_region; i++)
6493 if (region[i] != exit->src
6494 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6495 return false;
6496 }
6497
6498 initialize_original_copy_tables ();
6499
6500 if (copying_header)
6501 set_loop_copy (loop, loop_outer (loop));
6502 else
6503 set_loop_copy (loop, loop);
6504
6505 if (!region_copy)
6506 {
6507 region_copy = XNEWVEC (basic_block, n_region);
6508 free_region_copy = true;
6509 }
6510
6511 /* Record blocks outside the region that are dominated by something
6512 inside. */
6513 if (update_dominance)
6514 {
6515 doms.create (0);
6516 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6517 }
6518
6519 if (entry->dest->count.initialized_p ())
6520 {
6521 total_count = entry->dest->count;
6522 entry_count = entry->count ();
6523 /* Fix up corner cases, to avoid division by zero or creation of negative
6524 frequencies. */
6525 if (entry_count > total_count)
6526 entry_count = total_count;
6527 }
6528
6529 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6530 split_edge_bb_loc (entry), update_dominance);
6531 if (total_count.initialized_p () && entry_count.initialized_p ())
6532 {
6533 scale_bbs_frequencies_profile_count (region, n_region,
6534 total_count - entry_count,
6535 total_count);
6536 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6537 total_count);
6538 }
6539
6540 if (copying_header)
6541 {
6542 loop->header = exit->dest;
6543 loop->latch = exit->src;
6544 }
6545
6546 /* Redirect the entry and add the phi node arguments. */
6547 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6548 gcc_assert (redirected != NULL);
6549 flush_pending_stmts (entry);
6550
6551 /* Concerning updating of dominators: We must recount dominators
6552 for entry block and its copy. Anything that is outside of the
6553      region, but was dominated by something inside, needs recounting as
6554 well. */
6555 if (update_dominance)
6556 {
6557 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6558 doms.safe_push (get_bb_original (entry->dest));
6559 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6560 doms.release ();
6561 }
6562
6563 /* Add the other PHI node arguments. */
6564 add_phi_args_after_copy (region_copy, n_region, NULL);
6565
6566 if (free_region_copy)
6567 free (region_copy);
6568
6569 free_original_copy_tables ();
6570 return true;
6571 }
6572
6573 /* Checks if BB is part of the region defined by N_REGION BBS. */
6574 static bool
6575 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6576 {
6577 unsigned int n;
6578
6579 for (n = 0; n < n_region; n++)
6580 {
6581 if (bb == bbs[n])
6582 return true;
6583 }
6584 return false;
6585 }
6586
6587 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6588    are stored to REGION_COPY in the same order in which they appear
6589 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6590 the region, EXIT an exit from it. The condition guarding EXIT
6591 is moved to ENTRY. Returns true if duplication succeeds, false
6592 otherwise.
6593
6594 For example,
6595
6596 some_code;
6597 if (cond)
6598 A;
6599 else
6600 B;
6601
6602 is transformed to
6603
6604 if (cond)
6605 {
6606 some_code;
6607 A;
6608 }
6609 else
6610 {
6611 some_code;
6612 B;
6613 }
6614 */
6615
6616 bool
6617 gimple_duplicate_sese_tail (edge entry, edge exit,
6618 basic_block *region, unsigned n_region,
6619 basic_block *region_copy)
6620 {
6621 unsigned i;
6622 bool free_region_copy = false;
6623 class loop *loop = exit->dest->loop_father;
6624 class loop *orig_loop = entry->dest->loop_father;
6625 basic_block switch_bb, entry_bb, nentry_bb;
6626 vec<basic_block> doms;
6627 profile_count total_count = profile_count::uninitialized (),
6628 exit_count = profile_count::uninitialized ();
6629 edge exits[2], nexits[2], e;
6630 gimple_stmt_iterator gsi;
6631 gimple *cond_stmt;
6632 edge sorig, snew;
6633 basic_block exit_bb;
6634 gphi_iterator psi;
6635 gphi *phi;
6636 tree def;
6637 class loop *target, *aloop, *cloop;
6638
6639 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6640 exits[0] = exit;
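  /* The comparison EDGE_SUCC (exit->src, 0) == exit yields 0 or 1,
     selecting the successor edge other than EXIT.  */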
6641 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6642
6643 if (!can_copy_bbs_p (region, n_region))
6644 return false;
6645
6646 initialize_original_copy_tables ();
6647 set_loop_copy (orig_loop, loop);
6648
6649   target = loop;
6650 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6651 {
6652 if (bb_part_of_region_p (aloop->header, region, n_region))
6653 {
6654 cloop = duplicate_loop (aloop, target);
6655 duplicate_subloops (aloop, cloop);
6656 }
6657 }
6658
6659 if (!region_copy)
6660 {
6661 region_copy = XNEWVEC (basic_block, n_region);
6662 free_region_copy = true;
6663 }
6664
6665 gcc_assert (!need_ssa_update_p (cfun));
6666
6667 /* Record blocks outside the region that are dominated by something
6668 inside. */
6669 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6670
6671 total_count = exit->src->count;
6672 exit_count = exit->count ();
6673 /* Fix up corner cases, to avoid division by zero or creation of negative
6674 frequencies. */
6675 if (exit_count > total_count)
6676 exit_count = total_count;
6677
6678 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6679 split_edge_bb_loc (exit), true);
6680 if (total_count.initialized_p () && exit_count.initialized_p ())
6681 {
6682 scale_bbs_frequencies_profile_count (region, n_region,
6683 total_count - exit_count,
6684 total_count);
6685 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6686 total_count);
6687 }
6688
6689 /* Create the switch block, and put the exit condition to it. */
6690 entry_bb = entry->dest;
6691 nentry_bb = get_bb_copy (entry_bb);
6692 if (!last_stmt (entry->src)
6693 || !stmt_ends_bb_p (last_stmt (entry->src)))
6694 switch_bb = entry->src;
6695 else
6696 switch_bb = split_edge (entry);
6697 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6698
6699 gsi = gsi_last_bb (switch_bb);
6700 cond_stmt = last_stmt (exit->src);
6701 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6702 cond_stmt = gimple_copy (cond_stmt);
6703
6704 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6705
6706 sorig = single_succ_edge (switch_bb);
6707 sorig->flags = exits[1]->flags;
6708 sorig->probability = exits[1]->probability;
6709 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6710 snew->probability = exits[0]->probability;
6711 
6713 /* Register the new edge from SWITCH_BB in loop exit lists. */
6714 rescan_loop_exit (snew, true, false);
6715
6716 /* Add the PHI node arguments. */
6717 add_phi_args_after_copy (region_copy, n_region, snew);
6718
6719 /* Get rid of now superfluous conditions and associated edges (and phi node
6720 arguments). */
6721 exit_bb = exit->dest;
6722
6723 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6724 PENDING_STMT (e) = NULL;
6725
6726 /* The latch of ORIG_LOOP was copied, and so was the backedge
6727 to the original header. We redirect this backedge to EXIT_BB. */
6728 for (i = 0; i < n_region; i++)
6729 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6730 {
6731 gcc_assert (single_succ_edge (region_copy[i]));
6732 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6733 PENDING_STMT (e) = NULL;
6734 for (psi = gsi_start_phis (exit_bb);
6735 !gsi_end_p (psi);
6736 gsi_next (&psi))
6737 {
6738 phi = psi.phi ();
6739 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6740 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6741 }
6742 }
6743 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6744 PENDING_STMT (e) = NULL;
6745
6746 /* Anything that is outside of the region, but was dominated by something
6747      inside, needs its dominance info updated.  */
6748 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6749 doms.release ();
6750 /* Update the SSA web. */
6751 update_ssa (TODO_update_ssa);
6752
6753 if (free_region_copy)
6754 free (region_copy);
6755
6756 free_original_copy_tables ();
6757 return true;
6758 }
6759
6760 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6761 adding blocks when the dominator traversal reaches EXIT. This
6762 function silently assumes that ENTRY strictly dominates EXIT. */
6763
6764 void
6765 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6766 vec<basic_block> *bbs_p)
6767 {
6768 basic_block son;
6769
6770 for (son = first_dom_son (CDI_DOMINATORS, entry);
6771 son;
6772 son = next_dom_son (CDI_DOMINATORS, son))
6773 {
6774 bbs_p->safe_push (son);
6775 if (son != exit)
6776 gather_blocks_in_sese_region (son, exit, bbs_p);
6777 }
6778 }
6779
6780 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6781 The duplicates are recorded in VARS_MAP. */
6782
6783 static void
6784 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6785 tree to_context)
6786 {
6787 tree t = *tp, new_t;
6788 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6789
6790 if (DECL_CONTEXT (t) == to_context)
6791 return;
6792
6793 bool existed;
6794 tree &loc = vars_map->get_or_insert (t, &existed);
6795
6796 if (!existed)
6797 {
6798 if (SSA_VAR_P (t))
6799 {
6800 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6801 add_local_decl (f, new_t);
6802 }
6803 else
6804 {
6805 gcc_assert (TREE_CODE (t) == CONST_DECL);
6806 new_t = copy_node (t);
6807 }
6808 DECL_CONTEXT (new_t) = to_context;
6809
6810 loc = new_t;
6811 }
6812 else
6813 new_t = loc;
6814
6815 *tp = new_t;
6816 }
6817
6818
6819 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6820 VARS_MAP maps old ssa names and var_decls to the new ones. */
6821
6822 static tree
6823 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6824 tree to_context)
6825 {
6826 tree new_name;
6827
6828 gcc_assert (!virtual_operand_p (name));
6829
6830 tree *loc = vars_map->get (name);
6831
6832 if (!loc)
6833 {
6834 tree decl = SSA_NAME_VAR (name);
6835 if (decl)
6836 {
6837 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6838 replace_by_duplicate_decl (&decl, vars_map, to_context);
6839 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6840 decl, SSA_NAME_DEF_STMT (name));
6841 }
6842 else
6843 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6844 name, SSA_NAME_DEF_STMT (name));
6845
6846 /* Now that we've used the def stmt to define new_name, make sure it
6847 doesn't define name anymore. */
6848 SSA_NAME_DEF_STMT (name) = NULL;
6849
6850 vars_map->put (name, new_name);
6851 }
6852 else
6853 new_name = *loc;
6854
6855 return new_name;
6856 }
6857
6858 struct move_stmt_d
6859 {
6860 tree orig_block;
6861 tree new_block;
6862 tree from_context;
6863 tree to_context;
6864 hash_map<tree, tree> *vars_map;
6865 htab_t new_label_map;
6866 hash_map<void *, void *> *eh_map;
6867 bool remap_decls_p;
6868 };
6869
6870 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6871    contained in *TP to NEW_BLOCK if it was ORIG_BLOCK previously, and
6872    change the DECL_CONTEXT of every local variable referenced in *TP.  */
6873
6874 static tree
6875 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6876 {
6877 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6878 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6879 tree t = *tp;
6880
6881 if (EXPR_P (t))
6882 {
6883 tree block = TREE_BLOCK (t);
6884 if (block == NULL_TREE)
6885 ;
6886 else if (block == p->orig_block
6887 || p->orig_block == NULL_TREE)
6888 {
6889 /* tree_node_can_be_shared says we can share invariant
6890 	     addresses but unshare_expr copies them anyway.  Make sure
6891 to unshare before adjusting the block in place - we do not
6892 always see a copy here. */
6893 if (TREE_CODE (t) == ADDR_EXPR
6894 && is_gimple_min_invariant (t))
6895 *tp = t = unshare_expr (t);
6896 TREE_SET_BLOCK (t, p->new_block);
6897 }
6898 else if (flag_checking)
6899 {
6900 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6901 block = BLOCK_SUPERCONTEXT (block);
6902 gcc_assert (block == p->orig_block);
6903 }
6904 }
6905 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6906 {
6907 if (TREE_CODE (t) == SSA_NAME)
6908 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6909 else if (TREE_CODE (t) == PARM_DECL
6910 && gimple_in_ssa_p (cfun))
6911 *tp = *(p->vars_map->get (t));
6912 else if (TREE_CODE (t) == LABEL_DECL)
6913 {
6914 if (p->new_label_map)
6915 {
6916 struct tree_map in, *out;
6917 in.base.from = t;
6918 out = (struct tree_map *)
6919 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6920 if (out)
6921 *tp = t = out->to;
6922 }
6923
6924 /* For FORCED_LABELs we can end up with references from other
6925 functions if some SESE regions are outlined. It is UB to
6926 jump in between them, but they could be used just for printing
6927 addresses etc. In that case, DECL_CONTEXT on the label should
6928 be the function containing the glabel stmt with that LABEL_DECL,
6929 		 rather than whatever function in which a reference to the
6930 		 label was last seen.  */
6931 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6932 DECL_CONTEXT (t) = p->to_context;
6933 }
6934 else if (p->remap_decls_p)
6935 {
6936 /* Replace T with its duplicate. T should no longer appear in the
6937 parent function, so this looks wasteful; however, it may appear
6938 in referenced_vars, and more importantly, as virtual operands of
6939 statements, and in alias lists of other variables. It would be
6940 quite difficult to expunge it from all those places. ??? It might
6941 suffice to do this for addressable variables. */
6942 if ((VAR_P (t) && !is_global_var (t))
6943 || TREE_CODE (t) == CONST_DECL)
6944 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6945 }
6946 *walk_subtrees = 0;
6947 }
6948 else if (TYPE_P (t))
6949 *walk_subtrees = 0;
6950
6951 return NULL_TREE;
6952 }
6953
6954 /* Helper for move_stmt_r. Given an EH region number for the source
6955    function, map that to the duplicate EH region number in the dest.  */
6956
6957 static int
6958 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6959 {
6960 eh_region old_r, new_r;
6961
6962 old_r = get_eh_region_from_number (old_nr);
6963 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6964
6965 return new_r->index;
6966 }
6967
6968 /* Similar, but operate on INTEGER_CSTs. */
6969
6970 static tree
6971 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6972 {
6973 int old_nr, new_nr;
6974
6975 old_nr = tree_to_shwi (old_t_nr);
6976 new_nr = move_stmt_eh_region_nr (old_nr, p);
6977
6978 return build_int_cst (integer_type_node, new_nr);
6979 }
6980
6981 /* Like move_stmt_op, but for gimple statements.
6982
6983 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6984 contained in the current statement in *GSI_P and change the
6985 DECL_CONTEXT of every local variable referenced in the current
6986 statement. */
6987
6988 static tree
6989 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6990 struct walk_stmt_info *wi)
6991 {
6992 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6993 gimple *stmt = gsi_stmt (*gsi_p);
6994 tree block = gimple_block (stmt);
6995
6996 if (block == p->orig_block
6997 || (p->orig_block == NULL_TREE
6998 && block != NULL_TREE))
6999 gimple_set_block (stmt, p->new_block);
7000
7001 switch (gimple_code (stmt))
7002 {
7003 case GIMPLE_CALL:
7004 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7005 {
7006 tree r, fndecl = gimple_call_fndecl (stmt);
7007 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7008 switch (DECL_FUNCTION_CODE (fndecl))
7009 {
7010 case BUILT_IN_EH_COPY_VALUES:
7011 r = gimple_call_arg (stmt, 1);
7012 r = move_stmt_eh_region_tree_nr (r, p);
7013 gimple_call_set_arg (stmt, 1, r);
7014 /* FALLTHRU */
7015
7016 case BUILT_IN_EH_POINTER:
7017 case BUILT_IN_EH_FILTER:
7018 r = gimple_call_arg (stmt, 0);
7019 r = move_stmt_eh_region_tree_nr (r, p);
7020 gimple_call_set_arg (stmt, 0, r);
7021 break;
7022
7023 default:
7024 break;
7025 }
7026 }
7027 break;
7028
7029 case GIMPLE_RESX:
7030 {
7031 gresx *resx_stmt = as_a <gresx *> (stmt);
7032 int r = gimple_resx_region (resx_stmt);
7033 r = move_stmt_eh_region_nr (r, p);
7034 gimple_resx_set_region (resx_stmt, r);
7035 }
7036 break;
7037
7038 case GIMPLE_EH_DISPATCH:
7039 {
7040 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7041 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7042 r = move_stmt_eh_region_nr (r, p);
7043 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7044 }
7045 break;
7046
7047 case GIMPLE_OMP_RETURN:
7048 case GIMPLE_OMP_CONTINUE:
7049 break;
7050
7051 case GIMPLE_LABEL:
7052 {
7053 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7054 so that such labels can be referenced from other regions.
7055 Make sure to update it when seeing a GIMPLE_LABEL though,
7056 that is the owner of the label. */
7057 walk_gimple_op (stmt, move_stmt_op, wi);
7058 *handled_ops_p = true;
7059 tree label = gimple_label_label (as_a <glabel *> (stmt));
7060 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7061 DECL_CONTEXT (label) = p->to_context;
7062 }
7063 break;
7064
7065 default:
7066 if (is_gimple_omp (stmt))
7067 {
7068 /* Do not remap variables inside OMP directives. Variables
7069 referenced in clauses and directive header belong to the
7070 parent function and should not be moved into the child
7071 function. */
7072 bool save_remap_decls_p = p->remap_decls_p;
7073 p->remap_decls_p = false;
7074 *handled_ops_p = true;
7075
7076 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7077 move_stmt_op, wi);
7078
7079 p->remap_decls_p = save_remap_decls_p;
7080 }
7081 break;
7082 }
7083
7084 return NULL_TREE;
7085 }
7086
7087 /* Move basic block BB from function CFUN to function DEST_FN. The
7088 block is moved out of the original linked list and placed after
7089 block AFTER in the new list. Also, the block is removed from the
7090 original array of blocks and placed in DEST_FN's array of blocks.
7091    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7092 updated to reflect the moved edges.
7093
7094 The local variables are remapped to new instances, VARS_MAP is used
7095 to record the mapping. */
7096
7097 static void
7098 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7099 basic_block after, bool update_edge_count_p,
7100 struct move_stmt_d *d)
7101 {
7102 struct control_flow_graph *cfg;
7103 edge_iterator ei;
7104 edge e;
7105 gimple_stmt_iterator si;
7106 unsigned old_len;
7107
7108 /* Remove BB from dominance structures. */
7109 delete_from_dominance_info (CDI_DOMINATORS, bb);
7110
7111 /* Move BB from its current loop to the copy in the new function. */
7112 if (current_loops)
7113 {
7114 class loop *new_loop = (class loop *)bb->loop_father->aux;
7115 if (new_loop)
7116 bb->loop_father = new_loop;
7117 }
7118
7119 /* Link BB to the new linked list. */
7120 move_block_after (bb, after);
7121
7122 /* Update the edge count in the corresponding flowgraphs. */
7123 if (update_edge_count_p)
7124 FOR_EACH_EDGE (e, ei, bb->succs)
7125 {
7126 cfun->cfg->x_n_edges--;
7127 dest_cfun->cfg->x_n_edges++;
7128 }
7129
7130 /* Remove BB from the original basic block array. */
7131 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7132 cfun->cfg->x_n_basic_blocks--;
7133
7134 /* Grow DEST_CFUN's basic block array if needed. */
7135 cfg = dest_cfun->cfg;
7136 cfg->x_n_basic_blocks++;
7137 if (bb->index >= cfg->x_last_basic_block)
7138 cfg->x_last_basic_block = bb->index + 1;
7139
7140 old_len = vec_safe_length (cfg->x_basic_block_info);
7141 if ((unsigned) cfg->x_last_basic_block >= old_len)
7142 vec_safe_grow_cleared (cfg->x_basic_block_info,
7143 cfg->x_last_basic_block + 1);
7144
7145 (*cfg->x_basic_block_info)[bb->index] = bb;
7146
7147 /* Remap the variables in phi nodes. */
7148 for (gphi_iterator psi = gsi_start_phis (bb);
7149 !gsi_end_p (psi); )
7150 {
7151 gphi *phi = psi.phi ();
7152 use_operand_p use;
7153 tree op = PHI_RESULT (phi);
7154 ssa_op_iter oi;
7155 unsigned i;
7156
7157 if (virtual_operand_p (op))
7158 {
7159 /* Remove the phi nodes for virtual operands (alias analysis will be
7160 run for the new function, anyway). But replace all uses that
7161 might be outside of the region we move. */
7162 use_operand_p use_p;
7163 imm_use_iterator iter;
7164 gimple *use_stmt;
7165 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7166 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7167 SET_USE (use_p, SSA_NAME_VAR (op));
7168 remove_phi_node (&psi, true);
7169 continue;
7170 }
7171
7172 SET_PHI_RESULT (phi,
7173 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7174 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7175 {
7176 op = USE_FROM_PTR (use);
7177 if (TREE_CODE (op) == SSA_NAME)
7178 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7179 }
7180
7181 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7182 {
7183 location_t locus = gimple_phi_arg_location (phi, i);
7184 tree block = LOCATION_BLOCK (locus);
7185
7186 if (locus == UNKNOWN_LOCATION)
7187 continue;
7188 if (d->orig_block == NULL_TREE || block == d->orig_block)
7189 {
7190 locus = set_block (locus, d->new_block);
7191 gimple_phi_arg_set_location (phi, i, locus);
7192 }
7193 }
7194
7195 gsi_next (&psi);
7196 }
7197
7198 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7199 {
7200 gimple *stmt = gsi_stmt (si);
7201 struct walk_stmt_info wi;
7202
7203 memset (&wi, 0, sizeof (wi));
7204 wi.info = d;
7205 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7206
7207 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7208 {
7209 tree label = gimple_label_label (label_stmt);
7210 int uid = LABEL_DECL_UID (label);
7211
7212 gcc_assert (uid > -1);
7213
7214 old_len = vec_safe_length (cfg->x_label_to_block_map);
7215 if (old_len <= (unsigned) uid)
7216 vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);
7217
7218 (*cfg->x_label_to_block_map)[uid] = bb;
7219 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7220
7221 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7222
7223 if (uid >= dest_cfun->cfg->last_label_uid)
7224 dest_cfun->cfg->last_label_uid = uid + 1;
7225 }
7226
7227 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7228 remove_stmt_from_eh_lp_fn (cfun, stmt);
7229
7230 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7231 gimple_remove_stmt_histograms (cfun, stmt);
7232
7233 /* We cannot leave any operands allocated from the operand caches of
7234 the current function. */
7235 free_stmt_operands (cfun, stmt);
7236 push_cfun (dest_cfun);
7237 update_stmt (stmt);
7238 if (is_gimple_call (stmt))
7239 notice_special_calls (as_a <gcall *> (stmt));
7240 pop_cfun ();
7241 }
7242
7243 FOR_EACH_EDGE (e, ei, bb->succs)
7244 if (e->goto_locus != UNKNOWN_LOCATION)
7245 {
7246 tree block = LOCATION_BLOCK (e->goto_locus);
7247 if (d->orig_block == NULL_TREE
7248 || block == d->orig_block)
7249 e->goto_locus = set_block (e->goto_locus, d->new_block);
7250 }
7251 }
7252
7253 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7254 the outermost EH region. Use REGION as the incoming base EH region.
7255 If there is no single outermost region, return NULL and set *ALL to
7256 true. */
7257
7258 static eh_region
7259 find_outermost_region_in_block (struct function *src_cfun,
7260 basic_block bb, eh_region region,
7261 bool *all)
7262 {
7263 gimple_stmt_iterator si;
7264
7265 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7266 {
7267 gimple *stmt = gsi_stmt (si);
7268 eh_region stmt_region;
7269 int lp_nr;
7270
7271 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7272 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7273 if (stmt_region)
7274 {
7275 if (region == NULL)
7276 region = stmt_region;
7277 else if (stmt_region != region)
7278 {
7279 region = eh_region_outermost (src_cfun, stmt_region, region);
7280 if (region == NULL)
7281 {
7282 *all = true;
7283 return NULL;
7284 }
7285 }
7286 }
7287 }
7288
7289 return region;
7290 }
7291
7292 static tree
7293 new_label_mapper (tree decl, void *data)
7294 {
7295 htab_t hash = (htab_t) data;
7296 struct tree_map *m;
7297 void **slot;
7298
7299 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7300
7301 m = XNEW (struct tree_map);
7302 m->hash = DECL_UID (decl);
7303 m->base.from = decl;
7304 m->to = create_artificial_label (UNKNOWN_LOCATION);
7305 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7306 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7307 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7308
7309 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7310 gcc_assert (*slot == NULL);
7311
7312 *slot = m;
7313
7314 return m->to;
7315 }
7316
7317 /* Tree walker to replace the decls used inside value expressions by
7318 duplicates. */
7319
7320 static tree
7321 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7322 {
7323 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7324
7325 switch (TREE_CODE (*tp))
7326 {
7327 case VAR_DECL:
7328 case PARM_DECL:
7329 case RESULT_DECL:
7330 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7331 break;
7332 default:
7333 break;
7334 }
7335
7336 if (IS_TYPE_OR_DECL_P (*tp))
7337 *walk_subtrees = false;
7338
7339 return NULL;
7340 }
7341
7342 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7343 subblocks. */
7344
7345 static void
7346 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7347 tree to_context)
7348 {
7349 tree *tp, t;
7350
7351 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7352 {
7353 t = *tp;
7354 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7355 continue;
7356 replace_by_duplicate_decl (&t, vars_map, to_context);
7357 if (t != *tp)
7358 {
7359 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7360 {
7361 tree x = DECL_VALUE_EXPR (*tp);
7362 struct replace_decls_d rd = { vars_map, to_context };
7363 	      x = unshare_expr (x);
7364 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7365 SET_DECL_VALUE_EXPR (t, x);
7366 DECL_HAS_VALUE_EXPR_P (t) = 1;
7367 }
7368 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7369 *tp = t;
7370 }
7371 }
7372
7373 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7374 replace_block_vars_by_duplicates (block, vars_map, to_context);
7375 }
7376
7377 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7378 from FN1 to FN2. */
7379
7380 static void
7381 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7382 class loop *loop)
7383 {
7384 /* Discard it from the old loop array. */
7385 (*get_loops (fn1))[loop->num] = NULL;
7386
7387 /* Place it in the new loop array, assigning it a new number. */
7388 loop->num = number_of_loops (fn2);
7389 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7390
7391 /* Recurse to children. */
7392 for (loop = loop->inner; loop; loop = loop->next)
7393 fixup_loop_arrays_after_move (fn1, fn2, loop);
7394 }
7395
7396 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7397    delimited by ENTRY and EXIT, possibly containing noreturn blocks.  */
7398
7399 DEBUG_FUNCTION void
7400 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7401 {
7402 basic_block bb;
7403 edge_iterator ei;
7404 edge e;
7405 bitmap bbs = BITMAP_ALLOC (NULL);
7406 int i;
7407
7408 gcc_assert (entry != NULL);
7409 gcc_assert (entry != exit);
7410 gcc_assert (bbs_p != NULL);
7411
7412 gcc_assert (bbs_p->length () > 0);
7413
7414 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7415 bitmap_set_bit (bbs, bb->index);
7416
7417 gcc_assert (bitmap_bit_p (bbs, entry->index));
7418 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7419
7420 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7421 {
7422 if (bb == entry)
7423 {
7424 gcc_assert (single_pred_p (entry));
7425 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7426 }
7427 else
7428 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7429 {
7430 e = ei_edge (ei);
7431 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7432 }
7433
7434 if (bb == exit)
7435 {
7436 gcc_assert (single_succ_p (exit));
7437 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7438 }
7439 else
7440 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7441 {
7442 e = ei_edge (ei);
7443 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7444 }
7445 }
7446
7447 BITMAP_FREE (bbs);
7448 }
7449
7450 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7451
7452 bool
7453 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7454 {
7455 bitmap release_names = (bitmap)data;
7456
7457 if (TREE_CODE (from) != SSA_NAME)
7458 return true;
7459
7460 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7461 return true;
7462 }
7463
7464 /* Return the IFN_LOOP_DIST_ALIAS call if present in BB, otherwise NULL.  */
7465
7466 static gimple *
7467 find_loop_dist_alias (basic_block bb)
7468 {
7469 gimple *g = last_stmt (bb);
7470 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7471 return NULL;
7472
7473 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7474 gsi_prev (&gsi);
7475 if (gsi_end_p (gsi))
7476 return NULL;
7477
7478 g = gsi_stmt (gsi);
7479 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7480 return g;
7481 return NULL;
7482 }
7483
7484 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7485    to VALUE and update any immediate uses of its LHS.  */
7486
7487 void
7488 fold_loop_internal_call (gimple *g, tree value)
7489 {
7490 tree lhs = gimple_call_lhs (g);
7491 use_operand_p use_p;
7492 imm_use_iterator iter;
7493 gimple *use_stmt;
7494 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7495
7496 update_call_from_tree (&gsi, value);
7497 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7498 {
7499 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7500 SET_USE (use_p, value);
7501 update_stmt (use_stmt);
7502 }
7503 }
7504
7505 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7506 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7507 single basic block in the original CFG and the new basic block is
7508 returned. DEST_CFUN must not have a CFG yet.
7509
7510 Note that the region need not be a pure SESE region. Blocks inside
7511 the region may contain calls to abort/exit. The only restriction
7512 is that ENTRY_BB should be the only entry point and it must
7513 dominate EXIT_BB.
7514
7515 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7516    function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7517 to the new function.
7518
7519 All local variables referenced in the region are assumed to be in
7520 the corresponding BLOCK_VARS and unexpanded variable lists
7521 associated with DEST_CFUN.
7522
7523 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7524 reimplement move_sese_region_to_fn by duplicating the region rather than
7525 moving it. */
7526
7527 basic_block
7528 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7529 basic_block exit_bb, tree orig_block)
7530 {
7531 vec<basic_block> bbs, dom_bbs;
7532 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7533 basic_block after, bb, *entry_pred, *exit_succ, abb;
7534 struct function *saved_cfun = cfun;
7535 int *entry_flag, *exit_flag;
7536 profile_probability *entry_prob, *exit_prob;
7537 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7538 edge e;
7539 edge_iterator ei;
7540 htab_t new_label_map;
7541 hash_map<void *, void *> *eh_map;
7542 class loop *loop = entry_bb->loop_father;
7543 class loop *loop0 = get_loop (saved_cfun, 0);
7544 struct move_stmt_d d;
7545
7546 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7547 region. */
7548 gcc_assert (entry_bb != exit_bb
7549 && (!exit_bb
7550 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7551
7552   /* Collect all the blocks in the region.  Manually add ENTRY_BB
7553      because gather_blocks_in_sese_region won't add it.  */
7554 bbs.create (0);
7555 bbs.safe_push (entry_bb);
7556 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7557
7558 if (flag_checking)
7559 verify_sese (entry_bb, exit_bb, &bbs);
7560
7561 /* The blocks that used to be dominated by something in BBS will now be
7562 dominated by the new block. */
7563 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7564 bbs.address (),
7565 bbs.length ());
7566
7567 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7568 the predecessor edges to ENTRY_BB and the successor edges to
7569 EXIT_BB so that we can re-attach them to the new basic block that
7570 will replace the region. */
7571 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7572 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7573 entry_flag = XNEWVEC (int, num_entry_edges);
7574 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7575 i = 0;
7576 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7577 {
7578 entry_prob[i] = e->probability;
7579 entry_flag[i] = e->flags;
7580 entry_pred[i++] = e->src;
7581 remove_edge (e);
7582 }
7583
7584 if (exit_bb)
7585 {
7586 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7587 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7588 exit_flag = XNEWVEC (int, num_exit_edges);
7589 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7590 i = 0;
7591 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7592 {
7593 exit_prob[i] = e->probability;
7594 exit_flag[i] = e->flags;
7595 exit_succ[i++] = e->dest;
7596 remove_edge (e);
7597 }
7598 }
7599 else
7600 {
7601 num_exit_edges = 0;
7602 exit_succ = NULL;
7603 exit_flag = NULL;
7604 exit_prob = NULL;
7605 }
7606
7607 /* Switch context to the child function to initialize DEST_FN's CFG. */
7608 gcc_assert (dest_cfun->cfg == NULL);
7609 push_cfun (dest_cfun);
7610
7611 init_empty_tree_cfg ();
7612
7613 /* Initialize EH information for the new function. */
7614 eh_map = NULL;
7615 new_label_map = NULL;
7616 if (saved_cfun->eh)
7617 {
7618 eh_region region = NULL;
7619 bool all = false;
7620
7621 FOR_EACH_VEC_ELT (bbs, i, bb)
7622 {
7623 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7624 if (all)
7625 break;
7626 }
7627
7628 init_eh_for_function ();
7629 if (region != NULL || all)
7630 {
7631 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7632 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7633 new_label_mapper, new_label_map);
7634 }
7635 }
7636
7637 /* Initialize an empty loop tree. */
7638 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7639 init_loops_structure (dest_cfun, loops, 1);
7640 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7641 set_loops_for_fn (dest_cfun, loops);
7642
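  /* Snapshot the old loop array: fixup_loop_arrays_after_move clears the
     moved entries in SAVED_CFUN's live array, while the original
     number-to-loop mapping is still needed for the orig_loop_num fixups
     below.  */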
7643 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7644
7645 /* Move the outlined loop tree part. */
7646 num_nodes = bbs.length ();
7647 FOR_EACH_VEC_ELT (bbs, i, bb)
7648 {
7649 if (bb->loop_father->header == bb)
7650 {
7651 class loop *this_loop = bb->loop_father;
7652 class loop *outer = loop_outer (this_loop);
7653 if (outer == loop
7654 /* If the SESE region contains some bbs ending with
7655 a noreturn call, those are considered to belong
7656 to the outermost loop in saved_cfun, rather than
7657 the entry_bb's loop_father. */
7658 || outer == loop0)
7659 {
7660 if (outer != loop)
7661 num_nodes -= this_loop->num_nodes;
7662 flow_loop_tree_node_remove (bb->loop_father);
7663 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7664 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7665 }
7666 }
7667 else if (bb->loop_father == loop0 && loop0 != loop)
7668 num_nodes--;
7669
7670 /* Remove loop exits from the outlined region. */
7671 if (loops_for_fn (saved_cfun)->exits)
7672 FOR_EACH_EDGE (e, ei, bb->succs)
7673 {
7674 struct loops *l = loops_for_fn (saved_cfun);
7675 loop_exit **slot
7676 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7677 NO_INSERT);
7678 if (slot)
7679 l->exits->clear_slot (slot);
7680 }
7681 }
7682
7683 /* Adjust the number of blocks in the tree root of the outlined part. */
7684 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7685
7686 /* Setup a mapping to be used by move_block_to_fn. */
7687 loop->aux = current_loops->tree_root;
7688 loop0->aux = current_loops->tree_root;
7689
7690   /* Fix up orig_loop_num.  If the loop referenced by it has been moved
7691      to dest_cfun, update the orig_loop_num field, otherwise clear it.  */
7692 class loop *dloop;
7693 signed char *moved_orig_loop_num = NULL;
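  /* For an original loop number N referenced from DEST_CFUN,
     moved_orig_loop_num[N] counts (capped at 2) how many moved loops
     refer to N when loop N itself also moved to DEST_CFUN, and is -1
     when loop N did not move.  */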
7694 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7695 if (dloop->orig_loop_num)
7696 {
7697 if (moved_orig_loop_num == NULL)
7698 moved_orig_loop_num
7699 = XCNEWVEC (signed char, vec_safe_length (larray));
7700 if ((*larray)[dloop->orig_loop_num] != NULL
7701 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7702 {
7703 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7704 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7705 moved_orig_loop_num[dloop->orig_loop_num]++;
7706 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7707 }
7708 else
7709 {
7710 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7711 dloop->orig_loop_num = 0;
7712 }
7713 }
7714 pop_cfun ();
7715
7716 if (moved_orig_loop_num)
7717 {
7718 FOR_EACH_VEC_ELT (bbs, i, bb)
7719 {
7720 gimple *g = find_loop_dist_alias (bb);
7721 if (g == NULL)
7722 continue;
7723
7724 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7725 gcc_assert (orig_loop_num
7726 && (unsigned) orig_loop_num < vec_safe_length (larray));
7727 if (moved_orig_loop_num[orig_loop_num] == 2)
7728 {
7729 /* If we have moved both loops with this orig_loop_num into
7730 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7731 too, update the first argument. */
7732 gcc_assert ((*larray)[dloop->orig_loop_num] != NULL
7733 && (get_loop (saved_cfun, dloop->orig_loop_num)
7734 == NULL));
7735 tree t = build_int_cst (integer_type_node,
7736 (*larray)[dloop->orig_loop_num]->num);
7737 gimple_call_set_arg (g, 0, t);
7738 update_stmt (g);
7739 /* Make sure the following loop will not update it. */
7740 moved_orig_loop_num[orig_loop_num] = 0;
7741 }
7742 else
7743 /* Otherwise at least one of the loops stayed in saved_cfun.
7744 Remove the LOOP_DIST_ALIAS call. */
7745 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7746 }
7747 FOR_EACH_BB_FN (bb, saved_cfun)
7748 {
7749 gimple *g = find_loop_dist_alias (bb);
7750 if (g == NULL)
7751 continue;
7752 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7753 gcc_assert (orig_loop_num
7754 && (unsigned) orig_loop_num < vec_safe_length (larray));
7755 if (moved_orig_loop_num[orig_loop_num])
7756 /* The LOOP_DIST_ALIAS call remained in saved_cfun; if at least one
7757 of the corresponding loops was moved, remove it. */
7758 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7759 }
7760 XDELETEVEC (moved_orig_loop_num);
7761 }
7762 ggc_free (larray);
7763
7764 /* Move blocks from BBS into DEST_CFUN. */
7765 gcc_assert (bbs.length () >= 2);
7766 after = dest_cfun->cfg->x_entry_block_ptr;
7767 hash_map<tree, tree> vars_map;
7768
7769 memset (&d, 0, sizeof (d));
7770 d.orig_block = orig_block;
7771 d.new_block = DECL_INITIAL (dest_cfun->decl);
7772 d.from_context = cfun->decl;
7773 d.to_context = dest_cfun->decl;
7774 d.vars_map = &vars_map;
7775 d.new_label_map = new_label_map;
7776 d.eh_map = eh_map;
7777 d.remap_decls_p = true;
7778
7779 if (gimple_in_ssa_p (cfun))
7780 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7781 {
7782 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7783 set_ssa_default_def (dest_cfun, arg, narg);
7784 vars_map.put (arg, narg);
7785 }
7786
7787 FOR_EACH_VEC_ELT (bbs, i, bb)
7788 {
7789 /* No need to update edge counts on the last block. It has
7790 already been updated earlier when we detached the region from
7791 the original CFG. */
7792 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7793 after = bb;
7794 }
7795
7796 /* Adjust the maximum clique used. */
7797 dest_cfun->last_clique = saved_cfun->last_clique;
7798
7799 loop->aux = NULL;
7800 loop0->aux = NULL;
7801 /* Loop sizes are no longer correct, fix them up. */
7802 loop->num_nodes -= num_nodes;
7803 for (class loop *outer = loop_outer (loop);
7804 outer; outer = loop_outer (outer))
7805 outer->num_nodes -= num_nodes;
7806 loop0->num_nodes -= bbs.length () - num_nodes;
7807
7808 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7809 {
7810 class loop *aloop;
7811 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7812 if (aloop != NULL)
7813 {
7814 if (aloop->simduid)
7815 {
7816 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7817 d.to_context);
7818 dest_cfun->has_simduid_loops = true;
7819 }
7820 if (aloop->force_vectorize)
7821 dest_cfun->has_force_vectorize_loops = true;
7822 }
7823 }
7824
7825 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7826 if (orig_block)
7827 {
7828 tree block;
7829 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7830 == NULL_TREE);
7831 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7832 = BLOCK_SUBBLOCKS (orig_block);
7833 for (block = BLOCK_SUBBLOCKS (orig_block);
7834 block; block = BLOCK_CHAIN (block))
7835 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7836 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7837 }
7838
7839 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7840 &vars_map, dest_cfun->decl);
7841
7842 if (new_label_map)
7843 htab_delete (new_label_map);
7844 if (eh_map)
7845 delete eh_map;
7846
7847 if (gimple_in_ssa_p (cfun))
7848 {
7849 /* We need to release ssa-names in a defined order, so first find them,
7850 and then iterate in ascending version order. */
7851 bitmap release_names = BITMAP_ALLOC (NULL);
7852 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7853 bitmap_iterator bi;
7854 unsigned i;
7855 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7856 release_ssa_name (ssa_name (i));
7857 BITMAP_FREE (release_names);
7858 }
7859
7860 /* Rewire the entry and exit blocks. The region's entry block becomes
7861 the successor of DEST_FN's ENTRY_BLOCK_PTR in the child function.
7862 Similarly, the region's exit block becomes the predecessor of
7863 DEST_FN's EXIT_BLOCK_PTR. We need to switch CFUN between DEST_CFUN
7864 and SAVED_CFUN so that the various CFG manipulation functions get to
7865 the right CFG.
7866
7867 FIXME, this is silly. The CFG ought to become a parameter to
7868 these helpers. */
7869 push_cfun (dest_cfun);
7870 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7871 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7872 if (exit_bb)
7873 {
7874 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7875 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7876 }
7877 else
7878 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7879 pop_cfun ();
7880
7881 /* Back in the original function, the SESE region has disappeared;
7882 create a new basic block in its place. */
7883 bb = create_empty_bb (entry_pred[0]);
7884 if (current_loops)
7885 add_bb_to_loop (bb, loop);
7886 for (i = 0; i < num_entry_edges; i++)
7887 {
7888 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7889 e->probability = entry_prob[i];
7890 }
7891
7892 for (i = 0; i < num_exit_edges; i++)
7893 {
7894 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7895 e->probability = exit_prob[i];
7896 }
7897
7898 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7899 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7900 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7901 dom_bbs.release ();
7902
7903 if (exit_bb)
7904 {
7905 free (exit_prob);
7906 free (exit_flag);
7907 free (exit_succ);
7908 }
7909 free (entry_prob);
7910 free (entry_flag);
7911 free (entry_pred);
7912 bbs.release ();
7913
7914 return bb;
7915 }
7916
7917 /* Dump default def DEF to file FILE using FLAGS and indentation
7918 SPC. */
7919
7920 static void
7921 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7922 {
7923 for (int i = 0; i < spc; ++i)
7924 fprintf (file, " ");
7925 dump_ssaname_info_to_file (file, def, spc);
7926
7927 print_generic_expr (file, TREE_TYPE (def), flags);
7928 fprintf (file, " ");
7929 print_generic_expr (file, def, flags);
7930 fprintf (file, " = ");
7931 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7932 fprintf (file, ";\n");
7933 }
7934
7935 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
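/* For example, following the order of the sanitizer_opts table, a VALUE
combining the thread and leak sanitizer bits would be printed as
"thread | leak", the names separated by " | ". */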
7936
7937 static void
7938 print_no_sanitize_attr_value (FILE *file, tree value)
7939 {
7940 unsigned int flags = tree_to_uhwi (value);
7941 bool first = true;
7942 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7943 {
7944 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7945 {
7946 if (!first)
7947 fprintf (file, " | ");
7948 fprintf (file, "%s", sanitizer_opts[i].name);
7949 first = false;
7950 }
7951 }
7952 }
7953
7954 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7955 dumpfile.h). */
7956
7957 void
7958 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7959 {
7960 tree arg, var, old_current_fndecl = current_function_decl;
7961 struct function *dsf;
7962 bool ignore_topmost_bind = false, any_var = false;
7963 basic_block bb;
7964 tree chain;
7965 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7966 && decl_is_tm_clone (fndecl));
7967 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7968
7969 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7970 {
7971 fprintf (file, "__attribute__((");
7972
7973 bool first = true;
7974 tree chain;
7975 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7976 first = false, chain = TREE_CHAIN (chain))
7977 {
7978 if (!first)
7979 fprintf (file, ", ");
7980
7981 tree name = get_attribute_name (chain);
7982 print_generic_expr (file, name, dump_flags);
7983 if (TREE_VALUE (chain) != NULL_TREE)
7984 {
7985 fprintf (file, " (");
7986
7987 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7988 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7989 else
7990 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7991 fprintf (file, ")");
7992 }
7993 }
7994
7995 fprintf (file, "))\n");
7996 }
7997
7998 current_function_decl = fndecl;
7999 if (flags & TDF_GIMPLE)
8000 {
8001 static bool hotness_bb_param_printed = false;
8002 if (profile_info != NULL
8003 && !hotness_bb_param_printed)
8004 {
8005 hotness_bb_param_printed = true;
8006 fprintf (file,
8007 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8008 " */\n", get_hot_bb_threshold ());
8009 }
8010
8011 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8012 dump_flags | TDF_SLIM);
8013 fprintf (file, " __GIMPLE (%s",
8014 (fun->curr_properties & PROP_ssa) ? "ssa"
8015 : (fun->curr_properties & PROP_cfg) ? "cfg"
8016 : "");
8017
8018 if (cfun->cfg)
8019 {
8020 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8021 if (bb->count.initialized_p ())
8022 fprintf (file, ",%s(%d)",
8023 profile_quality_as_string (bb->count.quality ()),
8024 bb->count.value ());
8025 fprintf (file, ")\n%s (", function_name (fun));
8026 }
8027 }
8028 else
8029 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
8030
8031 arg = DECL_ARGUMENTS (fndecl);
8032 while (arg)
8033 {
8034 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8035 fprintf (file, " ");
8036 print_generic_expr (file, arg, dump_flags);
8037 if (DECL_CHAIN (arg))
8038 fprintf (file, ", ");
8039 arg = DECL_CHAIN (arg);
8040 }
8041 fprintf (file, ")\n");
8042
8043 dsf = DECL_STRUCT_FUNCTION (fndecl);
8044 if (dsf && (flags & TDF_EH))
8045 dump_eh_tree (file, dsf);
8046
8047 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8048 {
8049 dump_node (fndecl, TDF_SLIM | flags, file);
8050 current_function_decl = old_current_fndecl;
8051 return;
8052 }
8053
8054 /* When GIMPLE is lowered, the variables are no longer available in
8055 BIND_EXPRs, so display them separately. */
8056 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8057 {
8058 unsigned ix;
8059 ignore_topmost_bind = true;
8060
8061 fprintf (file, "{\n");
8062 if (gimple_in_ssa_p (fun)
8063 && (flags & TDF_ALIAS))
8064 {
8065 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8066 arg = DECL_CHAIN (arg))
8067 {
8068 tree def = ssa_default_def (fun, arg);
8069 if (def)
8070 dump_default_def (file, def, 2, flags);
8071 }
8072
8073 tree res = DECL_RESULT (fun->decl);
8074 if (res != NULL_TREE
8075 && DECL_BY_REFERENCE (res))
8076 {
8077 tree def = ssa_default_def (fun, res);
8078 if (def)
8079 dump_default_def (file, def, 2, flags);
8080 }
8081
8082 tree static_chain = fun->static_chain_decl;
8083 if (static_chain != NULL_TREE)
8084 {
8085 tree def = ssa_default_def (fun, static_chain);
8086 if (def)
8087 dump_default_def (file, def, 2, flags);
8088 }
8089 }
8090
8091 if (!vec_safe_is_empty (fun->local_decls))
8092 FOR_EACH_LOCAL_DECL (fun, ix, var)
8093 {
8094 print_generic_decl (file, var, flags);
8095 fprintf (file, "\n");
8096
8097 any_var = true;
8098 }
8099
8100 tree name;
8101
8102 if (gimple_in_ssa_p (cfun))
8103 FOR_EACH_SSA_NAME (ix, name, cfun)
8104 {
8105 if (!SSA_NAME_VAR (name))
8106 {
8107 fprintf (file, " ");
8108 print_generic_expr (file, TREE_TYPE (name), flags);
8109 fprintf (file, " ");
8110 print_generic_expr (file, name, flags);
8111 fprintf (file, ";\n");
8112
8113 any_var = true;
8114 }
8115 }
8116 }
8117
8118 if (fun && fun->decl == fndecl
8119 && fun->cfg
8120 && basic_block_info_for_fn (fun))
8121 {
8122 /* If the CFG has been built, emit a CFG-based dump. */
8123 if (!ignore_topmost_bind)
8124 fprintf (file, "{\n");
8125
8126 if (any_var && n_basic_blocks_for_fn (fun))
8127 fprintf (file, "\n");
8128
8129 FOR_EACH_BB_FN (bb, fun)
8130 dump_bb (file, bb, 2, flags);
8131
8132 fprintf (file, "}\n");
8133 }
8134 else if (fun->curr_properties & PROP_gimple_any)
8135 {
8136 /* The function is now in GIMPLE form but the CFG has not been
8137 built yet. Emit the single sequence of GIMPLE statements
8138 that make up its body. */
8139 gimple_seq body = gimple_body (fndecl);
8140
8141 if (gimple_seq_first_stmt (body)
8142 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8143 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8144 print_gimple_seq (file, body, 0, flags);
8145 else
8146 {
8147 if (!ignore_topmost_bind)
8148 fprintf (file, "{\n");
8149
8150 if (any_var)
8151 fprintf (file, "\n");
8152
8153 print_gimple_seq (file, body, 2, flags);
8154 fprintf (file, "}\n");
8155 }
8156 }
8157 else
8158 {
8159 int indent;
8160
8161 /* Make a tree-based dump. */
8162 chain = DECL_SAVED_TREE (fndecl);
8163 if (chain && TREE_CODE (chain) == BIND_EXPR)
8164 {
8165 if (ignore_topmost_bind)
8166 {
8167 chain = BIND_EXPR_BODY (chain);
8168 indent = 2;
8169 }
8170 else
8171 indent = 0;
8172 }
8173 else
8174 {
8175 if (!ignore_topmost_bind)
8176 {
8177 fprintf (file, "{\n");
8178 /* No topmost bind, pretend it's ignored for later. */
8179 ignore_topmost_bind = true;
8180 }
8181 indent = 2;
8182 }
8183
8184 if (any_var)
8185 fprintf (file, "\n");
8186
8187 print_generic_stmt_indented (file, chain, flags, indent);
8188 if (ignore_topmost_bind)
8189 fprintf (file, "}\n");
8190 }
8191
8192 if (flags & TDF_ENUMERATE_LOCALS)
8193 dump_enumerated_decls (file, flags);
8194 fprintf (file, "\n\n");
8195
8196 current_function_decl = old_current_fndecl;
8197 }
8198
8199 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
8200
8201 DEBUG_FUNCTION void
8202 debug_function (tree fn, dump_flags_t flags)
8203 {
8204 dump_function_to_file (fn, stderr, flags);
8205 }
8206
8207
8208 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8209
8210 static void
8211 print_pred_bbs (FILE *file, basic_block bb)
8212 {
8213 edge e;
8214 edge_iterator ei;
8215
8216 FOR_EACH_EDGE (e, ei, bb->preds)
8217 fprintf (file, "bb_%d ", e->src->index);
8218 }
8219
8220
8221 /* Print on FILE the indexes for the successors of basic_block BB. */
8222
8223 static void
8224 print_succ_bbs (FILE *file, basic_block bb)
8225 {
8226 edge e;
8227 edge_iterator ei;
8228
8229 FOR_EACH_EDGE (e, ei, bb->succs)
8230 fprintf (file, "bb_%d ", e->dest->index);
8231 }
8232
8233 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
8234
8235 void
8236 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8237 {
8238 char *s_indent = (char *) alloca ((size_t) indent + 1);
8239 memset ((void *) s_indent, ' ', (size_t) indent);
8240 s_indent[indent] = '\0';
8241
8242 /* Print basic_block's header. */
8243 if (verbosity >= 2)
8244 {
8245 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8246 print_pred_bbs (file, bb);
8247 fprintf (file, "}, succs = {");
8248 print_succ_bbs (file, bb);
8249 fprintf (file, "})\n");
8250 }
8251
8252 /* Print basic_block's body. */
8253 if (verbosity >= 3)
8254 {
8255 fprintf (file, "%s {\n", s_indent);
8256 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8257 fprintf (file, "%s }\n", s_indent);
8258 }
8259 }
8260
8261 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8262
8263 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
8264 VERBOSITY level, this outputs the contents of the loop, or just its
8265 structure. */
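/* Verbosity levels, as implemented below and in print_loops_bb: at 0 only
the loop header line is printed; at 1 the brace-nesting of inner loops
is shown; at 2 each contained basic block's pred/succ sets are listed;
at 3 the block bodies are dumped as well. */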
8266
8267 static void
8268 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8269 {
8270 char *s_indent;
8271 basic_block bb;
8272
8273 if (loop == NULL)
8274 return;
8275
8276 s_indent = (char *) alloca ((size_t) indent + 1);
8277 memset ((void *) s_indent, ' ', (size_t) indent);
8278 s_indent[indent] = '\0';
8279
8280 /* Print loop's header. */
8281 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8282 if (loop->header)
8283 fprintf (file, "header = %d", loop->header->index);
8284 else
8285 {
8286 fprintf (file, "deleted)\n");
8287 return;
8288 }
8289 if (loop->latch)
8290 fprintf (file, ", latch = %d", loop->latch->index);
8291 else
8292 fprintf (file, ", multiple latches");
8293 fprintf (file, ", niter = ");
8294 print_generic_expr (file, loop->nb_iterations);
8295
8296 if (loop->any_upper_bound)
8297 {
8298 fprintf (file, ", upper_bound = ");
8299 print_decu (loop->nb_iterations_upper_bound, file);
8300 }
8301 if (loop->any_likely_upper_bound)
8302 {
8303 fprintf (file, ", likely_upper_bound = ");
8304 print_decu (loop->nb_iterations_likely_upper_bound, file);
8305 }
8306
8307 if (loop->any_estimate)
8308 {
8309 fprintf (file, ", estimate = ");
8310 print_decu (loop->nb_iterations_estimate, file);
8311 }
8312 if (loop->unroll)
8313 fprintf (file, ", unroll = %d", loop->unroll);
8314 fprintf (file, ")\n");
8315
8316 /* Print loop's body. */
8317 if (verbosity >= 1)
8318 {
8319 fprintf (file, "%s{\n", s_indent);
8320 FOR_EACH_BB_FN (bb, cfun)
8321 if (bb->loop_father == loop)
8322 print_loops_bb (file, bb, indent, verbosity);
8323
8324 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8325 fprintf (file, "%s}\n", s_indent);
8326 }
8327 }
8328
8329 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8330 spaces. Depending on the VERBOSITY level, this outputs the contents
8331 of the loop, or just its structure. */
8332
8333 static void
8334 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8335 int verbosity)
8336 {
8337 if (loop == NULL)
8338 return;
8339
8340 print_loop (file, loop, indent, verbosity);
8341 print_loop_and_siblings (file, loop->next, indent, verbosity);
8342 }
8343
8344 /* Follow a CFG edge from the entry point of the function, and on entry
8345 of a loop, pretty print the loop structure on FILE. */
8346
8347 void
8348 print_loops (FILE *file, int verbosity)
8349 {
8350 basic_block bb;
8351
8352 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8353 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8354 if (bb && bb->loop_father)
8355 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8356 }
8357
8358 /* Dump a loop. */
8359
8360 DEBUG_FUNCTION void
8361 debug (class loop &ref)
8362 {
8363 print_loop (stderr, &ref, 0, /*verbosity*/0);
8364 }
8365
8366 DEBUG_FUNCTION void
8367 debug (class loop *ptr)
8368 {
8369 if (ptr)
8370 debug (*ptr);
8371 else
8372 fprintf (stderr, "<nil>\n");
8373 }
8374
8375 /* Dump a loop verbosely. */
8376
8377 DEBUG_FUNCTION void
8378 debug_verbose (class loop &ref)
8379 {
8380 print_loop (stderr, &ref, 0, /*verbosity*/3);
8381 }
8382
8383 DEBUG_FUNCTION void
8384 debug_verbose (class loop *ptr)
8385 {
8386 if (ptr)
8387 debug_verbose (*ptr);
8388 else
8389 fprintf (stderr, "<nil>\n");
8390 }
8391
8392
8393 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8394
8395 DEBUG_FUNCTION void
8396 debug_loops (int verbosity)
8397 {
8398 print_loops (stderr, verbosity);
8399 }
8400
8401 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8402
8403 DEBUG_FUNCTION void
8404 debug_loop (class loop *loop, int verbosity)
8405 {
8406 print_loop (stderr, loop, 0, verbosity);
8407 }
8408
8409 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8410 level. */
8411
8412 DEBUG_FUNCTION void
8413 debug_loop_num (unsigned num, int verbosity)
8414 {
8415 debug_loop (get_loop (cfun, num), verbosity);
8416 }
8417
8418 /* Return true if BB ends with a call, possibly followed by some
8419 instructions that must stay with the call. Return false
8420 otherwise. */
8421
8422 static bool
8423 gimple_block_ends_with_call_p (basic_block bb)
8424 {
8425 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8426 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8427 }
8428
8429
8430 /* Return true if BB ends with a conditional branch. Return false
8431 otherwise. */
8432
8433 static bool
8434 gimple_block_ends_with_condjump_p (const_basic_block bb)
8435 {
8436 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8437 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8438 }
8439
8440
8441 /* Return true if statement T may terminate execution of BB in ways not
8442 explicitly represented in the CFG. */
8443
8444 bool
8445 stmt_can_terminate_bb_p (gimple *t)
8446 {
8447 tree fndecl = NULL_TREE;
8448 int call_flags = 0;
8449
8450 /* An EH exception not handled internally terminates execution of the
8451 whole function. */
8452 if (stmt_can_throw_external (cfun, t))
8453 return true;
8454
8455 /* NORETURN and LONGJMP calls already have an edge to exit.
8456 CONST and PURE calls do not need one.
8457 We don't currently check for CONST and PURE here, although
8458 it would be a good idea, because those attributes are
8459 figured out from the RTL in mark_constant_function, and
8460 the counter incrementation code from -fprofile-arcs
8461 leads to different results from -fbranch-probabilities. */
8462 if (is_gimple_call (t))
8463 {
8464 fndecl = gimple_call_fndecl (t);
8465 call_flags = gimple_call_flags (t);
8466 }
8467
8468 if (is_gimple_call (t)
8469 && fndecl
8470 && fndecl_built_in_p (fndecl)
8471 && (call_flags & ECF_NOTHROW)
8472 && !(call_flags & ECF_RETURNS_TWICE)
8473 /* fork() doesn't really return twice, but the effect of
8474 wrapping it in __gcov_fork() which calls __gcov_dump() and
8475 __gcov_reset() and clears the counters before forking has the same
8476 effect as returning twice. Force a fake edge. */
8477 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8478 return false;
8479
8480 if (is_gimple_call (t))
8481 {
8482 edge_iterator ei;
8483 edge e;
8484 basic_block bb;
8485
8486 if (call_flags & (ECF_PURE | ECF_CONST)
8487 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8488 return false;
8489
8490 /* A function call may do a longjmp, terminate the program, or do other
8491 things. Special-case noreturn calls that have non-abnormal edges out,
8492 as then the fact is sufficiently represented by the lack of edges out of T. */
8493 if (!(call_flags & ECF_NORETURN))
8494 return true;
8495
8496 bb = gimple_bb (t);
8497 FOR_EACH_EDGE (e, ei, bb->succs)
8498 if ((e->flags & EDGE_FAKE) == 0)
8499 return true;
8500 }
8501
8502 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8503 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8504 return true;
8505
8506 return false;
8507 }
8508
8509
8510 /* Add fake edges to the function exit for any non-constant and
8511 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
8512 volatile inline assembly, in the bitmap of blocks specified by BLOCKS,
8513 or in the whole CFG if BLOCKS is zero. Return the number of blocks
8514 that were split.
8515
8516 The goal is to expose cases in which entering a basic block does
8517 not imply that all subsequent instructions must be executed. */
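/* For example, a call that may longjmp or abort means control can leave
the block before reaching its end; the fake edge to EXIT makes that
possibility explicit, e.g. for the profiler's spanning-tree computation. */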
8518
8519 static int
8520 gimple_flow_call_edges_add (sbitmap blocks)
8521 {
8522 int i;
8523 int blocks_split = 0;
8524 int last_bb = last_basic_block_for_fn (cfun);
8525 bool check_last_block = false;
8526
8527 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8528 return 0;
8529
8530 if (! blocks)
8531 check_last_block = true;
8532 else
8533 check_last_block = bitmap_bit_p (blocks,
8534 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8535
8536 /* In the last basic block, before epilogue generation, there will be
8537 a fallthru edge to EXIT. Special care is required if the last insn
8538 of the last basic block is a call because make_edge folds duplicate
8539 edges, which would result in the fallthru edge also being marked
8540 fake, which would result in the fallthru edge being removed by
8541 remove_fake_edges, which would result in an invalid CFG.
8542
8543 Moreover, we can't elide the outgoing fake edge, since the block
8544 profiler needs to take this into account in order to solve the minimal
8545 spanning tree in the case that the call doesn't return.
8546
8547 Handle this by adding a dummy instruction in a new last basic block. */
8548 if (check_last_block)
8549 {
8550 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8551 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8552 gimple *t = NULL;
8553
8554 if (!gsi_end_p (gsi))
8555 t = gsi_stmt (gsi);
8556
8557 if (t && stmt_can_terminate_bb_p (t))
8558 {
8559 edge e;
8560
8561 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8562 if (e)
8563 {
8564 gsi_insert_on_edge (e, gimple_build_nop ());
8565 gsi_commit_edge_inserts ();
8566 }
8567 }
8568 }
8569
8570 /* Now add fake edges to the function exit for any non-constant
8571 calls, since there is no way that we can determine if they will
8572 return or not... */
8573 for (i = 0; i < last_bb; i++)
8574 {
8575 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8576 gimple_stmt_iterator gsi;
8577 gimple *stmt, *last_stmt;
8578
8579 if (!bb)
8580 continue;
8581
8582 if (blocks && !bitmap_bit_p (blocks, i))
8583 continue;
8584
8585 gsi = gsi_last_nondebug_bb (bb);
8586 if (!gsi_end_p (gsi))
8587 {
8588 last_stmt = gsi_stmt (gsi);
8589 do
8590 {
8591 stmt = gsi_stmt (gsi);
8592 if (stmt_can_terminate_bb_p (stmt))
8593 {
8594 edge e;
8595
8596 /* The handling above of the final block before the
8597 epilogue should be enough to verify that there is
8598 no edge to the exit block in the CFG already.
8599 Calling make_edge in such a case would cause us to
8600 mark that edge as fake and remove it later. */
8601 if (flag_checking && stmt == last_stmt)
8602 {
8603 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8604 gcc_assert (e == NULL);
8605 }
8606
8607 /* Note that the following may create a new basic block
8608 and renumber the existing basic blocks. */
8609 if (stmt != last_stmt)
8610 {
8611 e = split_block (bb, stmt);
8612 if (e)
8613 blocks_split++;
8614 }
8615 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8616 e->probability = profile_probability::guessed_never ();
8617 }
8618 gsi_prev (&gsi);
8619 }
8620 while (!gsi_end_p (gsi));
8621 }
8622 }
8623
8624 if (blocks_split)
8625 checking_verify_flow_info ();
8626
8627 return blocks_split;
8628 }
8629
8630 /* Removes edge E and all the blocks dominated by it, and updates dominance
8631 information. The IL in E->src needs to be updated separately.
8632 If dominance info is not available, only the edge E is removed. */
8633
8634 void
8635 remove_edge_and_dominated_blocks (edge e)
8636 {
8637 vec<basic_block> bbs_to_remove = vNULL;
8638 vec<basic_block> bbs_to_fix_dom = vNULL;
8639 edge f;
8640 edge_iterator ei;
8641 bool none_removed = false;
8642 unsigned i;
8643 basic_block bb, dbb;
8644 bitmap_iterator bi;
8645
8646 /* If we are removing a path inside a non-root loop, that may change
8647 loop ownership of blocks or remove loops; mark loops for fixup. */
8648 if (current_loops
8649 && loop_outer (e->src->loop_father) != NULL
8650 && e->src->loop_father == e->dest->loop_father)
8651 loops_state_set (LOOPS_NEED_FIXUP);
8652
8653 if (!dom_info_available_p (CDI_DOMINATORS))
8654 {
8655 remove_edge (e);
8656 return;
8657 }
8658
8659 /* No updating is needed for edges to exit. */
8660 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8661 {
8662 if (cfgcleanup_altered_bbs)
8663 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8664 remove_edge (e);
8665 return;
8666 }
8667
8668 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8669 that is not dominated by E->dest, then this set is empty. Otherwise,
8670 all the basic blocks dominated by E->dest are removed.
8671
8672 Also, to DF_IDOM we store the immediate dominators of the blocks in
8673 the dominance frontier of E (i.e., of the successors of the
8674 removed blocks, if there are any, and of E->dest otherwise). */
8675 FOR_EACH_EDGE (f, ei, e->dest->preds)
8676 {
8677 if (f == e)
8678 continue;
8679
8680 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8681 {
8682 none_removed = true;
8683 break;
8684 }
8685 }
8686
8687 auto_bitmap df, df_idom;
8688 if (none_removed)
8689 bitmap_set_bit (df_idom,
8690 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8691 else
8692 {
8693 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8694 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8695 {
8696 FOR_EACH_EDGE (f, ei, bb->succs)
8697 {
8698 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8699 bitmap_set_bit (df, f->dest->index);
8700 }
8701 }
8702 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8703 bitmap_clear_bit (df, bb->index);
8704
8705 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8706 {
8707 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8708 bitmap_set_bit (df_idom,
8709 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8710 }
8711 }
8712
8713 if (cfgcleanup_altered_bbs)
8714 {
8715 /* Record the set of the altered basic blocks. */
8716 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8717 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8718 }
8719
8720 /* Remove E and the cancelled blocks. */
8721 if (none_removed)
8722 remove_edge (e);
8723 else
8724 {
8725 /* Walk backwards so as to get a chance to substitute all
8726 released DEFs into debug stmts. See
8727 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8728 details. */
8729 for (i = bbs_to_remove.length (); i-- > 0; )
8730 delete_basic_block (bbs_to_remove[i]);
8731 }
8732
8733 /* Update the dominance information. The immediate dominator may change only
8734 for blocks whose immediate dominator belongs to DF_IDOM:
8735
8736 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8737 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8738 Z dominates X after the removal. Before removal, there exists a path P
8739 from Y to X that avoids Z. Let F be the last edge on P that is
8740 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8741 dominates W, and because of P, Z does not dominate W), and W belongs to
8742 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
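/* Consequently, it suffices to recompute dominance for the subtrees rooted
at the dominator children of the blocks in DF_IDOM, which the loop below
collects into BBS_TO_FIX_DOM. */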
8743 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8744 {
8745 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8746 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8747 dbb;
8748 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8749 bbs_to_fix_dom.safe_push (dbb);
8750 }
8751
8752 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8753
8754 bbs_to_remove.release ();
8755 bbs_to_fix_dom.release ();
8756 }
8757
8758 /* Purge dead EH edges from basic block BB. */
8759
8760 bool
8761 gimple_purge_dead_eh_edges (basic_block bb)
8762 {
8763 bool changed = false;
8764 edge e;
8765 edge_iterator ei;
8766 gimple *stmt = last_stmt (bb);
8767
8768 if (stmt && stmt_can_throw_internal (cfun, stmt))
8769 return false;
8770
8771 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8772 {
8773 if (e->flags & EDGE_EH)
8774 {
8775 remove_edge_and_dominated_blocks (e);
8776 changed = true;
8777 }
8778 else
8779 ei_next (&ei);
8780 }
8781
8782 return changed;
8783 }
8784
8785 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8786
8787 bool
8788 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8789 {
8790 bool changed = false;
8791 unsigned i;
8792 bitmap_iterator bi;
8793
8794 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8795 {
8796 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8797
8798 /* Earlier gimple_purge_dead_eh_edges could have removed
8799 this basic block already. */
8800 gcc_assert (bb || changed);
8801 if (bb != NULL)
8802 changed |= gimple_purge_dead_eh_edges (bb);
8803 }
8804
8805 return changed;
8806 }
8807
8808 /* Purge dead abnormal call edges from basic block BB. */
8809
8810 bool
8811 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8812 {
8813 bool changed = false;
8814 edge e;
8815 edge_iterator ei;
8816 gimple *stmt = last_stmt (bb);
8817
8818 if (!cfun->has_nonlocal_label
8819 && !cfun->calls_setjmp)
8820 return false;
8821
8822 if (stmt && stmt_can_make_abnormal_goto (stmt))
8823 return false;
8824
8825 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8826 {
8827 if (e->flags & EDGE_ABNORMAL)
8828 {
8829 if (e->flags & EDGE_FALLTHRU)
8830 e->flags &= ~EDGE_ABNORMAL;
8831 else
8832 remove_edge_and_dominated_blocks (e);
8833 changed = true;
8834 }
8835 else
8836 ei_next (&ei);
8837 }
8838
8839 return changed;
8840 }
8841
8842 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8843
8844 bool
8845 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8846 {
8847 bool changed = false;
8848 unsigned i;
8849 bitmap_iterator bi;
8850
8851 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8852 {
8853 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8854
8855 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8856 this basic block already. */
8857 gcc_assert (bb || changed);
8858 if (bb != NULL)
8859 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8860 }
8861
8862 return changed;
8863 }
8864
8865 /* This function is called whenever a new edge is created or
8866 redirected. */
8867
8868 static void
8869 gimple_execute_on_growing_pred (edge e)
8870 {
8871 basic_block bb = e->dest;
8872
8873 if (!gimple_seq_empty_p (phi_nodes (bb)))
8874 reserve_phi_args_for_new_edge (bb);
8875 }
8876
8877 /* This function is called immediately before edge E is removed from
8878 the edge vector E->dest->preds. */
8879
8880 static void
8881 gimple_execute_on_shrinking_pred (edge e)
8882 {
8883 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8884 remove_phi_args (e);
8885 }
8886
8887 /*---------------------------------------------------------------------------
8888 Helper functions for Loop versioning
8889 ---------------------------------------------------------------------------*/
8890
8891 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8892 of 'first'. Both of them are dominated by 'new_head' basic block. When
8893 'new_head' was created by splitting 'second's incoming edge, the edge from
8894 'new_head' to 'second' received phi arguments via split_edge(). Later, an
8895 additional edge 'e' was created to connect 'new_head' and 'first'. This
8896 routine now adds to edge 'e' the same phi args that the new_head-to-second
8897 edge received as part of the edge splitting. */
8898
8899 static void
8900 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8901 basic_block new_head, edge e)
8902 {
8903 gphi *phi1, *phi2;
8904 gphi_iterator psi1, psi2;
8905 tree def;
8906 edge e2 = find_edge (new_head, second);
8907
8908 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8909 edge, we should always have an edge from NEW_HEAD to SECOND. */
8910 gcc_assert (e2 != NULL);
8911
8912 /* Browse all phi nodes of the 'second' basic block and add phi args to
8913 edge 'e' of the 'first' head. PHI args are always in the correct order. */
8914
8915 for (psi2 = gsi_start_phis (second),
8916 psi1 = gsi_start_phis (first);
8917 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8918 gsi_next (&psi2), gsi_next (&psi1))
8919 {
8920 phi1 = psi1.phi ();
8921 phi2 = psi2.phi ();
8922 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8923 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8924 }
8925 }
8926
8927
8928 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8929 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8930 the destination of the ELSE part. */
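/* Note that only the existing single successor edge of COND_BB is
re-flagged as the FALSE edge below; the caller of this hook is expected
to add the matching TRUE edge. */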
8931
8932 static void
8933 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8934 basic_block second_head ATTRIBUTE_UNUSED,
8935 basic_block cond_bb, void *cond_e)
8936 {
8937 gimple_stmt_iterator gsi;
8938 gimple *new_cond_expr;
8939 tree cond_expr = (tree) cond_e;
8940 edge e0;
8941
8942 /* Build new conditional expr */
8943 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8944 NULL_TREE, NULL_TREE);
8945
8946 /* Add new cond in cond_bb. */
8947 gsi = gsi_last_bb (cond_bb);
8948 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8949
8950 /* Adjust edges appropriately to connect new head with first head
8951 as well as second head. */
8952 e0 = single_succ_edge (cond_bb);
8953 e0->flags &= ~EDGE_FALLTHRU;
8954 e0->flags |= EDGE_FALSE_VALUE;
8955 }
8956
8957
8958 /* Do book-keeping of basic block BB for the profile consistency checker.
8959 Store the counts in RECORD. */
8960 static void
8961 gimple_account_profile_record (basic_block bb,
8962 struct profile_record *record)
8963 {
8964 gimple_stmt_iterator i;
8965 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8966 {
8967 record->size
8968 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8969 if (bb->count.initialized_p ())
8970 record->time
8971 += estimate_num_insns (gsi_stmt (i),
8972 &eni_time_weights) * bb->count.to_gcov_type ();
8973 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8974 record->time
8975 += estimate_num_insns (gsi_stmt (i),
8976 &eni_time_weights) * bb->count.to_frequency (cfun);
8977 }
8978 }
8979
8980 struct cfg_hooks gimple_cfg_hooks = {
8981 "gimple",
8982 gimple_verify_flow_info,
8983 gimple_dump_bb, /* dump_bb */
8984 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8985 create_bb, /* create_basic_block */
8986 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8987 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8988 gimple_can_remove_branch_p, /* can_remove_branch_p */
8989 remove_bb, /* delete_basic_block */
8990 gimple_split_block, /* split_block */
8991 gimple_move_block_after, /* move_block_after */
8992 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8993 gimple_merge_blocks, /* merge_blocks */
8994 gimple_predict_edge, /* predict_edge */
8995 gimple_predicted_by_p, /* predicted_by_p */
8996 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8997 gimple_duplicate_bb, /* duplicate_block */
8998 gimple_split_edge, /* split_edge */
8999 gimple_make_forwarder_block, /* make_forwarder_block */
9000 NULL, /* tidy_fallthru_edge */
9001 NULL, /* force_nonfallthru */
9002 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9003 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9004 gimple_flow_call_edges_add, /* flow_call_edges_add */
9005 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9006 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9007 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
9008 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9009 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
9010 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9011 flush_pending_stmts, /* flush_pending_stmts */
9012 gimple_empty_block_p, /* block_empty_p */
9013 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9014 gimple_account_profile_record,
9015 };
9016
9017
9018 /* Split all critical edges. Split some extra (not necessarily critical) edges
9019 if FOR_EDGE_INSERTION_P is true. */
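/* Recall that an edge is critical when its source has multiple successors
and its destination has multiple predecessors; such an edge must be
split before code can be inserted on it unambiguously. */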
9020
9021 unsigned int
9022 split_critical_edges (bool for_edge_insertion_p /* = false */)
9023 {
9024 basic_block bb;
9025 edge e;
9026 edge_iterator ei;
9027
9028 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9029 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9030 mappings around the calls to split_edge. */
9031 start_recording_case_labels ();
9032 FOR_ALL_BB_FN (bb, cfun)
9033 {
9034 FOR_EACH_EDGE (e, ei, bb->succs)
9035 {
9036 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9037 split_edge (e);
9038 /* PRE inserts statements on edges and expects that,
9039 since split_critical_edges was done beforehand, committing edge
9040 insertions will not split more edges. In addition to critical
9041 edges we must split edges whose source has multiple successors
9042 and ends with a control flow statement, such as RESX.
9043 Go ahead and split them too. This matches the logic in
9044 gimple_find_edge_insert_loc. */
9045 else if (for_edge_insertion_p
9046 && (!single_pred_p (e->dest)
9047 || !gimple_seq_empty_p (phi_nodes (e->dest))
9048 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9049 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9050 && !(e->flags & EDGE_ABNORMAL))
9051 {
9052 gimple_stmt_iterator gsi;
9053
9054 gsi = gsi_last_bb (e->src);
9055 if (!gsi_end_p (gsi)
9056 && stmt_ends_bb_p (gsi_stmt (gsi))
9057 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9058 && !gimple_call_builtin_p (gsi_stmt (gsi),
9059 BUILT_IN_RETURN)))
9060 split_edge (e);
9061 }
9062 }
9063 }
9064 end_recording_case_labels ();
9065 return 0;
9066 }
9067
9068 namespace {
9069
9070 const pass_data pass_data_split_crit_edges =
9071 {
9072 GIMPLE_PASS, /* type */
9073 "crited", /* name */
9074 OPTGROUP_NONE, /* optinfo_flags */
9075 TV_TREE_SPLIT_EDGES, /* tv_id */
9076 PROP_cfg, /* properties_required */
9077 PROP_no_crit_edges, /* properties_provided */
9078 0, /* properties_destroyed */
9079 0, /* todo_flags_start */
9080 0, /* todo_flags_finish */
9081 };
9082
9083 class pass_split_crit_edges : public gimple_opt_pass
9084 {
9085 public:
9086 pass_split_crit_edges (gcc::context *ctxt)
9087 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9088 {}
9089
9090 /* opt_pass methods: */
9091 virtual unsigned int execute (function *) { return split_critical_edges (); }
9092
9093 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9094 }; // class pass_split_crit_edges
9095
9096 } // anon namespace
9097
9098 gimple_opt_pass *
9099 make_pass_split_crit_edges (gcc::context *ctxt)
9100 {
9101 return new pass_split_crit_edges (ctxt);
9102 }
9103
9104
9105 /* Insert COND expression, which is a GIMPLE_COND, after STMT
9106 in basic block BB, splitting the block appropriately
9107 and creating a new conditionally executed basic block.
9108 Update the profile so the new bb is visited with probability PROB.
9109 Return the created basic block. */
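/* A sketch of the resulting shape: BB ends in COND; its EDGE_TRUE_VALUE
edge (with probability PROB) enters the new empty block, which falls
through to the block holding the statements that originally followed
STMT, while the EDGE_FALSE_VALUE edge reaches that block directly. */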
9110 basic_block
9111 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9112 profile_probability prob)
9113 {
9114 edge fall = split_block (bb, stmt);
9115 gimple_stmt_iterator iter = gsi_last_bb (bb);
9116 basic_block new_bb;
9117
9118 /* Insert cond statement. */
9119 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9120 if (gsi_end_p (iter))
9121 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9122 else
9123 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9124
9125 /* Create conditionally executed block. */
9126 new_bb = create_empty_bb (bb);
9127 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9128 e->probability = prob;
9129 new_bb->count = e->count ();
9130 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9131
9132 /* Fix edge for split bb. */
9133 fall->flags = EDGE_FALSE_VALUE;
9134 fall->probability -= e->probability;
9135
9136 /* Update dominance info. */
9137 if (dom_info_available_p (CDI_DOMINATORS))
9138 {
9139 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9140 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9141 }
9142
9143 /* Update loop info. */
9144 if (current_loops)
9145 add_bb_to_loop (new_bb, bb->loop_father);
9146
9147 return new_bb;
9148 }
9149
9150 /* Build a ternary operation and gimplify it. Emit code before GSI.
9151 Return the gimple_val holding the result. */
9152
9153 tree
9154 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9155 tree type, tree a, tree b, tree c)
9156 {
9157 tree ret;
9158 location_t loc = gimple_location (gsi_stmt (*gsi));
9159
9160 ret = fold_build3_loc (loc, code, type, a, b, c);
9161 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9162 GSI_SAME_STMT);
9163 }
9164
9165 /* Build a binary operation and gimplify it. Emit code before GSI.
9166 Return the gimple_val holding the result. */
9167
9168 tree
9169 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9170 tree type, tree a, tree b)
9171 {
9172 tree ret;
9173
9174 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9175 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9176 GSI_SAME_STMT);
9177 }
9178
9179 /* Build a unary operation and gimplify it. Emit code before GSI.
9180 Return the gimple_val holding the result. */
9181
9182 tree
9183 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9184 tree a)
9185 {
9186 tree ret;
9187
9188 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9189 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9190 GSI_SAME_STMT);
9191 }
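/* A hypothetical usage sketch for the three helpers above: to emit
t = (a + b) * c before GSI one could write
tree t1 = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
tree t = gimplify_build2 (gsi, MULT_EXPR, type, t1, c);
where each call returns a gimple value usable as an operand of the
next. */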
9192
9193
9194 \f
9195 /* Given a basic block B which ends with a conditional and has
9196 precisely two successors, determine which of the edges is taken if
9197 the conditional is true and which is taken if the conditional is
9198 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9199
9200 void
9201 extract_true_false_edges_from_block (basic_block b,
9202 edge *true_edge,
9203 edge *false_edge)
9204 {
9205 edge e = EDGE_SUCC (b, 0);
9206
9207 if (e->flags & EDGE_TRUE_VALUE)
9208 {
9209 *true_edge = e;
9210 *false_edge = EDGE_SUCC (b, 1);
9211 }
9212 else
9213 {
9214 *false_edge = e;
9215 *true_edge = EDGE_SUCC (b, 1);
9216 }
9217 }
9218
9219
9220 /* From a controlling predicate in the immediate dominator DOM of
9221 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9222 predicate evaluates to true and false and store them to
9223 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9224 they are non-NULL. Returns true if the edges can be determined,
9225 else false. */
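/* For instance, if DOM ends in "if (x_1)" whose true and false edges lead
directly into PHIBLOCK, the corresponding predecessor edges of PHIBLOCK
are returned in *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE. */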
9226
9227 bool
9228 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9229 edge *true_controlled_edge,
9230 edge *false_controlled_edge)
9231 {
9232 basic_block bb = phiblock;
9233 edge true_edge, false_edge, tem;
9234 edge e0 = NULL, e1 = NULL;
9235
9236 /* We have to verify that one edge into the PHI node is dominated
9237 by the true edge of the predicate block and the other edge
9238 dominated by the false edge. This ensures that the PHI argument
9239 we are going to take is completely determined by the path we
9240 take from the predicate block.
9241 We can only use BB dominance checks below if the destination of
9242 the true/false edges are dominated by their edge, thus only
9243 have a single predecessor. */
9244 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9245 tem = EDGE_PRED (bb, 0);
9246 if (tem == true_edge
9247 || (single_pred_p (true_edge->dest)
9248 && (tem->src == true_edge->dest
9249 || dominated_by_p (CDI_DOMINATORS,
9250 tem->src, true_edge->dest))))
9251 e0 = tem;
9252 else if (tem == false_edge
9253 || (single_pred_p (false_edge->dest)
9254 && (tem->src == false_edge->dest
9255 || dominated_by_p (CDI_DOMINATORS,
9256 tem->src, false_edge->dest))))
9257 e1 = tem;
9258 else
9259 return false;
9260 tem = EDGE_PRED (bb, 1);
9261 if (tem == true_edge
9262 || (single_pred_p (true_edge->dest)
9263 && (tem->src == true_edge->dest
9264 || dominated_by_p (CDI_DOMINATORS,
9265 tem->src, true_edge->dest))))
9266 e0 = tem;
9267 else if (tem == false_edge
9268 || (single_pred_p (false_edge->dest)
9269 && (tem->src == false_edge->dest
9270 || dominated_by_p (CDI_DOMINATORS,
9271 tem->src, false_edge->dest))))
9272 e1 = tem;
9273 else
9274 return false;
9275 if (!e0 || !e1)
9276 return false;
9277
9278 if (true_controlled_edge)
9279 *true_controlled_edge = e0;
9280 if (false_controlled_edge)
9281 *false_controlled_edge = e1;
9282
9283 return true;
9284 }
9285
9286 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9287 range [LOW, HIGH]. Place the associated stmts before the last stmt of BB. */
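/* This is the classic unsigned range-check trick: assuming the caller
compares *LHS against *RHS with an unsigned LE/GT code, the emitted
test is effectively
(utype) (INDEX - LOW) <= (utype) (HIGH - LOW),
folding LOW <= INDEX && INDEX <= HIGH into a single comparison. */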
9288
9289 void
9290 generate_range_test (basic_block bb, tree index, tree low, tree high,
9291 tree *lhs, tree *rhs)
9292 {
9293 tree type = TREE_TYPE (index);
9294 tree utype = range_check_type (type);
9295
9296 low = fold_convert (utype, low);
9297 high = fold_convert (utype, high);
9298
9299 gimple_seq seq = NULL;
9300 index = gimple_convert (&seq, utype, index);
9301 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9302 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9303
9304 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9305 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9306 }
9307
9308 /* Return the basic block that belongs to label numbered INDEX
9309 of a switch statement. */
9310
9311 basic_block
9312 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9313 {
9314 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9315 }
9316
9317 /* Return the default basic block of a switch statement. */
9318
9319 basic_block
9320 gimple_switch_default_bb (function *ifun, gswitch *gs)
9321 {
9322 return gimple_switch_label_bb (ifun, gs, 0);
9323 }
9324
9325 /* Return the edge that belongs to label numbered INDEX
9326 of a switch statement. */
9327
9328 edge
9329 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9330 {
9331 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9332 }
9333
9334 /* Return the default edge of a switch statement. */
9335
9336 edge
9337 gimple_switch_default_edge (function *ifun, gswitch *gs)
9338 {
9339 return gimple_switch_edge (ifun, gs, 0);
9340 }
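/* In a GIMPLE_SWITCH the label at index 0 is always the default label,
which is why gimple_switch_default_bb and gimple_switch_default_edge
above simply forward index 0. */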
9341
9342
9343 /* Emit return warnings. */
9344
9345 namespace {
9346
9347 const pass_data pass_data_warn_function_return =
9348 {
9349 GIMPLE_PASS, /* type */
9350 "*warn_function_return", /* name */
9351 OPTGROUP_NONE, /* optinfo_flags */
9352 TV_NONE, /* tv_id */
9353 PROP_cfg, /* properties_required */
9354 0, /* properties_provided */
9355 0, /* properties_destroyed */
9356 0, /* todo_flags_start */
9357 0, /* todo_flags_finish */
9358 };
9359
9360 class pass_warn_function_return : public gimple_opt_pass
9361 {
9362 public:
9363 pass_warn_function_return (gcc::context *ctxt)
9364 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9365 {}
9366
9367 /* opt_pass methods: */
9368 virtual unsigned int execute (function *);
9369
9370 }; // class pass_warn_function_return
9371
9372 unsigned int
9373 pass_warn_function_return::execute (function *fun)
9374 {
9375 location_t location;
9376 gimple *last;
9377 edge e;
9378 edge_iterator ei;
9379
9380 if (!targetm.warn_func_return (fun->decl))
9381 return 0;
9382
9383 /* If we have a path to EXIT, then we do return. */
9384 if (TREE_THIS_VOLATILE (fun->decl)
9385 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9386 {
9387 location = UNKNOWN_LOCATION;
9388 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9389 (e = ei_safe_edge (ei)); )
9390 {
9391 last = last_stmt (e->src);
9392 if ((gimple_code (last) == GIMPLE_RETURN
9393 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9394 && location == UNKNOWN_LOCATION
9395 && ((location = LOCATION_LOCUS (gimple_location (last)))
9396 != UNKNOWN_LOCATION)
9397 && !optimize)
9398 break;
9399 /* When optimizing, replace return stmts in noreturn functions
9400 with __builtin_unreachable () call. */
9401 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9402 {
9403 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9404 gimple *new_stmt = gimple_build_call (fndecl, 0);
9405 gimple_set_location (new_stmt, gimple_location (last));
9406 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9407 gsi_replace (&gsi, new_stmt, true);
9408 remove_edge (e);
9409 }
9410 else
9411 ei_next (&ei);
9412 }
9413 if (location == UNKNOWN_LOCATION)
9414 location = cfun->function_end_locus;
9415 warning_at (location, 0, "%<noreturn%> function does return");
9416 }
9417
9418 /* If we see "return;" in some basic block, then we do reach the end
9419 without returning a value. */
9420 else if (warn_return_type > 0
9421 && !TREE_NO_WARNING (fun->decl)
9422 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9423 {
9424 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9425 {
9426 gimple *last = last_stmt (e->src);
9427 greturn *return_stmt = dyn_cast <greturn *> (last);
9428 if (return_stmt
9429 && gimple_return_retval (return_stmt) == NULL
9430 && !gimple_no_warning_p (last))
9431 {
9432 location = gimple_location (last);
9433 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9434 location = fun->function_end_locus;
9435 if (warning_at (location, OPT_Wreturn_type,
9436 "control reaches end of non-void function"))
9437 TREE_NO_WARNING (fun->decl) = 1;
9438 break;
9439 }
9440 }
9441 /* The C++ FE turns fallthrough from the end of a non-void function
9442 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9443 Recognize those too. */
9444 basic_block bb;
9445 if (!TREE_NO_WARNING (fun->decl))
9446 FOR_EACH_BB_FN (bb, fun)
9447 if (EDGE_COUNT (bb->succs) == 0)
9448 {
9449 gimple *last = last_stmt (bb);
9450 const enum built_in_function ubsan_missing_ret
9451 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9452 if (last
9453 && ((LOCATION_LOCUS (gimple_location (last))
9454 == BUILTINS_LOCATION
9455 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9456 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9457 {
9458 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9459 gsi_prev_nondebug (&gsi);
9460 gimple *prev = gsi_stmt (gsi);
9461 if (prev == NULL)
9462 location = UNKNOWN_LOCATION;
9463 else
9464 location = gimple_location (prev);
9465 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9466 location = fun->function_end_locus;
9467 if (warning_at (location, OPT_Wreturn_type,
9468 "control reaches end of non-void function"))
9469 TREE_NO_WARNING (fun->decl) = 1;
9470 break;
9471 }
9472 }
9473 }
9474 return 0;
9475 }
9476
9477 } // anon namespace
9478
9479 gimple_opt_pass *
9480 make_pass_warn_function_return (gcc::context *ctxt)
9481 {
9482 return new pass_warn_function_return (ctxt);
9483 }
9484
9485 /* Walk a gimplified function and warn about calls whose return value is
9486 ignored and for which attribute((warn_unused_result)) is set. This is done
9487 before inlining, so we don't have to worry about that. */
9488
9489 static void
9490 do_warn_unused_result (gimple_seq seq)
9491 {
9492 tree fdecl, ftype;
9493 gimple_stmt_iterator i;
9494
9495 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9496 {
9497 gimple *g = gsi_stmt (i);
9498
9499 switch (gimple_code (g))
9500 {
9501 case GIMPLE_BIND:
9502 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9503 break;
9504 case GIMPLE_TRY:
9505 do_warn_unused_result (gimple_try_eval (g));
9506 do_warn_unused_result (gimple_try_cleanup (g));
9507 break;
9508 case GIMPLE_CATCH:
9509 do_warn_unused_result (gimple_catch_handler (
9510 as_a <gcatch *> (g)));
9511 break;
9512 case GIMPLE_EH_FILTER:
9513 do_warn_unused_result (gimple_eh_filter_failure (g));
9514 break;
9515
9516 case GIMPLE_CALL:
9517 if (gimple_call_lhs (g))
9518 break;
9519 if (gimple_call_internal_p (g))
9520 break;
9521
9522 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9523 LHS. All calls whose value is ignored should be
9524 represented like this. Look for the attribute. */
9525 fdecl = gimple_call_fndecl (g);
9526 ftype = gimple_call_fntype (g);
9527
9528 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9529 {
9530 location_t loc = gimple_location (g);
9531
9532 if (fdecl)
9533 warning_at (loc, OPT_Wunused_result,
9534 "ignoring return value of %qD "
9535 "declared with attribute %<warn_unused_result%>",
9536 fdecl);
9537 else
9538 warning_at (loc, OPT_Wunused_result,
9539 "ignoring return value of function "
9540 "declared with attribute %<warn_unused_result%>");
9541 }
9542 break;
9543
9544 default:
9545 /* Not a container, not a call, or a call whose value is used. */
9546 break;
9547 }
9548 }
9549 }
9550
9551 namespace {
9552
9553 const pass_data pass_data_warn_unused_result =
9554 {
9555 GIMPLE_PASS, /* type */
9556 "*warn_unused_result", /* name */
9557 OPTGROUP_NONE, /* optinfo_flags */
9558 TV_NONE, /* tv_id */
9559 PROP_gimple_any, /* properties_required */
9560 0, /* properties_provided */
9561 0, /* properties_destroyed */
9562 0, /* todo_flags_start */
9563 0, /* todo_flags_finish */
9564 };
9565
9566 class pass_warn_unused_result : public gimple_opt_pass
9567 {
9568 public:
9569 pass_warn_unused_result (gcc::context *ctxt)
9570 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9571 {}
9572
9573 /* opt_pass methods: */
9574 virtual bool gate (function *) { return flag_warn_unused_result; }
9575 virtual unsigned int execute (function *)
9576 {
9577 do_warn_unused_result (gimple_body (current_function_decl));
9578 return 0;
9579 }
9580
9581 }; // class pass_warn_unused_result
9582
9583 } // anon namespace
9584
9585 gimple_opt_pass *
9586 make_pass_warn_unused_result (gcc::context *ctxt)
9587 {
9588 return new pass_warn_unused_result (ctxt);
9589 }
9590
9591 /* IPA passes, compilation of earlier functions or inlining
9592 might have changed some properties, such as marking functions nothrow,
9593 pure, const or noreturn.
9594 Remove redundant edges and basic blocks, and create new ones if necessary.
9595
9596 This pass can't be executed as a standalone pass from the pass manager,
9597 because between inlining and this fixup verify_flow_info would fail. */
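/* For illustration (a hedged sketch): if IPA analysis has just
   determined that a callee cannot return, e.g.

     void fatal (const char *);   // later discovered to be noreturn

     int
     f (void)
     {
       fatal ("boom");
       return 0;                  // fallthrough edge is now redundant
     }

   the edge and block after the call must be fixed up here; likewise a
   call newly discovered to be const or pure may have dead abnormal
   call edges that can be purged.  */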
9598
9599 unsigned int
9600 execute_fixup_cfg (void)
9601 {
9602 basic_block bb;
9603 gimple_stmt_iterator gsi;
9604 int todo = 0;
9605 cgraph_node *node = cgraph_node::get (current_function_decl);
9606 /* Same scaling is also done by ipa_merge_profiles. */
9607 profile_count num = node->count;
9608 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9609 bool scale = num.initialized_p () && !(num == den);
9610
9611 if (scale)
9612 {
9613 profile_count::adjust_for_ipa_scaling (&num, &den);
9614 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9615 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9616 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9617 }
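/* Illustrative arithmetic (an editor's example, not from the sources):
   if node->count is 200 while the entry block count is 100, NUM/DEN is
   200/100, so a block whose count is 30 is rescaled by
   bb->count.apply_scale (num, den) to 60, bringing the local profile
   in line with the IPA-level count.  */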
9618
9619 FOR_EACH_BB_FN (bb, cfun)
9620 {
9621 if (scale)
9622 bb->count = bb->count.apply_scale (num, den);
9623 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9624 {
9625 gimple *stmt = gsi_stmt (gsi);
9626 tree decl = is_gimple_call (stmt)
9627 ? gimple_call_fndecl (stmt)
9628 : NULL;
9629 if (decl)
9630 {
9631 int flags = gimple_call_flags (stmt);
9632 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9633 {
9634 if (gimple_purge_dead_abnormal_call_edges (bb))
9635 todo |= TODO_cleanup_cfg;
9636
9637 if (gimple_in_ssa_p (cfun))
9638 {
9639 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9640 update_stmt (stmt);
9641 }
9642 }
9643
9644 if (flags & ECF_NORETURN
9645 && fixup_noreturn_call (stmt))
9646 todo |= TODO_cleanup_cfg;
9647 }
9648
9649 /* Remove stores to variables we marked write-only.
9650 Keep the access when the store has side effects, i.e. when the
9651 source is volatile. */
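/* For illustration (a hedged sketch): given

     static int cache;   // IPA found no reads: marked write-only

     void
     record (int v)
     {
       cache = v;        // dead store, removed here
     }

   the store to cache is deleted, whereas something like
   "cache = *(volatile int *) p;" is kept, because the volatile read on
   the right-hand side is a side effect.  */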
9652 if (gimple_store_p (stmt)
9653 && !gimple_has_side_effects (stmt)
9654 && !optimize_debug)
9655 {
9656 tree lhs = get_base_address (gimple_get_lhs (stmt));
9657
9658 if (VAR_P (lhs)
9659 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9660 && varpool_node::get (lhs)->writeonly)
9661 {
9662 unlink_stmt_vdef (stmt);
9663 gsi_remove (&gsi, true);
9664 release_defs (stmt);
9665 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9666 continue;
9667 }
9668 }
9669 /* For calls we can simply remove the LHS when the variable it
9670 stores to is known to be write-only. */
9671 if (is_gimple_call (stmt)
9672 && gimple_get_lhs (stmt))
9673 {
9674 tree lhs = get_base_address (gimple_get_lhs (stmt));
9675
9676 if (VAR_P (lhs)
9677 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9678 && varpool_node::get (lhs)->writeonly)
9679 {
9680 gimple_call_set_lhs (stmt, NULL);
9681 update_stmt (stmt);
9682 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9683 }
9684 }
9685
9686 if (maybe_clean_eh_stmt (stmt)
9687 && gimple_purge_dead_eh_edges (bb))
9688 todo |= TODO_cleanup_cfg;
9689 gsi_next (&gsi);
9690 }
9691
9692 /* If we have a basic block with no successors that does not
9693 end with a control statement or a noreturn call, end it with
9694 a call to __builtin_unreachable. This situation can occur
9695 when inlining a noreturn call that does in fact return. */
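/* For illustration (a hedged sketch): after inlining

     void my_exit (void) { }   // declared noreturn, yet returns

   into a caller, the inlined body simply falls off its end, leaving a
   block with no successors that ends in neither a control statement
   nor a noreturn call; the code below caps such a block with a
   __builtin_unreachable () call.  */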
9696 if (EDGE_COUNT (bb->succs) == 0)
9697 {
9698 gimple *stmt = last_stmt (bb);
9699 if (!stmt
9700 || (!is_ctrl_stmt (stmt)
9701 && (!is_gimple_call (stmt)
9702 || !gimple_call_noreturn_p (stmt))))
9703 {
9704 if (stmt && is_gimple_call (stmt))
9705 gimple_call_set_ctrl_altering (stmt, false);
9706 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9707 stmt = gimple_build_call (fndecl, 0);
9708 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9709 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9710 if (!cfun->after_inlining)
9711 {
9712 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9713 node->create_edge (cgraph_node::get_create (fndecl),
9714 call_stmt, bb->count);
9715 }
9716 }
9717 }
9718 }
9719 if (scale)
9720 {
9721 update_max_bb_count ();
9722 compute_function_frequency ();
9723 }
9724
9725 if (current_loops
9726 && (todo & TODO_cleanup_cfg))
9727 loops_state_set (LOOPS_NEED_FIXUP);
9728
9729 return todo;
9730 }
9731
9732 namespace {
9733
9734 const pass_data pass_data_fixup_cfg =
9735 {
9736 GIMPLE_PASS, /* type */
9737 "fixup_cfg", /* name */
9738 OPTGROUP_NONE, /* optinfo_flags */
9739 TV_NONE, /* tv_id */
9740 PROP_cfg, /* properties_required */
9741 0, /* properties_provided */
9742 0, /* properties_destroyed */
9743 0, /* todo_flags_start */
9744 0, /* todo_flags_finish */
9745 };
9746
9747 class pass_fixup_cfg : public gimple_opt_pass
9748 {
9749 public:
9750 pass_fixup_cfg (gcc::context *ctxt)
9751 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9752 {}
9753
9754 /* opt_pass methods: */
9755 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9756 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9757
9758 }; // class pass_fixup_cfg
9759
9760 } // anon namespace
9761
9762 gimple_opt_pass *
9763 make_pass_fixup_cfg (gcc::context *ctxt)
9764 {
9765 return new pass_fixup_cfg (ctxt);
9766 }
9767
9768 /* Garbage collection support for edge_def. */
9769
9770 extern void gt_ggc_mx (tree&);
9771 extern void gt_ggc_mx (gimple *&);
9772 extern void gt_ggc_mx (rtx&);
9773 extern void gt_ggc_mx (basic_block&);
9774
9775 static void
9776 gt_ggc_mx (rtx_insn *& x)
9777 {
9778 if (x)
9779 gt_ggc_mx_rtx_def ((void *) x);
9780 }
9781
9782 void
9783 gt_ggc_mx (edge_def *e)
9784 {
9785 tree block = LOCATION_BLOCK (e->goto_locus);
9786 gt_ggc_mx (e->src);
9787 gt_ggc_mx (e->dest);
9788 if (current_ir_type () == IR_GIMPLE)
9789 gt_ggc_mx (e->insns.g);
9790 else
9791 gt_ggc_mx (e->insns.r);
9792 gt_ggc_mx (block);
9793 }
9794
9795 /* PCH support for edge_def. */
9796
9797 extern void gt_pch_nx (tree&);
9798 extern void gt_pch_nx (gimple *&);
9799 extern void gt_pch_nx (rtx&);
9800 extern void gt_pch_nx (basic_block&);
9801
9802 static void
9803 gt_pch_nx (rtx_insn *& x)
9804 {
9805 if (x)
9806 gt_pch_nx_rtx_def ((void *) x);
9807 }
9808
9809 void
9810 gt_pch_nx (edge_def *e)
9811 {
9812 tree block = LOCATION_BLOCK (e->goto_locus);
9813 gt_pch_nx (e->src);
9814 gt_pch_nx (e->dest);
9815 if (current_ir_type () == IR_GIMPLE)
9816 gt_pch_nx (e->insns.g);
9817 else
9818 gt_pch_nx (e->insns.r);
9819 gt_pch_nx (block);
9820 }
9821
9822 void
9823 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9824 {
9825 tree block = LOCATION_BLOCK (e->goto_locus);
9826 op (&(e->src), cookie);
9827 op (&(e->dest), cookie);
9828 if (current_ir_type () == IR_GIMPLE)
9829 op (&(e->insns.g), cookie);
9830 else
9831 op (&(e->insns.r), cookie);
9832 op (&(block), cookie);
9833 }
9834
9835 #if CHECKING_P
9836
9837 namespace selftest {
9838
9839 /* Helper function for CFG selftests: create a dummy function decl
9840 and push it as cfun. */
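/* Typical use in the tests below (a usage sketch):

     tree fndecl = push_fndecl ("cfg_test_foo");
     function *fun = DECL_STRUCT_FUNCTION (fndecl);
     // ...create blocks and edges, assert on the result...
     pop_cfun ();

   Every push_fndecl call must be balanced by pop_cfun.  */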
9841
9842 static tree
9843 push_fndecl (const char *name)
9844 {
9845 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9846 /* FIXME: this uses input_location: */
9847 tree fndecl = build_fn_decl (name, fn_type);
9848 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9849 NULL_TREE, integer_type_node);
9850 DECL_RESULT (fndecl) = retval;
9851 push_struct_function (fndecl);
9852 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9853 ASSERT_TRUE (fun != NULL);
9854 init_empty_tree_cfg_for_function (fun);
9855 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9856 ASSERT_EQ (0, n_edges_for_fn (fun));
9857 return fndecl;
9858 }
9859
9860 /* These tests directly create CFGs.
9861 Compare with the static fns within tree-cfg.c:
9862 - build_gimple_cfg
9863 - make_blocks: calls create_basic_block (seq, bb);
9864 - make_edges. */
9865
9866 /* Verify a simple CFG of the form:
9867 ENTRY -> A -> B -> C -> EXIT. */
9868
9869 static void
9870 test_linear_chain ()
9871 {
9872 gimple_register_cfg_hooks ();
9873
9874 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9875 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9876
9877 /* Create some empty blocks. */
9878 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9879 basic_block bb_b = create_empty_bb (bb_a);
9880 basic_block bb_c = create_empty_bb (bb_b);
9881
9882 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9883 ASSERT_EQ (0, n_edges_for_fn (fun));
9884
9885 /* Create some edges: a simple linear chain of BBs. */
9886 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9887 make_edge (bb_a, bb_b, 0);
9888 make_edge (bb_b, bb_c, 0);
9889 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9890
9891 /* Verify the edges. */
9892 ASSERT_EQ (4, n_edges_for_fn (fun));
9893 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9894 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9895 ASSERT_EQ (1, bb_a->preds->length ());
9896 ASSERT_EQ (1, bb_a->succs->length ());
9897 ASSERT_EQ (1, bb_b->preds->length ());
9898 ASSERT_EQ (1, bb_b->succs->length ());
9899 ASSERT_EQ (1, bb_c->preds->length ());
9900 ASSERT_EQ (1, bb_c->succs->length ());
9901 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9902 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9903
9904 /* Verify the dominance information.
9905 Each BB in our simple chain should be dominated by the one before
9906 it. */
9907 calculate_dominance_info (CDI_DOMINATORS);
9908 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9909 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9910 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9911 ASSERT_EQ (1, dom_by_b.length ());
9912 ASSERT_EQ (bb_c, dom_by_b[0]);
9913 free_dominance_info (CDI_DOMINATORS);
9914 dom_by_b.release ();
9915
9916 /* Similarly for post-dominance: each BB in our chain is post-dominated
9917 by the one after it. */
9918 calculate_dominance_info (CDI_POST_DOMINATORS);
9919 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9920 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9921 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9922 ASSERT_EQ (1, postdom_by_b.length ());
9923 ASSERT_EQ (bb_a, postdom_by_b[0]);
9924 free_dominance_info (CDI_POST_DOMINATORS);
9925 postdom_by_b.release ();
9926
9927 pop_cfun ();
9928 }
9929
9930 /* Verify a simple CFG of the form:
9931 ENTRY
9932 |
9933 A
9934 / \
9935 /t \f
9936 B C
9937 \ /
9938 \ /
9939 D
9940 |
9941 EXIT. */
9942
9943 static void
9944 test_diamond ()
9945 {
9946 gimple_register_cfg_hooks ();
9947
9948 tree fndecl = push_fndecl ("cfg_test_diamond");
9949 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9950
9951 /* Create some empty blocks. */
9952 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9953 basic_block bb_b = create_empty_bb (bb_a);
9954 basic_block bb_c = create_empty_bb (bb_a);
9955 basic_block bb_d = create_empty_bb (bb_b);
9956
9957 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9958 ASSERT_EQ (0, n_edges_for_fn (fun));
9959
9960 /* Create the edges. */
9961 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9962 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9963 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9964 make_edge (bb_b, bb_d, 0);
9965 make_edge (bb_c, bb_d, 0);
9966 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9967
9968 /* Verify the edges. */
9969 ASSERT_EQ (6, n_edges_for_fn (fun));
9970 ASSERT_EQ (1, bb_a->preds->length ());
9971 ASSERT_EQ (2, bb_a->succs->length ());
9972 ASSERT_EQ (1, bb_b->preds->length ());
9973 ASSERT_EQ (1, bb_b->succs->length ());
9974 ASSERT_EQ (1, bb_c->preds->length ());
9975 ASSERT_EQ (1, bb_c->succs->length ());
9976 ASSERT_EQ (2, bb_d->preds->length ());
9977 ASSERT_EQ (1, bb_d->succs->length ());
9978
9979 /* Verify the dominance information. */
9980 calculate_dominance_info (CDI_DOMINATORS);
9981 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9982 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9983 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9984 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9985 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9986 dom_by_a.release ();
9987 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9988 ASSERT_EQ (0, dom_by_b.length ());
9989 dom_by_b.release ();
9990 free_dominance_info (CDI_DOMINATORS);
9991
9992 /* Similarly for post-dominance. */
9993 calculate_dominance_info (CDI_POST_DOMINATORS);
9994 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9995 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9996 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9997 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9998 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9999 postdom_by_d.release ();
10000 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10001 ASSERT_EQ (0, postdom_by_b.length ());
10002 postdom_by_b.release ();
10003 free_dominance_info (CDI_POST_DOMINATORS);
10004
10005 pop_cfun ();
10006 }
10007
10008 /* Verify that we can handle a CFG containing a "complete" aka
10009 fully-connected subgraph, where each of A, B, C and D below has
10010 an edge to every other node and to itself.
10011 E.g.:
10012 ENTRY EXIT
10013 | ^
10014 | /
10015 | /
10016 | /
10017 V/
10018 A<--->B
10019 ^^ ^^
10020 | \ / |
10021 | X |
10022 | / \ |
10023 VV VV
10024 C<--->D
10025 */
10026
10027 static void
10028 test_fully_connected ()
10029 {
10030 gimple_register_cfg_hooks ();
10031
10032 tree fndecl = push_fndecl ("cfg_fully_connected");
10033 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10034
10035 const int n = 4;
10036
10037 /* Create some empty blocks. */
10038 auto_vec <basic_block> subgraph_nodes;
10039 for (int i = 0; i < n; i++)
10040 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10041
10042 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10043 ASSERT_EQ (0, n_edges_for_fn (fun));
10044
10045 /* Create the edges. */
10046 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10047 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10048 for (int i = 0; i < n; i++)
10049 for (int j = 0; j < n; j++)
10050 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10051
10052 /* Verify the edges. */
10053 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10054 /* The first one is linked to ENTRY/EXIT as well as itself and
10055 everything else. */
10056 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10057 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10058 /* The other ones in the subgraph are linked to everything in
10059 the subgraph (including themselves). */
10060 for (int i = 1; i < n; i++)
10061 {
10062 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10063 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10064 }
10065
10066 /* Verify the dominance information. */
10067 calculate_dominance_info (CDI_DOMINATORS);
10068 /* The initial block in the subgraph should be dominated by ENTRY. */
10069 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10070 get_immediate_dominator (CDI_DOMINATORS,
10071 subgraph_nodes[0]));
10072 /* Every other block in the subgraph should be dominated by the
10073 initial block. */
10074 for (int i = 1; i < n; i++)
10075 ASSERT_EQ (subgraph_nodes[0],
10076 get_immediate_dominator (CDI_DOMINATORS,
10077 subgraph_nodes[i]));
10078 free_dominance_info (CDI_DOMINATORS);
10079
10080 /* Similarly for post-dominance. */
10081 calculate_dominance_info (CDI_POST_DOMINATORS);
10082 /* The initial block in the subgraph should be postdominated by EXIT. */
10083 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10084 get_immediate_dominator (CDI_POST_DOMINATORS,
10085 subgraph_nodes[0]));
10086 /* Every other block in the subgraph should be postdominated by the
10087 initial block, since that leads to EXIT. */
10088 for (int i = 1; i < n; i++)
10089 ASSERT_EQ (subgraph_nodes[0],
10090 get_immediate_dominator (CDI_POST_DOMINATORS,
10091 subgraph_nodes[i]));
10092 free_dominance_info (CDI_POST_DOMINATORS);
10093
10094 pop_cfun ();
10095 }
10096
10097 /* Run all of the selftests within this file. */
10098
10099 void
10100 tree_cfg_c_tests ()
10101 {
10102 test_linear_chain ();
10103 test_diamond ();
10104 test_fully_connected ();
10105 }
10106
10107 } // namespace selftest
10108
10109 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10110 - loop
10111 - nested loops
10112 - switch statement (a block with many out-edges)
10113 - something that jumps to itself
10114 - etc. */
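/* A minimal sketch (an editor's suggestion, deliberately not wired
   into tree_cfg_c_tests) of the "jumps to itself" case, using only the
   helpers already exercised above:

     static void
     test_self_loop ()
     {
       gimple_register_cfg_hooks ();
       tree fndecl = push_fndecl ("cfg_test_self_loop");
       function *fun = DECL_STRUCT_FUNCTION (fndecl);

       basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
       make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
       make_edge (bb_a, bb_a, 0);   // the self-loop
       make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

       calculate_dominance_info (CDI_DOMINATORS);
       // The self-edge must not disturb immediate dominance.
       ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
                  get_immediate_dominator (CDI_DOMINATORS, bb_a));
       free_dominance_info (CDI_DOMINATORS);
       pop_cfun ();
     }  */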
10115
10116 #endif /* CHECKING_P */