/* Control flow functions for trees.
   Copyright (C) 2001-2020 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
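
/* As a rough illustration (hypothetical input, simplified notation),
   a body such as

     if (a) goto L0; else goto L1;
     L0: b = 1; goto L2;
     L1: b = 2;
     L2: return b;

   is carved by make_blocks into separate blocks at each label and
   after each control statement; make_edges then adds the TRUE/FALSE
   edges out of the GIMPLE_COND block, the goto edges, and the edge
   to EXIT for the return.  */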

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
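
/* For instance, a loop annotated with "#pragma GCC ivdep" reaches this
   point with its exit test preceded by an internal call, schematically
   (exact dump syntax may differ):

     _2 = .ANNOTATE (_1, annot_expr_ivdep_kind, 0);
     if (_2 != 0) goto <body>; else goto <exit>;

   The code above records the annotation on the loop (here by setting
   loop->safelen = INT_MAX) and rewrites the call into a plain copy
   _2 = _1, leaving an ordinary exit test behind.  */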

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  class loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);

      /* Push the global flag_finite_loops state down to individual loops.  */
      loop->finite_p = flag_finite_loops;
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
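
/* E.g. a block containing only (schematically)

     <L0>:
     x = {CLOBBER};
     __builtin_unreachable ();

   satisfies this predicate: labels, debug stmts and clobbers may
   precede the call, but any other statement makes it return false.  */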

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
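
/* Illustration: a call to abort () carries ECF_NORETURN, and, in a
   function containing nonlocal labels, an arbitrary call may make an
   abnormal goto; both get marked ctrl-altering here, which is what
   later makes stmt_ends_bb_p end the block at such calls.  */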

/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
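
/* The growth policy above expands the array by roughly 25% at a time;
   e.g. with last_basic_block_for_fn (cfun) == 20, new_size is
   20 + (20 + 3) / 4 == 25.  */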


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
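
/* Schematically, for two computed gotos "goto *p;" and "goto *q;",
   factoring produces (names as created above, blocks hypothetical):

     bb1: gotovar = p;  (fallthru to dispatcher)
     bb2: gotovar = q;  (fallthru to dispatcher)
     dispatcher: <factored label>: goto *gotovar;

   with one abnormal edge from the dispatcher to each FORCED_LABEL
   block, instead of a quadratic number of edges from every computed
   goto to every such label.  */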

/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LINE.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
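
/* Sketch of the resulting numbering: the first call for a given LINE
   creates the entry with discriminator 0 and returns 1, the next call
   returns 2, and so on; blocks never passed to this function keep the
   default discriminator 0.  */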

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
	  && to.file != NULL
	  && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}
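
/* For example (hypothetical one-line source)

     if (p) x = 1; else x = 2;

   puts the condition and both destination blocks on the same line, so
   the loop above hands one of them a fresh discriminator; sample-based
   profilers can then attribute counts to each arm separately.  */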

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}
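
/* E.g. for an asm goto of the form

     asm goto ("..." : : : : l1, l2);

   this adds one outgoing edge per label operand, to the blocks holding
   l1 and l2; the fallthru edge is added by the caller, make_edges_bb.  */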

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
};

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label, label_record *label_for_bb)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (label_record *label_for_bb)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad, label_for_bb);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab, label_for_bb);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab, label_for_bb);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_record *label_for_bb = XCNEWVEC (struct label_record,
					 last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label, label_for_bb);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh (label_for_bb);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
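
/* As an illustration (hypothetical block), if a block starts with
   labels L1 and L2 and some statement does "goto L2;", the goto is
   redirected to the leading label L1 via main_block_label, after which
   L2 is deleted by the loop above provided it is artificial, not
   nonlocal, not forced, and no longer used.  */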

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = gimple_switch_default_bb (cfun, stmt);

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (cfun, CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL || base_bb == default_bb)
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb))
	  /* Don't optimize this if __builtin_unreachable () is the
	     implicitly added one by the C++ FE too early, before
	     -Wreturn-type can be diagnosed.  We'll optimize it later
	     during switchconv pass or any other cfg cleanup.  */
	  && (gimple_in_ssa_p (cfun)
	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
		  != BUILTINS_LOCATION)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    remove_edge_and_dominated_blocks (base_edge);
	  i = next_index;
	  continue;
	}

      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  return new_size < old_size;
}
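
/* Concretely, a base case "case 1:" (base_high == 1) absorbs a
   following "case 2:" jumping to the same block because
   wi::to_wide (CASE_LOW) == base_high + 1; a following "case 4:"
   is not consecutive and instead starts a new base case.  */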
1814
1815 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1816 and scan the sorted vector of cases. Combine the ones jumping to the
1817 same label. */
1818
1819 bool
1820 group_case_labels (void)
1821 {
1822 basic_block bb;
1823 bool changed = false;
1824
1825 FOR_EACH_BB_FN (bb, cfun)
1826 {
1827 gimple *stmt = last_stmt (bb);
1828 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1829 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1830 }
1831
1832 return changed;
1833 }
1834
1835 /* Checks whether we can merge block B into block A. */
1836
1837 static bool
1838 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1839 {
1840 gimple *stmt;
1841
1842 if (!single_succ_p (a))
1843 return false;
1844
1845 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1846 return false;
1847
1848 if (single_succ (a) != b)
1849 return false;
1850
1851 if (!single_pred_p (b))
1852 return false;
1853
1854 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1855 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1856 return false;
1857
1858 /* If A ends by a statement causing exceptions or something similar, we
1859 cannot merge the blocks. */
1860 stmt = last_stmt (a);
1861 if (stmt && stmt_ends_bb_p (stmt))
1862 return false;
1863
1864 /* Do not allow a block with only a non-local label to be merged. */
1865 if (stmt)
1866 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1867 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1868 return false;
1869
1870 /* Examine the labels at the beginning of B. */
1871 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1872 gsi_next (&gsi))
1873 {
1874 tree lab;
1875 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1876 if (!label_stmt)
1877 break;
1878 lab = gimple_label_label (label_stmt);
1879
1880 /* Do not remove user-forced labels, or at -O0 any user labels. */
1881 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1882 return false;
1883 }
1884
1885 /* Protect simple loop latches. We only want to avoid merging
1886 the latch with the loop header or with a block in another
1887 loop in this case. */
1888 if (current_loops
1889 && b->loop_father->latch == b
1890 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1891 && (b->loop_father->header == a
1892 || b->loop_father != a->loop_father))
1893 return false;
1894
1895 /* It must be possible to eliminate all phi nodes in B. If ssa form
1896 is not up-to-date and a name-mapping is registered, we cannot eliminate
1897 any phis. Symbols marked for renaming are never a problem though. */
1898 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1899 gsi_next (&gsi))
1900 {
1901 gphi *phi = gsi.phi ();
1902 /* Technically only new names matter. */
1903 if (name_registered_for_update_p (PHI_RESULT (phi)))
1904 return false;
1905 }
1906
1907 /* When not optimizing, don't merge if we'd lose goto_locus. */
1908 if (!optimize
1909 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1910 {
1911 location_t goto_locus = single_succ_edge (a)->goto_locus;
1912 gimple_stmt_iterator prev, next;
1913 prev = gsi_last_nondebug_bb (a);
1914 next = gsi_after_labels (b);
1915 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1916 gsi_next_nondebug (&next);
1917 if ((gsi_end_p (prev)
1918 || gimple_location (gsi_stmt (prev)) != goto_locus)
1919 && (gsi_end_p (next)
1920 || gimple_location (gsi_stmt (next)) != goto_locus))
1921 return false;
1922 }
1923
1924 return true;
1925 }
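
/* For example (an illustrative reading of the checks above, not an
   exhaustive specification): a block A whose single fallthru successor
   is B, where B has A as its single predecessor, can be merged unless
   e.g. A ends in a statement that ends a basic block, B starts with a
   user label at -O0, or B is the simple latch of the loop whose header
   is A.  */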
1926
1927 /* Replaces all uses of NAME by VAL. */
1928
1929 void
1930 replace_uses_by (tree name, tree val)
1931 {
1932 imm_use_iterator imm_iter;
1933 use_operand_p use;
1934 gimple *stmt;
1935 edge e;
1936
1937 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1938 {
1939 /* Mark the block if we change the last stmt in it. */
1940 if (cfgcleanup_altered_bbs
1941 && stmt_ends_bb_p (stmt))
1942 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1943
1944 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1945 {
1946 replace_exp (use, val);
1947
1948 if (gimple_code (stmt) == GIMPLE_PHI)
1949 {
1950 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1951 PHI_ARG_INDEX_FROM_USE (use));
1952 if (e->flags & EDGE_ABNORMAL
1953 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1954 {
1955 /* This can only occur for virtual operands, since
1956 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1957 would prevent replacement. */
1958 gcc_checking_assert (virtual_operand_p (name));
1959 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1960 }
1961 }
1962 }
1963
1964 if (gimple_code (stmt) != GIMPLE_PHI)
1965 {
1966 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1967 gimple *orig_stmt = stmt;
1968 size_t i;
1969
1970 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1971 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1972 only change something from non-invariant to invariant, and only
1973 when propagating constants. */
1974 if (is_gimple_min_invariant (val))
1975 for (i = 0; i < gimple_num_ops (stmt); i++)
1976 {
1977 tree op = gimple_op (stmt, i);
1978 /* Operands may be empty here. For example, the labels
1979 of a GIMPLE_COND are nulled out following the creation
1980 of the corresponding CFG edges. */
1981 if (op && TREE_CODE (op) == ADDR_EXPR)
1982 recompute_tree_invariant_for_addr_expr (op);
1983 }
1984
1985 if (fold_stmt (&gsi))
1986 stmt = gsi_stmt (gsi);
1987
1988 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1989 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1990
1991 update_stmt (stmt);
1992 }
1993 }
1994
1995 gcc_checking_assert (has_zero_uses (name));
1996
1997 /* Also update the trees stored in loop structures. */
1998 if (current_loops)
1999 {
2000 class loop *loop;
2001
2002 FOR_EACH_LOOP (loop, 0)
2003 {
2004 substitute_in_loop_info (loop, name, val);
2005 }
2006 }
2007 }
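
/* Illustrative use (a sketch under assumed context, not a call site in
   this file): a cleanup that has proven that SSA name x_1 always equals
   zero could substitute a constant for it:

     tree zero = build_zero_cst (TREE_TYPE (x_1));
     replace_uses_by (x_1, zero);

   Afterwards x_1 has no remaining uses (see the assert above), so its
   defining statement becomes dead.  */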
2008
2009 /* Merge block B into block A. */
2010
2011 static void
2012 gimple_merge_blocks (basic_block a, basic_block b)
2013 {
2014 gimple_stmt_iterator last, gsi;
2015 gphi_iterator psi;
2016
2017 if (dump_file)
2018 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2019
2020 /* Remove all single-valued PHI nodes from block B of the form
2021 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2022 gsi = gsi_last_bb (a);
2023 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2024 {
2025 gimple *phi = gsi_stmt (psi);
2026 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2027 gimple *copy;
2028 bool may_replace_uses = (virtual_operand_p (def)
2029 || may_propagate_copy (def, use));
2030
2031 /* In case we maintain loop closed ssa form, do not propagate arguments
2032 of loop exit phi nodes. */
2033 if (current_loops
2034 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2035 && !virtual_operand_p (def)
2036 && TREE_CODE (use) == SSA_NAME
2037 && a->loop_father != b->loop_father)
2038 may_replace_uses = false;
2039
2040 if (!may_replace_uses)
2041 {
2042 gcc_assert (!virtual_operand_p (def));
2043
2044 /* Note that just emitting the copies is fine -- there is no problem
2045 with ordering of phi nodes. This is because A is the single
2046 predecessor of B, therefore results of the phi nodes cannot
2047 appear as arguments of the phi nodes. */
2048 copy = gimple_build_assign (def, use);
2049 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2050 remove_phi_node (&psi, false);
2051 }
2052 else
2053 {
2054 /* If we deal with a PHI for virtual operands, we can simply
2055 propagate these without fussing with folding or updating
2056 the stmt. */
2057 if (virtual_operand_p (def))
2058 {
2059 imm_use_iterator iter;
2060 use_operand_p use_p;
2061 gimple *stmt;
2062
2063 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2064 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2065 SET_USE (use_p, use);
2066
2067 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2068 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2069 }
2070 else
2071 replace_uses_by (def, use);
2072
2073 remove_phi_node (&psi, true);
2074 }
2075 }
2076
2077 /* Ensure that B follows A. */
2078 move_block_after (b, a);
2079
2080 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2081 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2082
2083 /* Remove labels from B and set gimple_bb to A for other statements. */
2084 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2085 {
2086 gimple *stmt = gsi_stmt (gsi);
2087 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2088 {
2089 tree label = gimple_label_label (label_stmt);
2090 int lp_nr;
2091
2092 gsi_remove (&gsi, false);
2093
2094 /* Now that we can thread computed gotos, we might have
2095 a situation where we have a forced label in block B.
2096 However, the label at the start of block B might still be
2097 used in other ways (think about the runtime checking for
2098 Fortran assigned gotos). So we cannot just delete the
2099 label. Instead we move the label to the start of block A. */
2100 if (FORCED_LABEL (label))
2101 {
2102 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2103 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2104 }
2105 /* Other user labels are kept around in the form of a debug stmt. */
2106 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2107 {
2108 gimple *dbg = gimple_build_debug_bind (label,
2109 integer_zero_node,
2110 stmt);
2111 gimple_debug_bind_reset_value (dbg);
2112 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2113 }
2114
2115 lp_nr = EH_LANDING_PAD_NR (label);
2116 if (lp_nr)
2117 {
2118 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2119 lp->post_landing_pad = NULL;
2120 }
2121 }
2122 else
2123 {
2124 gimple_set_bb (stmt, a);
2125 gsi_next (&gsi);
2126 }
2127 }
2128
2129 /* When merging two BBs, if their counts are different, the larger count
2130 is selected as the new bb count. This is to handle inconsistent
2131 profiles. */
2132 if (a->loop_father == b->loop_father)
2133 {
2134 a->count = a->count.merge (b->count);
2135 }
2136
2137 /* Merge the sequences. */
2138 last = gsi_last_bb (a);
2139 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2140 set_bb_seq (b, NULL);
2141
2142 if (cfgcleanup_altered_bbs)
2143 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2144 }
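
/* Illustrative effect (a sketch): when B starts with the degenerate
   PHI "x_2 = PHI <x_1(A)>", merging B into A propagates x_1 to all
   uses of x_2 (or, when copy propagation is not possible, emits the
   copy "x_2 = x_1" at the end of A), then moves B's remaining
   statements after A's and rewrites their gimple_bb to A.  */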
2145
2146
2147 /* Of the two successors of BB, return the one that is not reachable
2148 by a complex edge, if there is one. Else, return BB. We use
2149 this in optimizations that use post-dominators for their heuristics,
2150 to catch the cases in C++ where function calls are involved. */
2151
2152 basic_block
2153 single_noncomplex_succ (basic_block bb)
2154 {
2155 edge e0, e1;
2156 if (EDGE_COUNT (bb->succs) != 2)
2157 return bb;
2158
2159 e0 = EDGE_SUCC (bb, 0);
2160 e1 = EDGE_SUCC (bb, 1);
2161 if (e0->flags & EDGE_COMPLEX)
2162 return e1->dest;
2163 if (e1->flags & EDGE_COMPLEX)
2164 return e0->dest;
2165
2166 return bb;
2167 }
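
/* E.g. (illustrative): for a block ending in a throwing call, with a
   fallthru edge to BB2 and an EH edge (which is in EDGE_COMPLEX) to a
   landing pad, this returns BB2; for a GIMPLE_COND block with two
   normal outgoing edges it returns BB itself.  */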
2168
2169 /* CALL is a GIMPLE_CALL. Set the current_function_calls_* flags. */
2170
2171 void
2172 notice_special_calls (gcall *call)
2173 {
2174 int flags = gimple_call_flags (call);
2175
2176 if (flags & ECF_MAY_BE_ALLOCA)
2177 cfun->calls_alloca = true;
2178 if (flags & ECF_RETURNS_TWICE)
2179 cfun->calls_setjmp = true;
2180 }
2181
2182
2183 /* Clear flags set by notice_special_calls. Used by dead code removal
2184 to update the flags. */
2185
2186 void
2187 clear_special_calls (void)
2188 {
2189 cfun->calls_alloca = false;
2190 cfun->calls_setjmp = false;
2191 }
2192
2193 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2194
2195 static void
2196 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2197 {
2198 /* Since this block is no longer reachable, we can just delete all
2199 of its PHI nodes. */
2200 remove_phi_nodes (bb);
2201
2202 /* Remove edges to BB's successors. */
2203 while (EDGE_COUNT (bb->succs) > 0)
2204 remove_edge (EDGE_SUCC (bb, 0));
2205 }
2206
2207
2208 /* Remove statements of basic block BB. */
2209
2210 static void
2211 remove_bb (basic_block bb)
2212 {
2213 gimple_stmt_iterator i;
2214
2215 if (dump_file)
2216 {
2217 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2218 if (dump_flags & TDF_DETAILS)
2219 {
2220 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2221 fprintf (dump_file, "\n");
2222 }
2223 }
2224
2225 if (current_loops)
2226 {
2227 class loop *loop = bb->loop_father;
2228
2229 /* If a loop gets removed, clean up the information associated
2230 with it. */
2231 if (loop->latch == bb
2232 || loop->header == bb)
2233 free_numbers_of_iterations_estimates (loop);
2234 }
2235
2236 /* Remove all the instructions in the block. */
2237 if (bb_seq (bb) != NULL)
2238 {
2239 /* Walk backwards so as to get a chance to substitute all
2240 released DEFs into debug stmts. See
2241 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2242 details. */
2243 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2244 {
2245 gimple *stmt = gsi_stmt (i);
2246 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2247 if (label_stmt
2248 && (FORCED_LABEL (gimple_label_label (label_stmt))
2249 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2250 {
2251 basic_block new_bb;
2252 gimple_stmt_iterator new_gsi;
2253
2254 /* A non-reachable non-local label may still be referenced.
2255 But it no longer needs to carry the extra semantics of
2256 non-locality. */
2257 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2258 {
2259 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2260 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2261 }
2262
2263 new_bb = bb->prev_bb;
2264 /* Don't move any labels into ENTRY block. */
2265 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2266 {
2267 new_bb = single_succ (new_bb);
2268 gcc_assert (new_bb != bb);
2269 }
2270 new_gsi = gsi_after_labels (new_bb);
2271 gsi_remove (&i, false);
2272 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2273 }
2274 else
2275 {
2276 /* Release SSA definitions. */
2277 release_defs (stmt);
2278 gsi_remove (&i, true);
2279 }
2280
2281 if (gsi_end_p (i))
2282 i = gsi_last_bb (bb);
2283 else
2284 gsi_prev (&i);
2285 }
2286 }
2287
2288 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2289 bb->il.gimple.seq = NULL;
2290 bb->il.gimple.phi_nodes = NULL;
2291 }
2292
2293
2294 /* Given a basic block BB and a value VAL for use in the final statement
2295 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2296 the edge that will be taken out of the block.
2297 If VAL is NULL_TREE, then the current value of the final statement's
2298 predicate or index is used.
2299 If the value does not match a unique edge, NULL is returned. */
2300
2301 edge
2302 find_taken_edge (basic_block bb, tree val)
2303 {
2304 gimple *stmt;
2305
2306 stmt = last_stmt (bb);
2307
2308 /* Handle ENTRY and EXIT. */
2309 if (!stmt)
2310 return NULL;
2311
2312 if (gimple_code (stmt) == GIMPLE_COND)
2313 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2314
2315 if (gimple_code (stmt) == GIMPLE_SWITCH)
2316 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2317
2318 if (computed_goto_p (stmt))
2319 {
2320 /* Only optimize if the argument is a label; if the argument is
2321 not a label then we cannot construct a proper CFG.
2322
2323 It may be the case that we only need to allow the LABEL_REF to
2324 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2325 appear inside a LABEL_EXPR just to be safe. */
2326 if (val
2327 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2328 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2329 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2330 }
2331
2332 /* Otherwise we only know the taken successor edge if it's unique. */
2333 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2334 }
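
/* Illustrative use (a sketch, not a call site in this file): after the
   controlling predicate of BB has been folded to a constant, CFG
   cleanup can ask which successor survives:

     edge taken = find_taken_edge (bb, integer_zero_node);
     if (taken)
       ... remove BB's other outgoing edges ...

   For a GIMPLE_COND this yields the false edge; for a GIMPLE_SWITCH,
   the edge of the case label matching zero.  */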
2335
2336 /* Given a constant value VAL and a basic block BB ending in a GOTO_EXPR
2337 statement, determine which of the outgoing edges will be taken out of the
2338 block. Return NULL if any edge may be taken. */
2339
2340 static edge
2341 find_taken_edge_computed_goto (basic_block bb, tree val)
2342 {
2343 basic_block dest;
2344 edge e = NULL;
2345
2346 dest = label_to_block (cfun, val);
2347 if (dest)
2348 e = find_edge (bb, dest);
2349
2350 /* It's possible for find_edge to return NULL here on invalid code
2351 that abuses the labels-as-values extension (e.g. code that attempts to
2352 jump *between* functions via stored labels-as-values; PR 84136).
2353 If so, then we simply return that NULL for the edge.
2354 We don't currently have a way of detecting such invalid code, so we
2355 can't assert that it was the case when a NULL edge occurs here. */
2356
2357 return e;
2358 }
2359
2360 /* Given COND_STMT and a constant value VAL for use as the predicate,
2361 determine which of the two edges will be taken out of
2362 the statement's block. Return NULL if either edge may be taken.
2363 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2364 is used. */
2365
2366 static edge
2367 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2368 {
2369 edge true_edge, false_edge;
2370
2371 if (val == NULL_TREE)
2372 {
2373 /* Use the current value of the predicate. */
2374 if (gimple_cond_true_p (cond_stmt))
2375 val = integer_one_node;
2376 else if (gimple_cond_false_p (cond_stmt))
2377 val = integer_zero_node;
2378 else
2379 return NULL;
2380 }
2381 else if (TREE_CODE (val) != INTEGER_CST)
2382 return NULL;
2383
2384 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2385 &true_edge, &false_edge);
2386
2387 return (integer_zerop (val) ? false_edge : true_edge);
2388 }
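
/* E.g. (illustrative): for "if (x_1 != 0)" with VAL == integer_zero_node
   the false edge is returned; with VAL == NULL_TREE an edge is returned
   only if the predicate has already folded to a constant, as in
   "if (1 != 0)".  */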
2389
2390 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2391 which edge will be taken out of the statement's block. Return NULL if any
2392 edge may be taken.
2393 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2394 is used. */
2395
2396 edge
2397 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2398 {
2399 basic_block dest_bb;
2400 edge e;
2401 tree taken_case;
2402
2403 if (gimple_switch_num_labels (switch_stmt) == 1)
2404 taken_case = gimple_switch_default_label (switch_stmt);
2405 else
2406 {
2407 if (val == NULL_TREE)
2408 val = gimple_switch_index (switch_stmt);
2409 if (TREE_CODE (val) != INTEGER_CST)
2410 return NULL;
2411 else
2412 taken_case = find_case_label_for_value (switch_stmt, val);
2413 }
2414 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2415
2416 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2417 gcc_assert (e);
2418 return e;
2419 }
2420
2421
2422 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2423 We can make optimal use here of the fact that the case labels are
2424 sorted: we do a binary search for a case matching VAL. */
2425
2426 tree
2427 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2428 {
2429 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2430 tree default_case = gimple_switch_default_label (switch_stmt);
2431
2432 for (low = 0, high = n; high - low > 1; )
2433 {
2434 size_t i = (high + low) / 2;
2435 tree t = gimple_switch_label (switch_stmt, i);
2436 int cmp;
2437
2438 /* Cache the result of comparing CASE_LOW and val. */
2439 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2440
2441 if (cmp > 0)
2442 high = i;
2443 else
2444 low = i;
2445
2446 if (CASE_HIGH (t) == NULL)
2447 {
2448 /* A single-valued case label. */
2449 if (cmp == 0)
2450 return t;
2451 }
2452 else
2453 {
2454 /* A case range. We can only handle integer ranges. */
2455 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2456 return t;
2457 }
2458 }
2459
2460 return default_case;
2461 }
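
/* For example (an illustrative trace): with the sorted label vector
   <default, case 1, case 5 ... 8, case 10> and VAL == 7, the search
   probes the middle label "case 5 ... 8", sees CASE_LOW == 5 <= 7
   <= 8 == CASE_HIGH, and returns that label; VAL == 9 matches no
   label and falls back to the default case.  */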
2462
2463
2464 /* Dump a basic block on stderr. */
2465
2466 void
2467 gimple_debug_bb (basic_block bb)
2468 {
2469 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2470 }
2471
2472
2473 /* Dump basic block with index N on stderr. */
2474
2475 basic_block
2476 gimple_debug_bb_n (int n)
2477 {
2478 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2479 return BASIC_BLOCK_FOR_FN (cfun, n);
2480 }
2481
2482
2483 /* Dump the CFG on stderr.
2484
2485 FLAGS are the same used by the tree dumping functions
2486 (see TDF_* in dumpfile.h). */
2487
2488 void
2489 gimple_debug_cfg (dump_flags_t flags)
2490 {
2491 gimple_dump_cfg (stderr, flags);
2492 }
2493
2494
2495 /* Dump the program showing basic block boundaries on the given FILE.
2496
2497 FLAGS are the same used by the tree dumping functions (see TDF_* in
2498 tree.h). */
2499
2500 void
2501 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2502 {
2503 if (flags & TDF_DETAILS)
2504 {
2505 dump_function_header (file, current_function_decl, flags);
2506 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2507 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2508 last_basic_block_for_fn (cfun));
2509
2510 brief_dump_cfg (file, flags);
2511 fprintf (file, "\n");
2512 }
2513
2514 if (flags & TDF_STATS)
2515 dump_cfg_stats (file);
2516
2517 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2518 }
2519
2520
2521 /* Dump CFG statistics on FILE. */
2522
2523 void
2524 dump_cfg_stats (FILE *file)
2525 {
2526 static long max_num_merged_labels = 0;
2527 unsigned long size, total = 0;
2528 long num_edges;
2529 basic_block bb;
2530 const char * const fmt_str = "%-30s%-13s%12s\n";
2531 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2532 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2533 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2534 const char *funcname = current_function_name ();
2535
2536 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2537
2538 fprintf (file, "---------------------------------------------------------\n");
2539 fprintf (file, fmt_str, "", " Number of ", "Memory");
2540 fprintf (file, fmt_str, "", " instances ", "used ");
2541 fprintf (file, "---------------------------------------------------------\n");
2542
2543 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2544 total += size;
2545 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2546 SIZE_AMOUNT (size));
2547
2548 num_edges = 0;
2549 FOR_EACH_BB_FN (bb, cfun)
2550 num_edges += EDGE_COUNT (bb->succs);
2551 size = num_edges * sizeof (class edge_def);
2552 total += size;
2553 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2554
2555 fprintf (file, "---------------------------------------------------------\n");
2556 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2557 SIZE_AMOUNT (total));
2558 fprintf (file, "---------------------------------------------------------\n");
2559 fprintf (file, "\n");
2560
2561 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2562 max_num_merged_labels = cfg_stats.num_merged_labels;
2563
2564 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2565 cfg_stats.num_merged_labels, max_num_merged_labels);
2566
2567 fprintf (file, "\n");
2568 }
2569
2570
2571 /* Dump CFG statistics on stderr. Keep extern so that it's always
2572 linked in the final executable. */
2573
2574 DEBUG_FUNCTION void
2575 debug_cfg_stats (void)
2576 {
2577 dump_cfg_stats (stderr);
2578 }
2579
2580 /*---------------------------------------------------------------------------
2581 Miscellaneous helpers
2582 ---------------------------------------------------------------------------*/
2583
2584 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2585 flow. Transfers of control flow associated with EH are excluded. */
2586
2587 static bool
2588 call_can_make_abnormal_goto (gimple *t)
2589 {
2590 /* If the function has no non-local labels, then a call cannot make an
2591 abnormal transfer of control. */
2592 if (!cfun->has_nonlocal_label
2593 && !cfun->calls_setjmp)
2594 return false;
2595
2596 /* Likewise if the call has no side effects. */
2597 if (!gimple_has_side_effects (t))
2598 return false;
2599
2600 /* Likewise if the called function is leaf. */
2601 if (gimple_call_flags (t) & ECF_LEAF)
2602 return false;
2603
2604 return true;
2605 }
2606
2607
2608 /* Return true if T can make an abnormal transfer of control flow.
2609 Transfers of control flow associated with EH are excluded. */
2610
2611 bool
2612 stmt_can_make_abnormal_goto (gimple *t)
2613 {
2614 if (computed_goto_p (t))
2615 return true;
2616 if (is_gimple_call (t))
2617 return call_can_make_abnormal_goto (t);
2618 return false;
2619 }
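
/* E.g. (illustrative): a computed "goto *p_1" always returns true here;
   in a function containing a non-local label, so does a plain call with
   side effects, while a call without side effects, or one to a function
   declared leaf, returns false.  */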
2620
2621
2622 /* Return true if T represents a stmt that always transfers control. */
2623
2624 bool
2625 is_ctrl_stmt (gimple *t)
2626 {
2627 switch (gimple_code (t))
2628 {
2629 case GIMPLE_COND:
2630 case GIMPLE_SWITCH:
2631 case GIMPLE_GOTO:
2632 case GIMPLE_RETURN:
2633 case GIMPLE_RESX:
2634 return true;
2635 default:
2636 return false;
2637 }
2638 }
2639
2640
2641 /* Return true if T is a statement that may alter the flow of control
2642 (e.g., a call to a non-returning function). */
2643
2644 bool
2645 is_ctrl_altering_stmt (gimple *t)
2646 {
2647 gcc_assert (t);
2648
2649 switch (gimple_code (t))
2650 {
2651 case GIMPLE_CALL:
2652 /* The per-stmt call flag indicates whether the call could alter
2653 control flow. */
2654 if (gimple_call_ctrl_altering_p (t))
2655 return true;
2656 break;
2657
2658 case GIMPLE_EH_DISPATCH:
2659 /* EH_DISPATCH branches to the individual catch handlers at
2660 this level of a try or allowed-exceptions region. It can
2661 fallthru to the next statement as well. */
2662 return true;
2663
2664 case GIMPLE_ASM:
2665 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2666 return true;
2667 break;
2668
2669 CASE_GIMPLE_OMP:
2670 /* OpenMP directives alter control flow. */
2671 return true;
2672
2673 case GIMPLE_TRANSACTION:
2674 /* A transaction start alters control flow. */
2675 return true;
2676
2677 default:
2678 break;
2679 }
2680
2681 /* If a statement can throw, it alters control flow. */
2682 return stmt_can_throw_internal (cfun, t);
2683 }
2684
2685
2686 /* Return true if T is a simple local goto. */
2687
2688 bool
2689 simple_goto_p (gimple *t)
2690 {
2691 return (gimple_code (t) == GIMPLE_GOTO
2692 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2693 }
2694
2695
2696 /* Return true if STMT should start a new basic block. PREV_STMT is
2697 the statement preceding STMT. It is used when STMT is a label or a
2698 case label. Labels should only start a new basic block if their
2699 previous statement wasn't a label. Otherwise, a sequence of labels
2700 would generate unnecessary basic blocks that contain only a single
2701 label. */
2702
2703 static inline bool
2704 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2705 {
2706 if (stmt == NULL)
2707 return false;
2708
2709 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2710 any nondebug stmts in the block. We don't want to start another
2711 block in this case: the debug stmt will already have started the
2712 one STMT would start if we weren't outputting debug stmts. */
2713 if (prev_stmt && is_gimple_debug (prev_stmt))
2714 return false;
2715
2716 /* Labels start a new basic block only if the preceding statement
2717 wasn't a label of the same type. This prevents the creation of
2718 consecutive blocks that have nothing but a single label. */
2719 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2720 {
2721 /* Nonlocal and computed GOTO targets always start a new block. */
2722 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2723 || FORCED_LABEL (gimple_label_label (label_stmt)))
2724 return true;
2725
2726 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2727 {
2728 if (DECL_NONLOCAL (gimple_label_label (plabel))
2729 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2730 return true;
2731
2732 cfg_stats.num_merged_labels++;
2733 return false;
2734 }
2735 else
2736 return true;
2737 }
2738 else if (gimple_code (stmt) == GIMPLE_CALL)
2739 {
2740 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2741 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2742 start a new block. */
2743 return true;
2744 if (gimple_call_internal_p (stmt, IFN_PHI)
2745 && prev_stmt
2746 && gimple_code (prev_stmt) != GIMPLE_LABEL
2747 && (gimple_code (prev_stmt) != GIMPLE_CALL
2748 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2749 /* PHI nodes start a new block unless preceded by a label
2750 or another PHI. */
2751 return true;
2752 }
2753
2754 return false;
2755 }
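
/* For example (illustrative), in the sequence

     L1:
     L2:
     x_1 = ...;

   only L1 starts a new basic block: L2 is merged into it (and counted
   in cfg_stats.num_merged_labels) as long as L2 is neither forced nor
   non-local and L1 is an artificial local label, whereas a FORCED_LABEL
   in L2's place would start a block of its own.  */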
2756
2757
2758 /* Return true if T should end a basic block. */
2759
2760 bool
2761 stmt_ends_bb_p (gimple *t)
2762 {
2763 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2764 }
2765
2766 /* Remove block annotations and other data structures. */
2767
2768 void
2769 delete_tree_cfg_annotations (struct function *fn)
2770 {
2771 vec_free (label_to_block_map_for_fn (fn));
2772 }
2773
2774 /* Return the virtual phi in BB. */
2775
2776 gphi *
2777 get_virtual_phi (basic_block bb)
2778 {
2779 for (gphi_iterator gsi = gsi_start_phis (bb);
2780 !gsi_end_p (gsi);
2781 gsi_next (&gsi))
2782 {
2783 gphi *phi = gsi.phi ();
2784
2785 if (virtual_operand_p (PHI_RESULT (phi)))
2786 return phi;
2787 }
2788
2789 return NULL;
2790 }
2791
2792 /* Return the first statement in basic block BB. */
2793
2794 gimple *
2795 first_stmt (basic_block bb)
2796 {
2797 gimple_stmt_iterator i = gsi_start_bb (bb);
2798 gimple *stmt = NULL;
2799
2800 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2801 {
2802 gsi_next (&i);
2803 stmt = NULL;
2804 }
2805 return stmt;
2806 }
2807
2808 /* Return the first non-label statement in basic block BB. */
2809
2810 static gimple *
2811 first_non_label_stmt (basic_block bb)
2812 {
2813 gimple_stmt_iterator i = gsi_start_bb (bb);
2814 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2815 gsi_next (&i);
2816 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2817 }
2818
2819 /* Return the last statement in basic block BB. */
2820
2821 gimple *
2822 last_stmt (basic_block bb)
2823 {
2824 gimple_stmt_iterator i = gsi_last_bb (bb);
2825 gimple *stmt = NULL;
2826
2827 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2828 {
2829 gsi_prev (&i);
2830 stmt = NULL;
2831 }
2832 return stmt;
2833 }
2834
2835 /* Return the last statement of an otherwise empty block. Return NULL
2836 if the block is totally empty, or if it contains more than one
2837 statement. */
2838
2839 gimple *
2840 last_and_only_stmt (basic_block bb)
2841 {
2842 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2843 gimple *last, *prev;
2844
2845 if (gsi_end_p (i))
2846 return NULL;
2847
2848 last = gsi_stmt (i);
2849 gsi_prev_nondebug (&i);
2850 if (gsi_end_p (i))
2851 return last;
2852
2853 /* Empty statements should no longer appear in the instruction stream.
2854 Everything that might have appeared before should be deleted by
2855 remove_useless_stmts, and the optimizers should just gsi_remove
2856 instead of smashing with build_empty_stmt.
2857
2858 Thus the only thing that should appear here in a block containing
2859 one executable statement is a label. */
2860 prev = gsi_stmt (i);
2861 if (gimple_code (prev) == GIMPLE_LABEL)
2862 return last;
2863 else
2864 return NULL;
2865 }
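
/* E.g. (illustrative): for a block containing only "L1: return x_1;"
   this returns the GIMPLE_RETURN; for an entirely empty block, or one
   with two executable statements, it returns NULL.  */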
2866
2867 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2868
2869 static void
2870 reinstall_phi_args (edge new_edge, edge old_edge)
2871 {
2872 edge_var_map *vm;
2873 int i;
2874 gphi_iterator phis;
2875
2876 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2877 if (!v)
2878 return;
2879
2880 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2881 v->iterate (i, &vm) && !gsi_end_p (phis);
2882 i++, gsi_next (&phis))
2883 {
2884 gphi *phi = phis.phi ();
2885 tree result = redirect_edge_var_map_result (vm);
2886 tree arg = redirect_edge_var_map_def (vm);
2887
2888 gcc_assert (result == gimple_phi_result (phi));
2889
2890 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2891 }
2892
2893 redirect_edge_var_map_clear (old_edge);
2894 }
2895
2896 /* Returns the basic block after which the new basic block created
2897 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2898 near its "logical" location. This is of most help to humans looking
2899 at debugging dumps. */
2900
2901 basic_block
2902 split_edge_bb_loc (edge edge_in)
2903 {
2904 basic_block dest = edge_in->dest;
2905 basic_block dest_prev = dest->prev_bb;
2906
2907 if (dest_prev)
2908 {
2909 edge e = find_edge (dest_prev, dest);
2910 if (e && !(e->flags & EDGE_COMPLEX))
2911 return edge_in->src;
2912 }
2913 return dest_prev;
2914 }
2915
2916 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2917 Abort on abnormal edges. */
2918
2919 static basic_block
2920 gimple_split_edge (edge edge_in)
2921 {
2922 basic_block new_bb, after_bb, dest;
2923 edge new_edge, e;
2924
2925 /* Abnormal edges cannot be split. */
2926 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2927
2928 dest = edge_in->dest;
2929
2930 after_bb = split_edge_bb_loc (edge_in);
2931
2932 new_bb = create_empty_bb (after_bb);
2933 new_bb->count = edge_in->count ();
2934
2935 e = redirect_edge_and_branch (edge_in, new_bb);
2936 gcc_assert (e == edge_in);
2937
2938 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2939 reinstall_phi_args (new_edge, e);
2940
2941 return new_bb;
2942 }
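
/* Illustrative effect (a sketch): splitting the edge E from A to B turns

     A --E--> B

   into

     A --E--> NEW --fallthru--> B

   where NEW inherits E's count and the PHI arguments that B received
   along E are reinstalled on the new fallthru edge into B.  */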
2943
2944
2945 /* Verify properties of the address expression T whose base should be
2946 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2947
2948 static bool
2949 verify_address (tree t, bool verify_addressable)
2950 {
2951 bool old_constant;
2952 bool old_side_effects;
2953 bool new_constant;
2954 bool new_side_effects;
2955
2956 old_constant = TREE_CONSTANT (t);
2957 old_side_effects = TREE_SIDE_EFFECTS (t);
2958
2959 recompute_tree_invariant_for_addr_expr (t);
2960 new_side_effects = TREE_SIDE_EFFECTS (t);
2961 new_constant = TREE_CONSTANT (t);
2962
2963 if (old_constant != new_constant)
2964 {
2965 error ("constant not recomputed when %<ADDR_EXPR%> changed");
2966 return true;
2967 }
2968 if (old_side_effects != new_side_effects)
2969 {
2970 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
2971 return true;
2972 }
2973
2974 tree base = TREE_OPERAND (t, 0);
2975 while (handled_component_p (base))
2976 base = TREE_OPERAND (base, 0);
2977
2978 if (!(VAR_P (base)
2979 || TREE_CODE (base) == PARM_DECL
2980 || TREE_CODE (base) == RESULT_DECL))
2981 return false;
2982
2983 if (verify_addressable && !TREE_ADDRESSABLE (base))
2984 {
2985 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
2986 return true;
2987 }
2988
2989 return false;
2990 }
2991
2992
2993 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2994 Returns true if there is an error, otherwise false. */
2995
2996 static bool
2997 verify_types_in_gimple_min_lval (tree expr)
2998 {
2999 tree op;
3000
3001 if (is_gimple_id (expr))
3002 return false;
3003
3004 if (TREE_CODE (expr) != TARGET_MEM_REF
3005 && TREE_CODE (expr) != MEM_REF)
3006 {
3007 error ("invalid expression for min lvalue");
3008 return true;
3009 }
3010
3011 /* TARGET_MEM_REFs are strange beasts. */
3012 if (TREE_CODE (expr) == TARGET_MEM_REF)
3013 return false;
3014
3015 op = TREE_OPERAND (expr, 0);
3016 if (!is_gimple_val (op))
3017 {
3018 error ("invalid operand in indirect reference");
3019 debug_generic_stmt (op);
3020 return true;
3021 }
3022 /* Memory references now generally can involve a value conversion. */
3023
3024 return false;
3025 }
3026
3027 /* Verify if EXPR is a valid GIMPLE reference expression. If
3028 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3029 if there is an error, otherwise false. */
3030
3031 static bool
3032 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3033 {
3034 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3035
3036 if (TREE_CODE (expr) == REALPART_EXPR
3037 || TREE_CODE (expr) == IMAGPART_EXPR
3038 || TREE_CODE (expr) == BIT_FIELD_REF)
3039 {
3040 tree op = TREE_OPERAND (expr, 0);
3041 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3042 {
3043 error ("non-scalar %qs", code_name);
3044 return true;
3045 }
3046
3047 if (TREE_CODE (expr) == BIT_FIELD_REF)
3048 {
3049 tree t1 = TREE_OPERAND (expr, 1);
3050 tree t2 = TREE_OPERAND (expr, 2);
3051 poly_uint64 size, bitpos;
3052 if (!poly_int_tree_p (t1, &size)
3053 || !poly_int_tree_p (t2, &bitpos)
3054 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3055 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3056 {
3057 error ("invalid position or size operand to %qs", code_name);
3058 return true;
3059 }
3060 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3061 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3062 {
3063 error ("integral result type precision does not match "
3064 "field size of %qs", code_name);
3065 return true;
3066 }
3067 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3068 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3069 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3070 size))
3071 {
3072 error ("mode size of non-integral result does not "
3073 "match field size of %qs",
3074 code_name);
3075 return true;
3076 }
3077 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3078 && !type_has_mode_precision_p (TREE_TYPE (op)))
3079 {
3080 error ("%qs of non-mode-precision operand", code_name);
3081 return true;
3082 }
3083 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3084 && maybe_gt (size + bitpos,
3085 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3086 {
3087 error ("position plus size exceeds size of referenced object in "
3088 "%qs", code_name);
3089 return true;
3090 }
3091 }
3092
3093 if ((TREE_CODE (expr) == REALPART_EXPR
3094 || TREE_CODE (expr) == IMAGPART_EXPR)
3095 && !useless_type_conversion_p (TREE_TYPE (expr),
3096 TREE_TYPE (TREE_TYPE (op))))
3097 {
3098 error ("type mismatch in %qs reference", code_name);
3099 debug_generic_stmt (TREE_TYPE (expr));
3100 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3101 return true;
3102 }
3103 expr = op;
3104 }
3105
3106 while (handled_component_p (expr))
3107 {
3108 code_name = get_tree_code_name (TREE_CODE (expr));
3109
3110 if (TREE_CODE (expr) == REALPART_EXPR
3111 || TREE_CODE (expr) == IMAGPART_EXPR
3112 || TREE_CODE (expr) == BIT_FIELD_REF)
3113 {
3114 error ("non-top-level %qs", code_name);
3115 return true;
3116 }
3117
3118 tree op = TREE_OPERAND (expr, 0);
3119
3120 if (TREE_CODE (expr) == ARRAY_REF
3121 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3122 {
3123 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3124 || (TREE_OPERAND (expr, 2)
3125 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3126 || (TREE_OPERAND (expr, 3)
3127 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3128 {
3129 error ("invalid operands to %qs", code_name);
3130 debug_generic_stmt (expr);
3131 return true;
3132 }
3133 }
3134
3135 /* Verify if the reference array element types are compatible. */
3136 if (TREE_CODE (expr) == ARRAY_REF
3137 && !useless_type_conversion_p (TREE_TYPE (expr),
3138 TREE_TYPE (TREE_TYPE (op))))
3139 {
3140 error ("type mismatch in %qs", code_name);
3141 debug_generic_stmt (TREE_TYPE (expr));
3142 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3143 return true;
3144 }
3145 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3146 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3147 TREE_TYPE (TREE_TYPE (op))))
3148 {
3149 error ("type mismatch in %qs", code_name);
3150 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3151 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3152 return true;
3153 }
3154
3155 if (TREE_CODE (expr) == COMPONENT_REF)
3156 {
3157 if (TREE_OPERAND (expr, 2)
3158 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3159 {
3160 error ("invalid %qs offset operator", code_name);
3161 return true;
3162 }
3163 if (!useless_type_conversion_p (TREE_TYPE (expr),
3164 TREE_TYPE (TREE_OPERAND (expr, 1))))
3165 {
3166 error ("type mismatch in %qs", code_name);
3167 debug_generic_stmt (TREE_TYPE (expr));
3168 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3169 return true;
3170 }
3171 }
3172
3173 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3174 {
3175 /* For VIEW_CONVERT_EXPRs, which are allowed here too, we only check
3176 that their operand is not an SSA name or an invariant when
3177 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3178 bug). Otherwise there is nothing to verify, gross mismatches at
3179 most invoke undefined behavior. */
3180 if (require_lvalue
3181 && (TREE_CODE (op) == SSA_NAME
3182 || is_gimple_min_invariant (op)))
3183 {
3184 error ("conversion of %qs on the left hand side of %qs",
3185 get_tree_code_name (TREE_CODE (op)), code_name);
3186 debug_generic_stmt (expr);
3187 return true;
3188 }
3189 else if (TREE_CODE (op) == SSA_NAME
3190 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3191 {
3192 error ("conversion of register to a different size in %qs",
3193 code_name);
3194 debug_generic_stmt (expr);
3195 return true;
3196 }
3197 else if (!handled_component_p (op))
3198 return false;
3199 }
3200
3201 expr = op;
3202 }
3203
3204 code_name = get_tree_code_name (TREE_CODE (expr));
3205
3206 if (TREE_CODE (expr) == MEM_REF)
3207 {
3208 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3209 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3210 && verify_address (TREE_OPERAND (expr, 0), false)))
3211 {
3212 error ("invalid address operand in %qs", code_name);
3213 debug_generic_stmt (expr);
3214 return true;
3215 }
3216 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3217 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3218 {
3219 error ("invalid offset operand in %qs", code_name);
3220 debug_generic_stmt (expr);
3221 return true;
3222 }
3223 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3224 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3225 {
3226 error ("invalid clique in %qs", code_name);
3227 debug_generic_stmt (expr);
3228 return true;
3229 }
3230 }
3231 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3232 {
3233 if (!TMR_BASE (expr)
3234 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3235 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3236 && verify_address (TMR_BASE (expr), false)))
3237 {
3238 error ("invalid address operand in %qs", code_name);
3239 return true;
3240 }
3241 if (!TMR_OFFSET (expr)
3242 || !poly_int_tree_p (TMR_OFFSET (expr))
3243 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3244 {
3245 error ("invalid offset operand in %qs", code_name);
3246 debug_generic_stmt (expr);
3247 return true;
3248 }
3249 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3250 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3251 {
3252 error ("invalid clique in %qs", code_name);
3253 debug_generic_stmt (expr);
3254 return true;
3255 }
3256 }
3257 else if (TREE_CODE (expr) == INDIRECT_REF)
3258 {
3259 error ("%qs in gimple IL", code_name);
3260 debug_generic_stmt (expr);
3261 return true;
3262 }
3263
3264 return ((require_lvalue || !is_gimple_min_invariant (expr))
3265 && verify_types_in_gimple_min_lval (expr));
3266 }
3267
3268 /* Returns true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3269 list of pointer-to types is trivially convertible to DEST. */
3270
3271 static bool
3272 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3273 {
3274 tree src;
3275
3276 if (!TYPE_POINTER_TO (src_obj))
3277 return true;
3278
3279 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3280 if (useless_type_conversion_p (dest, src))
3281 return true;
3282
3283 return false;
3284 }
3285
3286 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3287 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3288
3289 static bool
3290 valid_fixed_convert_types_p (tree type1, tree type2)
3291 {
3292 return (FIXED_POINT_TYPE_P (type1)
3293 && (INTEGRAL_TYPE_P (type2)
3294 || SCALAR_FLOAT_TYPE_P (type2)
3295 || FIXED_POINT_TYPE_P (type2)));
3296 }
3297
3298 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3299 is a problem, otherwise false. */
3300
3301 static bool
3302 verify_gimple_call (gcall *stmt)
3303 {
3304 tree fn = gimple_call_fn (stmt);
3305 tree fntype, fndecl;
3306 unsigned i;
3307
3308 if (gimple_call_internal_p (stmt))
3309 {
3310 if (fn)
3311 {
3312 error ("gimple call has two targets");
3313 debug_generic_stmt (fn);
3314 return true;
3315 }
3316 }
3317 else
3318 {
3319 if (!fn)
3320 {
3321 error ("gimple call has no target");
3322 return true;
3323 }
3324 }
3325
3326 if (fn && !is_gimple_call_addr (fn))
3327 {
3328 error ("invalid function in gimple call");
3329 debug_generic_stmt (fn);
3330 return true;
3331 }
3332
3333 if (fn
3334 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3335 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3336 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3337 {
3338 error ("non-function in gimple call");
3339 return true;
3340 }
3341
3342 fndecl = gimple_call_fndecl (stmt);
3343 if (fndecl
3344 && TREE_CODE (fndecl) == FUNCTION_DECL
3345 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3346 && !DECL_PURE_P (fndecl)
3347 && !TREE_READONLY (fndecl))
3348 {
3349 error ("invalid pure const state for function");
3350 return true;
3351 }
3352
3353 tree lhs = gimple_call_lhs (stmt);
3354 if (lhs
3355 && (!is_gimple_lvalue (lhs)
3356 || verify_types_in_gimple_reference (lhs, true)))
3357 {
3358 error ("invalid LHS in gimple call");
3359 return true;
3360 }
3361
3362 if (gimple_call_ctrl_altering_p (stmt)
3363 && gimple_call_noreturn_p (stmt)
3364 && should_remove_lhs_p (lhs))
3365 {
3366 error ("LHS in %<noreturn%> call");
3367 return true;
3368 }
3369
3370 fntype = gimple_call_fntype (stmt);
3371 if (fntype
3372 && lhs
3373 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3374 /* ??? At least C++ misses conversions at assignments from
3375 void * call results.
3376 For now simply allow arbitrary pointer type conversions. */
3377 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3378 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3379 {
3380 error ("invalid conversion in gimple call");
3381 debug_generic_stmt (TREE_TYPE (lhs));
3382 debug_generic_stmt (TREE_TYPE (fntype));
3383 return true;
3384 }
3385
3386 if (gimple_call_chain (stmt)
3387 && !is_gimple_val (gimple_call_chain (stmt)))
3388 {
3389 error ("invalid static chain in gimple call");
3390 debug_generic_stmt (gimple_call_chain (stmt));
3391 return true;
3392 }
3393
3394 /* If there is a static chain argument, the call should either be
3395 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3396 if (gimple_call_chain (stmt)
3397 && fndecl
3398 && !DECL_STATIC_CHAIN (fndecl))
3399 {
3400 error ("static chain with function that doesn%'t use one");
3401 return true;
3402 }
3403
3404 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3405 {
3406 switch (DECL_FUNCTION_CODE (fndecl))
3407 {
3408 case BUILT_IN_UNREACHABLE:
3409 case BUILT_IN_TRAP:
3410 if (gimple_call_num_args (stmt) > 0)
3411 {
3412 /* Built-in unreachable with parameters might not be caught by
3413 undefined behavior sanitizer. Front ends do check that users do
3414 not call them that way, but we also produce calls to
3415 __builtin_unreachable internally, for example when IPA figures
3416 out a call cannot happen in a legal program. In such cases,
3417 we must make sure arguments are stripped off. */
3418 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3419 "with arguments");
3420 return true;
3421 }
3422 break;
3423 default:
3424 break;
3425 }
3426 }
3427
3428 /* ??? The C frontend passes unpromoted arguments in case it
3429 didn't see a function declaration before the call. So for now
3430 leave the call arguments mostly unverified. Once we gimplify
3431 unit-at-a-time we have a chance to fix this. */
3432
3433 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3434 {
3435 tree arg = gimple_call_arg (stmt, i);
3436 if ((is_gimple_reg_type (TREE_TYPE (arg))
3437 && !is_gimple_val (arg))
3438 || (!is_gimple_reg_type (TREE_TYPE (arg))
3439 && !is_gimple_lvalue (arg)))
3440 {
3441 error ("invalid argument to gimple call");
3442 debug_generic_expr (arg);
3443 return true;
3444 }
3445 }
3446
3447 return false;
3448 }
3449
3450 /* Verifies the gimple comparison with the result type TYPE and
3451 the operands OP0 and OP1; the comparison code is CODE. */
3452
3453 static bool
3454 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3455 {
3456 tree op0_type = TREE_TYPE (op0);
3457 tree op1_type = TREE_TYPE (op1);
3458
3459 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3460 {
3461 error ("invalid operands in gimple comparison");
3462 return true;
3463 }
3464
3465 /* For comparisons we do not record an operation type giving the
3466 effective type the comparison is carried out in. Instead
3467 we require that either the first operand is trivially
3468 convertible into the second, or the other way around.
3469 Because we special-case pointers to void we allow
3470 comparisons of pointers with the same mode as well. */
3471 if (!useless_type_conversion_p (op0_type, op1_type)
3472 && !useless_type_conversion_p (op1_type, op0_type)
3473 && (!POINTER_TYPE_P (op0_type)
3474 || !POINTER_TYPE_P (op1_type)
3475 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3476 {
3477 error ("mismatching comparison operand types");
3478 debug_generic_expr (op0_type);
3479 debug_generic_expr (op1_type);
3480 return true;
3481 }
3482
3483 /* The resulting type of a comparison may be an effective boolean type. */
3484 if (INTEGRAL_TYPE_P (type)
3485 && (TREE_CODE (type) == BOOLEAN_TYPE
3486 || TYPE_PRECISION (type) == 1))
3487 {
3488 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3489 || TREE_CODE (op1_type) == VECTOR_TYPE)
3490 && code != EQ_EXPR && code != NE_EXPR
3491 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3492 && !VECTOR_INTEGER_TYPE_P (op0_type))
3493 {
3494 error ("unsupported operation or type for vector comparison"
3495 " returning a boolean");
3496 debug_generic_expr (op0_type);
3497 debug_generic_expr (op1_type);
3498 return true;
3499 }
3500 }
3501 /* Or a boolean vector type with the same element count
3502 as the comparison operand types. */
3503 else if (TREE_CODE (type) == VECTOR_TYPE
3504 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3505 {
3506 if (TREE_CODE (op0_type) != VECTOR_TYPE
3507 || TREE_CODE (op1_type) != VECTOR_TYPE)
3508 {
3509 error ("non-vector operands in vector comparison");
3510 debug_generic_expr (op0_type);
3511 debug_generic_expr (op1_type);
3512 return true;
3513 }
3514
3515 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3516 TYPE_VECTOR_SUBPARTS (op0_type)))
3517 {
3518 error ("invalid vector comparison resulting type");
3519 debug_generic_expr (type);
3520 return true;
3521 }
3522 }
3523 else
3524 {
3525 error ("bogus comparison result type");
3526 debug_generic_expr (type);
3527 return true;
3528 }
3529
3530 return false;
3531 }
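
/* For example (illustrative): "_1 = p_2 != q_3" comparing two pointers
   of the same mode is accepted even if the pointed-to types differ,
   while comparing an integer with a float operand, or producing the
   result of a scalar comparison in a non-boolean vector type, is
   rejected by the checks above.  */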
3532
3533 /* Verify a gimple assignment statement STMT with a unary rhs.
3534 Returns true if anything is wrong. */
3535
3536 static bool
3537 verify_gimple_assign_unary (gassign *stmt)
3538 {
3539 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3540 tree lhs = gimple_assign_lhs (stmt);
3541 tree lhs_type = TREE_TYPE (lhs);
3542 tree rhs1 = gimple_assign_rhs1 (stmt);
3543 tree rhs1_type = TREE_TYPE (rhs1);
3544
3545 if (!is_gimple_reg (lhs))
3546 {
3547 error ("non-register as LHS of unary operation");
3548 return true;
3549 }
3550
3551 if (!is_gimple_val (rhs1))
3552 {
3553 error ("invalid operand in unary operation");
3554 return true;
3555 }
3556
3557 const char* const code_name = get_tree_code_name (rhs_code);
3558
3559 /* First handle conversions. */
3560 switch (rhs_code)
3561 {
3562 CASE_CONVERT:
3563 {
3564 /* Allow conversions between vectors with the same number of elements,
3565 provided that the conversion is OK for the element types too. */
3566 if (VECTOR_TYPE_P (lhs_type)
3567 && VECTOR_TYPE_P (rhs1_type)
3568 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3569 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3570 {
3571 lhs_type = TREE_TYPE (lhs_type);
3572 rhs1_type = TREE_TYPE (rhs1_type);
3573 }
3574 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3575 {
3576 error ("invalid vector types in nop conversion");
3577 debug_generic_expr (lhs_type);
3578 debug_generic_expr (rhs1_type);
3579 return true;
3580 }
3581
3582 /* Allow conversions from pointer type to integral type only if
3583 there is no sign or zero extension involved.
3584 For targets where the precision of ptrofftype doesn't match that
3585 of pointers we allow conversions to types where
3586 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3587 if ((POINTER_TYPE_P (lhs_type)
3588 && INTEGRAL_TYPE_P (rhs1_type))
3589 || (POINTER_TYPE_P (rhs1_type)
3590 && INTEGRAL_TYPE_P (lhs_type)
3591 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3592 #if defined(POINTERS_EXTEND_UNSIGNED)
3593 || (TYPE_MODE (rhs1_type) == ptr_mode
3594 && (TYPE_PRECISION (lhs_type)
3595 == BITS_PER_WORD /* word_mode */
3596 || (TYPE_PRECISION (lhs_type)
3597 == GET_MODE_PRECISION (Pmode))))
3598 #endif
3599 )))
3600 return false;
3601
3602 /* Allow conversion from integral to offset type and vice versa. */
3603 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3604 && INTEGRAL_TYPE_P (rhs1_type))
3605 || (INTEGRAL_TYPE_P (lhs_type)
3606 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3607 return false;
3608
3609 /* Otherwise assert we are converting between types of the
3610 same kind. */
3611 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3612 {
3613 error ("invalid types in nop conversion");
3614 debug_generic_expr (lhs_type);
3615 debug_generic_expr (rhs1_type);
3616 return true;
3617 }
3618
3619 return false;
3620 }
3621
3622 case ADDR_SPACE_CONVERT_EXPR:
3623 {
3624 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3625 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3626 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3627 {
3628 error ("invalid types in address space conversion");
3629 debug_generic_expr (lhs_type);
3630 debug_generic_expr (rhs1_type);
3631 return true;
3632 }
3633
3634 return false;
3635 }
3636
3637 case FIXED_CONVERT_EXPR:
3638 {
3639 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3640 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3641 {
3642 error ("invalid types in fixed-point conversion");
3643 debug_generic_expr (lhs_type);
3644 debug_generic_expr (rhs1_type);
3645 return true;
3646 }
3647
3648 return false;
3649 }
3650
3651 case FLOAT_EXPR:
3652 {
3653 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3654 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3655 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3656 {
3657 error ("invalid types in conversion to floating-point");
3658 debug_generic_expr (lhs_type);
3659 debug_generic_expr (rhs1_type);
3660 return true;
3661 }
3662
3663 return false;
3664 }
3665
3666 case FIX_TRUNC_EXPR:
3667 {
3668 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3669 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3670 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3671 {
3672 error ("invalid types in conversion to integer");
3673 debug_generic_expr (lhs_type);
3674 debug_generic_expr (rhs1_type);
3675 return true;
3676 }
3677
3678 return false;
3679 }
3680
3681 case VEC_UNPACK_HI_EXPR:
3682 case VEC_UNPACK_LO_EXPR:
3683 case VEC_UNPACK_FLOAT_HI_EXPR:
3684 case VEC_UNPACK_FLOAT_LO_EXPR:
3685 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3686 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3687 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3688 || TREE_CODE (lhs_type) != VECTOR_TYPE
3689 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3690 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3691 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3692 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3693 || ((rhs_code == VEC_UNPACK_HI_EXPR
3694 || rhs_code == VEC_UNPACK_LO_EXPR)
3695 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3696 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3697 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3698 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3699 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3700 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3701 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3702 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3703 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3704 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3705 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3706 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3707 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3708 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3709 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3710 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3711 {
3712 error ("type mismatch in %qs expression", code_name);
3713 debug_generic_expr (lhs_type);
3714 debug_generic_expr (rhs1_type);
3715 return true;
3716 }
3717
3718 return false;
3719
3720 case NEGATE_EXPR:
3721 case ABS_EXPR:
3722 case BIT_NOT_EXPR:
3723 case PAREN_EXPR:
3724 case CONJ_EXPR:
3725 break;
3726
3727 case ABSU_EXPR:
3728 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3729 || !TYPE_UNSIGNED (lhs_type)
3730 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3731 || TYPE_UNSIGNED (rhs1_type)
3732 || element_precision (lhs_type) != element_precision (rhs1_type))
3733 {
3734 error ("invalid types for %qs", code_name);
3735 debug_generic_expr (lhs_type);
3736 debug_generic_expr (rhs1_type);
3737 return true;
3738 }
3739 return false;
3740
3741 case VEC_DUPLICATE_EXPR:
3742 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3743 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3744 {
3745 error ("%qs should be from a scalar to a like vector", code_name);
3746 debug_generic_expr (lhs_type);
3747 debug_generic_expr (rhs1_type);
3748 return true;
3749 }
3750 return false;
3751
3752 default:
3753 gcc_unreachable ();
3754 }
3755
3756 /* For the remaining codes assert there is no conversion involved. */
3757 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3758 {
3759 error ("non-trivial conversion in unary operation");
3760 debug_generic_expr (lhs_type);
3761 debug_generic_expr (rhs1_type);
3762 return true;
3763 }
3764
3765 return false;
3766 }
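
/* E.g. (illustrative): a float-to-integer conversion must be expressed
   as FIX_TRUNC_EXPR; spelling it as a NOP_EXPR is rejected above as an
   invalid nop conversion, and "x_1 = -y_2" (NEGATE_EXPR) is valid only
   when the types of x_1 and y_2 are trivially convertible.  */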
3767
3768 /* Verify a gimple assignment statement STMT with a binary rhs.
3769 Returns true if anything is wrong. */
3770
3771 static bool
3772 verify_gimple_assign_binary (gassign *stmt)
3773 {
3774 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3775 tree lhs = gimple_assign_lhs (stmt);
3776 tree lhs_type = TREE_TYPE (lhs);
3777 tree rhs1 = gimple_assign_rhs1 (stmt);
3778 tree rhs1_type = TREE_TYPE (rhs1);
3779 tree rhs2 = gimple_assign_rhs2 (stmt);
3780 tree rhs2_type = TREE_TYPE (rhs2);
3781
3782 if (!is_gimple_reg (lhs))
3783 {
3784 error ("non-register as LHS of binary operation");
3785 return true;
3786 }
3787
3788 if (!is_gimple_val (rhs1)
3789 || !is_gimple_val (rhs2))
3790 {
3791 error ("invalid operands in binary operation");
3792 return true;
3793 }
3794
3795 const char* const code_name = get_tree_code_name (rhs_code);
3796
3797 /* First handle operations that involve different types. */
3798 switch (rhs_code)
3799 {
3800 case COMPLEX_EXPR:
3801 {
3802 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3803 || !(INTEGRAL_TYPE_P (rhs1_type)
3804 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3805 || !(INTEGRAL_TYPE_P (rhs2_type)
3806 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3807 {
3808 error ("type mismatch in %qs", code_name);
3809 debug_generic_expr (lhs_type);
3810 debug_generic_expr (rhs1_type);
3811 debug_generic_expr (rhs2_type);
3812 return true;
3813 }
3814
3815 return false;
3816 }
3817
3818 case LSHIFT_EXPR:
3819 case RSHIFT_EXPR:
3820 case LROTATE_EXPR:
3821 case RROTATE_EXPR:
3822 {
3823 /* Shifts and rotates are ok on integral types, fixed point
3824 types and integer vector types. */
3825 if ((!INTEGRAL_TYPE_P (rhs1_type)
3826 && !FIXED_POINT_TYPE_P (rhs1_type)
3827 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3828 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3829 || (!INTEGRAL_TYPE_P (rhs2_type)
3830 /* Vector shifts of vectors are also ok. */
3831 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3832 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3833 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3834 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3835 || !useless_type_conversion_p (lhs_type, rhs1_type))
3836 {
3837 error ("type mismatch in %qs", code_name);
3838 debug_generic_expr (lhs_type);
3839 debug_generic_expr (rhs1_type);
3840 debug_generic_expr (rhs2_type);
3841 return true;
3842 }
3843
3844 return false;
3845 }
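
/* As an illustration (informal dump syntax), both of these satisfy
   the shift checks above:

     vector(4) int _3 = _1 << 5;    vector shifted by a scalar
     vector(4) int _3 = _1 << _2;   with _2 : vector(4) int

   whereas shifting a float vector, or producing a result type that
   differs from the first operand's type, is diagnosed.  */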
3846
3847 case WIDEN_LSHIFT_EXPR:
3848 {
3849 if (!INTEGRAL_TYPE_P (lhs_type)
3850 || !INTEGRAL_TYPE_P (rhs1_type)
3851 || TREE_CODE (rhs2) != INTEGER_CST
3852 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3853 {
3854 error ("type mismatch in %qs", code_name);
3855 debug_generic_expr (lhs_type);
3856 debug_generic_expr (rhs1_type);
3857 debug_generic_expr (rhs2_type);
3858 return true;
3859 }
3860
3861 return false;
3862 }
3863
3864 case VEC_WIDEN_LSHIFT_HI_EXPR:
3865 case VEC_WIDEN_LSHIFT_LO_EXPR:
3866 {
3867 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3868 || TREE_CODE (lhs_type) != VECTOR_TYPE
3869 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3870 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3871 || TREE_CODE (rhs2) != INTEGER_CST
3872 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3873 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3874 {
3875 error ("type mismatch in %qs", code_name);
3876 debug_generic_expr (lhs_type);
3877 debug_generic_expr (rhs1_type);
3878 debug_generic_expr (rhs2_type);
3879 return true;
3880 }
3881
3882 return false;
3883 }
3884
3885 case PLUS_EXPR:
3886 case MINUS_EXPR:
3887 {
3888 tree lhs_etype = lhs_type;
3889 tree rhs1_etype = rhs1_type;
3890 tree rhs2_etype = rhs2_type;
3891 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3892 {
3893 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3894 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3895 {
3896 error ("invalid non-vector operands to %qs", code_name);
3897 return true;
3898 }
3899 lhs_etype = TREE_TYPE (lhs_type);
3900 rhs1_etype = TREE_TYPE (rhs1_type);
3901 rhs2_etype = TREE_TYPE (rhs2_type);
3902 }
3903 if (POINTER_TYPE_P (lhs_etype)
3904 || POINTER_TYPE_P (rhs1_etype)
3905 || POINTER_TYPE_P (rhs2_etype))
3906 {
3907 error ("invalid (pointer) operands to %qs", code_name);
3908 return true;
3909 }
3910
3911 /* Continue with generic binary expression handling. */
3912 break;
3913 }
3914
3915 case POINTER_PLUS_EXPR:
3916 {
3917 if (!POINTER_TYPE_P (rhs1_type)
3918 || !useless_type_conversion_p (lhs_type, rhs1_type)
3919 || !ptrofftype_p (rhs2_type))
3920 {
3921 error ("type mismatch in %qs", code_name);
3922 debug_generic_stmt (lhs_type);
3923 debug_generic_stmt (rhs1_type);
3924 debug_generic_stmt (rhs2_type);
3925 return true;
3926 }
3927
3928 return false;
3929 }
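
/* For illustration, C pointer arithmetic such as

     int *f (int *p) { return p + 4; }

   gimplifies to a POINTER_PLUS_EXPR: the LHS and the first operand
   have the pointer type, and the second operand is an unsigned
   sizetype byte offset (16 here).  */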
3930
3931 case POINTER_DIFF_EXPR:
3932 {
3933 if (!POINTER_TYPE_P (rhs1_type)
3934 || !POINTER_TYPE_P (rhs2_type)
3935 /* Because we special-case pointers to void we allow difference
3936 of arbitrary pointers with the same mode. */
3937 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3938 || TREE_CODE (lhs_type) != INTEGER_TYPE
3939 || TYPE_UNSIGNED (lhs_type)
3940 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3941 {
3942 error ("type mismatch in %qs", code_name);
3943 debug_generic_stmt (lhs_type);
3944 debug_generic_stmt (rhs1_type);
3945 debug_generic_stmt (rhs2_type);
3946 return true;
3947 }
3948
3949 return false;
3950 }
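
/* For illustration, a C pointer subtraction such as

     ptrdiff_t f (char *p, char *q) { return p - q; }

   becomes a POINTER_DIFF_EXPR: both operands are pointers of the same
   mode and the LHS is a signed integer type of the same precision as
   the pointers.  */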
3951
3952 case TRUTH_ANDIF_EXPR:
3953 case TRUTH_ORIF_EXPR:
3954 case TRUTH_AND_EXPR:
3955 case TRUTH_OR_EXPR:
3956 case TRUTH_XOR_EXPR:
3957
3958 gcc_unreachable ();
3959
3960 case LT_EXPR:
3961 case LE_EXPR:
3962 case GT_EXPR:
3963 case GE_EXPR:
3964 case EQ_EXPR:
3965 case NE_EXPR:
3966 case UNORDERED_EXPR:
3967 case ORDERED_EXPR:
3968 case UNLT_EXPR:
3969 case UNLE_EXPR:
3970 case UNGT_EXPR:
3971 case UNGE_EXPR:
3972 case UNEQ_EXPR:
3973 case LTGT_EXPR:
3974 /* Comparisons are also binary, but the result type is not
3975 connected to the operand types. */
3976 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3977
3978 case WIDEN_MULT_EXPR:
3979 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3980 return true;
3981 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3982 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
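
/* E.g. (informal dump syntax, where "w*" denotes WIDEN_MULT_EXPR)
   multiplying two 32-bit ints into a 64-bit long is accepted:

     long _3 = _1 w* _2;   with _1, _2 : int, and 2 * 32 <= 64  */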
3983
3984 case WIDEN_SUM_EXPR:
3985 {
3986 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3987 || TREE_CODE (lhs_type) != VECTOR_TYPE)
3988 && ((!INTEGRAL_TYPE_P (rhs1_type)
3989 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3990 || (!INTEGRAL_TYPE_P (lhs_type)
3991 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3992 || !useless_type_conversion_p (lhs_type, rhs2_type)
3993 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3994 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3995 {
3996 error ("type mismatch in %qs", code_name);
3997 debug_generic_expr (lhs_type);
3998 debug_generic_expr (rhs1_type);
3999 debug_generic_expr (rhs2_type);
4000 return true;
4001 }
4002 return false;
4003 }
4004
4005 case VEC_WIDEN_MULT_HI_EXPR:
4006 case VEC_WIDEN_MULT_LO_EXPR:
4007 case VEC_WIDEN_MULT_EVEN_EXPR:
4008 case VEC_WIDEN_MULT_ODD_EXPR:
4009 {
4010 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4011 || TREE_CODE (lhs_type) != VECTOR_TYPE
4012 || !types_compatible_p (rhs1_type, rhs2_type)
4013 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4014 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4015 {
4016 error ("type mismatch in %qs", code_name);
4017 debug_generic_expr (lhs_type);
4018 debug_generic_expr (rhs1_type);
4019 debug_generic_expr (rhs2_type);
4020 return true;
4021 }
4022 return false;
4023 }
4024
4025 case VEC_PACK_TRUNC_EXPR:
4026 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concatenate
4027 vector boolean types. */
4028 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4029 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4030 && types_compatible_p (rhs1_type, rhs2_type)
4031 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4032 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4033 return false;
4034
4035 /* Fallthru. */
4036 case VEC_PACK_SAT_EXPR:
4037 case VEC_PACK_FIX_TRUNC_EXPR:
4038 {
4039 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4040 || TREE_CODE (lhs_type) != VECTOR_TYPE
4041 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4042 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4043 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4044 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4045 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4046 || !types_compatible_p (rhs1_type, rhs2_type)
4047 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4048 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4049 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4050 TYPE_VECTOR_SUBPARTS (lhs_type)))
4051 {
4052 error ("type mismatch in %qs", code_name);
4053 debug_generic_expr (lhs_type);
4054 debug_generic_expr (rhs1_type);
4055 debug_generic_expr (rhs2_type);
4056 return true;
4057 }
4058
4059 return false;
4060 }
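
/* Illustrative example: packing two V4SI vectors into one V8HI result
   halves the element size and doubles the subpart count, exactly the
   relation checked above (informal notation):

     vector(8) short _3 = VEC_PACK_TRUNC_EXPR <_1, _2>;
     with _1, _2 : vector(4) int  */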
4061
4062 case VEC_PACK_FLOAT_EXPR:
4063 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4064 || TREE_CODE (lhs_type) != VECTOR_TYPE
4065 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4066 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4067 || !types_compatible_p (rhs1_type, rhs2_type)
4068 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4069 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4070 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4071 TYPE_VECTOR_SUBPARTS (lhs_type)))
4072 {
4073 error ("type mismatch in %qs", code_name);
4074 debug_generic_expr (lhs_type);
4075 debug_generic_expr (rhs1_type);
4076 debug_generic_expr (rhs2_type);
4077 return true;
4078 }
4079
4080 return false;
4081
4082 case MULT_EXPR:
4083 case MULT_HIGHPART_EXPR:
4084 case TRUNC_DIV_EXPR:
4085 case CEIL_DIV_EXPR:
4086 case FLOOR_DIV_EXPR:
4087 case ROUND_DIV_EXPR:
4088 case TRUNC_MOD_EXPR:
4089 case CEIL_MOD_EXPR:
4090 case FLOOR_MOD_EXPR:
4091 case ROUND_MOD_EXPR:
4092 case RDIV_EXPR:
4093 case EXACT_DIV_EXPR:
4094 case MIN_EXPR:
4095 case MAX_EXPR:
4096 case BIT_IOR_EXPR:
4097 case BIT_XOR_EXPR:
4098 case BIT_AND_EXPR:
4099 /* Continue with generic binary expression handling. */
4100 break;
4101
4102 case VEC_SERIES_EXPR:
4103 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4104 {
4105 error ("type mismatch in %qs", code_name);
4106 debug_generic_expr (rhs1_type);
4107 debug_generic_expr (rhs2_type);
4108 return true;
4109 }
4110 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4111 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4112 {
4113 error ("vector type expected in %qs", code_name);
4114 debug_generic_expr (lhs_type);
4115 return true;
4116 }
4117 return false;
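
/* VEC_SERIES_EXPR <_1, _2> builds the linear series
   { _1, _1 + _2, _1 + 2 * _2, ... }, so both scalar operands must
   have the result's element type, which is what is enforced above.  */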
4118
4119 default:
4120 gcc_unreachable ();
4121 }
4122
4123 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4124 || !useless_type_conversion_p (lhs_type, rhs2_type))
4125 {
4126 error ("type mismatch in binary expression");
4127 debug_generic_stmt (lhs_type);
4128 debug_generic_stmt (rhs1_type);
4129 debug_generic_stmt (rhs2_type);
4130 return true;
4131 }
4132
4133 return false;
4134 }
4135
4136 /* Verify a gimple assignment statement STMT with a ternary rhs.
4137 Returns true if anything is wrong. */
4138
4139 static bool
4140 verify_gimple_assign_ternary (gassign *stmt)
4141 {
4142 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4143 tree lhs = gimple_assign_lhs (stmt);
4144 tree lhs_type = TREE_TYPE (lhs);
4145 tree rhs1 = gimple_assign_rhs1 (stmt);
4146 tree rhs1_type = TREE_TYPE (rhs1);
4147 tree rhs2 = gimple_assign_rhs2 (stmt);
4148 tree rhs2_type = TREE_TYPE (rhs2);
4149 tree rhs3 = gimple_assign_rhs3 (stmt);
4150 tree rhs3_type = TREE_TYPE (rhs3);
4151
4152 if (!is_gimple_reg (lhs))
4153 {
4154 error ("non-register as LHS of ternary operation");
4155 return true;
4156 }
4157
4158 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4159 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4160 || !is_gimple_val (rhs2)
4161 || !is_gimple_val (rhs3))
4162 {
4163 error ("invalid operands in ternary operation");
4164 return true;
4165 }
4166
4167 const char* const code_name = get_tree_code_name (rhs_code);
4168
4169 /* First handle operations that involve different types. */
4170 switch (rhs_code)
4171 {
4172 case WIDEN_MULT_PLUS_EXPR:
4173 case WIDEN_MULT_MINUS_EXPR:
4174 if ((!INTEGRAL_TYPE_P (rhs1_type)
4175 && !FIXED_POINT_TYPE_P (rhs1_type))
4176 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4177 || !useless_type_conversion_p (lhs_type, rhs3_type)
4178 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4179 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4180 {
4181 error ("type mismatch in %qs", code_name);
4182 debug_generic_expr (lhs_type);
4183 debug_generic_expr (rhs1_type);
4184 debug_generic_expr (rhs2_type);
4185 debug_generic_expr (rhs3_type);
4186 return true;
4187 }
4188 break;
4189
4190 case VEC_COND_EXPR:
4191 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4192 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4193 TYPE_VECTOR_SUBPARTS (lhs_type)))
4194 {
4195 error ("the first argument of a %qs must be of a "
4196 "boolean vector type of the same number of elements "
4197 "as the result", code_name);
4198 debug_generic_expr (lhs_type);
4199 debug_generic_expr (rhs1_type);
4200 return true;
4201 }
4202 /* Fallthrough. */
4203 case COND_EXPR:
4204 if (!is_gimple_val (rhs1)
4205 && verify_gimple_comparison (TREE_TYPE (rhs1),
4206 TREE_OPERAND (rhs1, 0),
4207 TREE_OPERAND (rhs1, 1),
4208 TREE_CODE (rhs1)))
4209 return true;
4210 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4211 || !useless_type_conversion_p (lhs_type, rhs3_type))
4212 {
4213 error ("type mismatch in %qs", code_name);
4214 debug_generic_expr (lhs_type);
4215 debug_generic_expr (rhs2_type);
4216 debug_generic_expr (rhs3_type);
4217 return true;
4218 }
4219 break;
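
/* As an example (informal dump syntax), a GIMPLE conditional move

     _4 = _1 < _2 ? _3 : 5;

   requires both arms to be trivially convertible to the LHS type;
   for a VEC_COND_EXPR the condition is instead a boolean vector with
   one element per result lane.  */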
4220
4221 case VEC_PERM_EXPR:
4222 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4223 || !useless_type_conversion_p (lhs_type, rhs2_type))
4224 {
4225 error ("type mismatch in %qs", code_name);
4226 debug_generic_expr (lhs_type);
4227 debug_generic_expr (rhs1_type);
4228 debug_generic_expr (rhs2_type);
4229 debug_generic_expr (rhs3_type);
4230 return true;
4231 }
4232
4233 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4234 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4235 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4236 {
4237 error ("vector types expected in %qs", code_name);
4238 debug_generic_expr (lhs_type);
4239 debug_generic_expr (rhs1_type);
4240 debug_generic_expr (rhs2_type);
4241 debug_generic_expr (rhs3_type);
4242 return true;
4243 }
4244
4245 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4246 TYPE_VECTOR_SUBPARTS (rhs2_type))
4247 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4248 TYPE_VECTOR_SUBPARTS (rhs3_type))
4249 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4250 TYPE_VECTOR_SUBPARTS (lhs_type)))
4251 {
4252 error ("vectors with different numbers of elements found in %qs",
4253 code_name);
4254 debug_generic_expr (lhs_type);
4255 debug_generic_expr (rhs1_type);
4256 debug_generic_expr (rhs2_type);
4257 debug_generic_expr (rhs3_type);
4258 return true;
4259 }
4260
4261 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4262 || (TREE_CODE (rhs3) != VECTOR_CST
4263 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4264 (TREE_TYPE (rhs3_type)))
4265 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4266 (TREE_TYPE (rhs1_type))))))
4267 {
4268 error ("invalid mask type in %qs", code_name);
4269 debug_generic_expr (lhs_type);
4270 debug_generic_expr (rhs1_type);
4271 debug_generic_expr (rhs2_type);
4272 debug_generic_expr (rhs3_type);
4273 return true;
4274 }
4275
4276 return false;
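
/* Illustrative example with four-lane vectors (informal notation):

     _4 = VEC_PERM_EXPR <_1, _2, { 0, 4, 1, 5 }>;

   Mask indices 0-3 select lanes of _1 and 4-7 select lanes of _2;
   all four vectors involved must have the same number of subparts.  */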
4277
4278 case SAD_EXPR:
4279 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4280 || !useless_type_conversion_p (lhs_type, rhs3_type)
4281 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4282 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4283 {
4284 error ("type mismatch in %qs", code_name);
4285 debug_generic_expr (lhs_type);
4286 debug_generic_expr (rhs1_type);
4287 debug_generic_expr (rhs2_type);
4288 debug_generic_expr (rhs3_type);
4289 return true;
4290 }
4291
4292 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4293 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4294 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4295 {
4296 error ("vector types expected in %qs", code_name);
4297 debug_generic_expr (lhs_type);
4298 debug_generic_expr (rhs1_type);
4299 debug_generic_expr (rhs2_type);
4300 debug_generic_expr (rhs3_type);
4301 return true;
4302 }
4303
4304 return false;
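
/* SAD_EXPR <v1, v2, acc> subtracts the elements of V1 and V2 pairwise,
   sums the absolute differences and adds the sum into the wider
   accumulator ACC, informally e.g.

     _4 = SAD_EXPR <_1, _2, _3>;
     with _1, _2 : vector(8) short and _3, _4 : vector(4) int

   (the vector shapes here are made up for illustration).  */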
4305
4306 case BIT_INSERT_EXPR:
4307 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4308 {
4309 error ("type mismatch in %qs", code_name);
4310 debug_generic_expr (lhs_type);
4311 debug_generic_expr (rhs1_type);
4312 return true;
4313 }
4314 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4315 && INTEGRAL_TYPE_P (rhs2_type))
4316 /* Vector element insert. */
4317 || (VECTOR_TYPE_P (rhs1_type)
4318 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4319 /* Aligned sub-vector insert. */
4320 || (VECTOR_TYPE_P (rhs1_type)
4321 && VECTOR_TYPE_P (rhs2_type)
4322 && types_compatible_p (TREE_TYPE (rhs1_type),
4323 TREE_TYPE (rhs2_type))
4324 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4325 TYPE_VECTOR_SUBPARTS (rhs2_type))
4326 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
4327 {
4328 error ("type combination not allowed in %qs", code_name);
4329 debug_generic_expr (rhs1_type);
4330 debug_generic_expr (rhs2_type);
4331 return true;
4332 }
4333 if (! tree_fits_uhwi_p (rhs3)
4334 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4335 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4336 {
4337 error ("invalid position or size in %qs", code_name);
4338 return true;
4339 }
4340 if (INTEGRAL_TYPE_P (rhs1_type)
4341 && !type_has_mode_precision_p (rhs1_type))
4342 {
4343 error ("%qs into non-mode-precision operand", code_name);
4344 return true;
4345 }
4346 if (INTEGRAL_TYPE_P (rhs1_type))
4347 {
4348 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4349 if (bitpos >= TYPE_PRECISION (rhs1_type)
4350 || (bitpos + TYPE_PRECISION (rhs2_type)
4351 > TYPE_PRECISION (rhs1_type)))
4352 {
4353 error ("insertion out of range in %qs", code_name);
4354 return true;
4355 }
4356 }
4357 else if (VECTOR_TYPE_P (rhs1_type))
4358 {
4359 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4360 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4361 if (bitpos % bitsize != 0)
4362 {
4363 error ("%qs not at element boundary", code_name);
4364 return true;
4365 }
4366 }
4367 return false;
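
/* Two accepted shapes, shown informally with made-up operands:

     long _3 = BIT_INSERT_EXPR <_1, _2, 0>;   inserts the int _2 at
     bits 0..31 of _1, and

     vector(4) int _3 = BIT_INSERT_EXPR <_1, _2, 64>;   replaces
     element 2 of the vector _1 by the scalar _2.

   In the vector case the bit position must be a multiple of the
   element size, as checked just above.  */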
4368
4369 case DOT_PROD_EXPR:
4370 {
4371 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4372 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4373 && ((!INTEGRAL_TYPE_P (rhs1_type)
4374 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4375 || (!INTEGRAL_TYPE_P (lhs_type)
4376 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4377 || !types_compatible_p (rhs1_type, rhs2_type)
4378 || !useless_type_conversion_p (lhs_type, rhs3_type)
4379 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4380 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4381 {
4382 error ("type mismatch in %qs", code_name);
4383 debug_generic_expr (lhs_type);
4384 debug_generic_expr (rhs1_type);
4385 debug_generic_expr (rhs2_type);
4386 return true;
4387 }
4388 return false;
4389 }
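
/* DOT_PROD_EXPR <v1, v2, acc> multiplies corresponding elements of V1
   and V2, sums the products and adds the result into the wider
   accumulator ACC, informally e.g.

     _4 = DOT_PROD_EXPR <_1, _2, _3>;
     with _1, _2 : vector(16) char and _3, _4 : vector(4) int

   (the vector shapes here are made up for illustration).  */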
4390
4391 case REALIGN_LOAD_EXPR:
4392 /* FIXME. */
4393 return false;
4394
4395 default:
4396 gcc_unreachable ();
4397 }
4398 return false;
4399 }
4400
4401 /* Verify a gimple assignment statement STMT with a single rhs.
4402 Returns true if anything is wrong. */
4403
4404 static bool
4405 verify_gimple_assign_single (gassign *stmt)
4406 {
4407 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4408 tree lhs = gimple_assign_lhs (stmt);
4409 tree lhs_type = TREE_TYPE (lhs);
4410 tree rhs1 = gimple_assign_rhs1 (stmt);
4411 tree rhs1_type = TREE_TYPE (rhs1);
4412 bool res = false;
4413
4414 const char* const code_name = get_tree_code_name (rhs_code);
4415
4416 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4417 {
4418 error ("non-trivial conversion in %qs", code_name);
4419 debug_generic_expr (lhs_type);
4420 debug_generic_expr (rhs1_type);
4421 return true;
4422 }
4423
4424 if (gimple_clobber_p (stmt)
4425 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4426 {
4427 error ("%qs LHS in clobber statement",
4428 get_tree_code_name (TREE_CODE (lhs)));
4429 debug_generic_expr (lhs);
4430 return true;
4431 }
4432
4433 if (handled_component_p (lhs)
4434 || TREE_CODE (lhs) == MEM_REF
4435 || TREE_CODE (lhs) == TARGET_MEM_REF)
4436 res |= verify_types_in_gimple_reference (lhs, true);
4437
4438 /* Special codes we cannot handle via their class. */
4439 switch (rhs_code)
4440 {
4441 case ADDR_EXPR:
4442 {
4443 tree op = TREE_OPERAND (rhs1, 0);
4444 if (!is_gimple_addressable (op))
4445 {
4446 error ("invalid operand in %qs", code_name);
4447 return true;
4448 }
4449
4450 /* Technically there is no longer a need for matching types, but
4451 gimple hygiene asks for this check. In LTO we can combine
4452 incompatible units and thus end up with addresses of globals
4453 that change their type to a common one. */
4454 if (!in_lto_p
4455 && !types_compatible_p (TREE_TYPE (op),
4456 TREE_TYPE (TREE_TYPE (rhs1)))
4457 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4458 TREE_TYPE (op)))
4459 {
4460 error ("type mismatch in %qs", code_name);
4461 debug_generic_stmt (TREE_TYPE (rhs1));
4462 debug_generic_stmt (TREE_TYPE (op));
4463 return true;
4464 }
4465
4466 return (verify_address (rhs1, true)
4467 || verify_types_in_gimple_reference (op, true));
4468 }
4469
4470 /* tcc_reference */
4471 case INDIRECT_REF:
4472 error ("%qs in gimple IL", code_name);
4473 return true;
4474
4475 case COMPONENT_REF:
4476 case BIT_FIELD_REF:
4477 case ARRAY_REF:
4478 case ARRAY_RANGE_REF:
4479 case VIEW_CONVERT_EXPR:
4480 case REALPART_EXPR:
4481 case IMAGPART_EXPR:
4482 case TARGET_MEM_REF:
4483 case MEM_REF:
4484 if (!is_gimple_reg (lhs)
4485 && is_gimple_reg_type (TREE_TYPE (lhs)))
4486 {
4487 error ("invalid RHS for gimple memory store: %qs", code_name);
4488 debug_generic_stmt (lhs);
4489 debug_generic_stmt (rhs1);
4490 return true;
4491 }
4492 return res || verify_types_in_gimple_reference (rhs1, false);
4493
4494 /* tcc_constant */
4495 case SSA_NAME:
4496 case INTEGER_CST:
4497 case REAL_CST:
4498 case FIXED_CST:
4499 case COMPLEX_CST:
4500 case VECTOR_CST:
4501 case STRING_CST:
4502 return res;
4503
4504 /* tcc_declaration */
4505 case CONST_DECL:
4506 return res;
4507 case VAR_DECL:
4508 case PARM_DECL:
4509 if (!is_gimple_reg (lhs)
4510 && !is_gimple_reg (rhs1)
4511 && is_gimple_reg_type (TREE_TYPE (lhs)))
4512 {
4513 error ("invalid RHS for gimple memory store: %qs", code_name);
4514 debug_generic_stmt (lhs);
4515 debug_generic_stmt (rhs1);
4516 return true;
4517 }
4518 return res;
4519
4520 case CONSTRUCTOR:
4521 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4522 {
4523 unsigned int i;
4524 tree elt_i, elt_v, elt_t = NULL_TREE;
4525
4526 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4527 return res;
4528 /* For vector CONSTRUCTORs we require that either it is an empty
4529 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4530 (then the element count must be correct to cover the whole
4531 outer vector and the index must be NULL on all elements), or it
4532 is a CONSTRUCTOR of scalar elements, where as an exception we
4533 allow a smaller number of elements (assuming zero filling) and
4534 consecutive indexes as compared to NULL indexes (such
4535 CONSTRUCTORs can appear in the IL from FEs). */
4536 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4537 {
4538 if (elt_t == NULL_TREE)
4539 {
4540 elt_t = TREE_TYPE (elt_v);
4541 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4542 {
4543 /* ELT_T is the vector type of the CONSTRUCTOR elements here. */
4544 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4545 TREE_TYPE (elt_t)))
4546 {
4547 error ("incorrect type of vector %qs elements",
4548 code_name);
4549 debug_generic_stmt (rhs1);
4550 return true;
4551 }
4552 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4553 * TYPE_VECTOR_SUBPARTS (elt_t),
4554 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4555 {
4556 error ("incorrect number of vector %qs elements",
4557 code_name);
4558 debug_generic_stmt (rhs1);
4559 return true;
4560 }
4561 }
4562 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4563 elt_t))
4564 {
4565 error ("incorrect type of vector %qs elements",
4566 code_name);
4567 debug_generic_stmt (rhs1);
4568 return true;
4569 }
4570 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4571 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4572 {
4573 error ("incorrect number of vector %qs elements",
4574 code_name);
4575 debug_generic_stmt (rhs1);
4576 return true;
4577 }
4578 }
4579 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4580 {
4581 error ("incorrect type of vector CONSTRUCTOR elements");
4582 debug_generic_stmt (rhs1);
4583 return true;
4584 }
4585 if (elt_i != NULL_TREE
4586 && (TREE_CODE (elt_t) == VECTOR_TYPE
4587 || TREE_CODE (elt_i) != INTEGER_CST
4588 || compare_tree_int (elt_i, i) != 0))
4589 {
4590 error ("vector %qs with non-NULL element index",
4591 code_name);
4592 debug_generic_stmt (rhs1);
4593 return true;
4594 }
4595 if (!is_gimple_val (elt_v))
4596 {
4597 error ("vector %qs element is not a GIMPLE value",
4598 code_name);
4599 debug_generic_stmt (rhs1);
4600 return true;
4601 }
4602 }
4603 }
4604 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4605 {
4606 error ("non-vector %qs with elements", code_name);
4607 debug_generic_stmt (rhs1);
4608 return true;
4609 }
4610 return res;
4611
4612 case ASSERT_EXPR:
4613 /* FIXME. */
4614 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4615 if (rhs1 == boolean_false_node)
4616 {
4617 error ("%qs with an always-false condition", code_name);
4618 debug_generic_stmt (rhs1);
4619 return true;
4620 }
4621 break;
4622
4623 case OBJ_TYPE_REF:
4624 case WITH_SIZE_EXPR:
4625 /* FIXME. */
4626 return res;
4627
4628 default:;
4629 }
4630
4631 return res;
4632 }
4633
4634 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4635 is a problem, otherwise false. */
4636
4637 static bool
4638 verify_gimple_assign (gassign *stmt)
4639 {
4640 switch (gimple_assign_rhs_class (stmt))
4641 {
4642 case GIMPLE_SINGLE_RHS:
4643 return verify_gimple_assign_single (stmt);
4644
4645 case GIMPLE_UNARY_RHS:
4646 return verify_gimple_assign_unary (stmt);
4647
4648 case GIMPLE_BINARY_RHS:
4649 return verify_gimple_assign_binary (stmt);
4650
4651 case GIMPLE_TERNARY_RHS:
4652 return verify_gimple_assign_ternary (stmt);
4653
4654 default:
4655 gcc_unreachable ();
4656 }
4657 }
4658
4659 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4660 is a problem, otherwise false. */
4661
4662 static bool
4663 verify_gimple_return (greturn *stmt)
4664 {
4665 tree op = gimple_return_retval (stmt);
4666 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4667
4668 /* We cannot test for present return values as we do not fix up missing
4669 return values from the original source. */
4670 if (op == NULL)
4671 return false;
4672
4673 if (!is_gimple_val (op)
4674 && TREE_CODE (op) != RESULT_DECL)
4675 {
4676 error ("invalid operand in return statement");
4677 debug_generic_stmt (op);
4678 return true;
4679 }
4680
4681 if ((TREE_CODE (op) == RESULT_DECL
4682 && DECL_BY_REFERENCE (op))
4683 || (TREE_CODE (op) == SSA_NAME
4684 && SSA_NAME_VAR (op)
4685 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4686 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4687 op = TREE_TYPE (op);
4688
4689 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4690 {
4691 error ("invalid conversion in return statement");
4692 debug_generic_stmt (restype);
4693 debug_generic_stmt (TREE_TYPE (op));
4694 return true;
4695 }
4696
4697 return false;
4698 }
4699
4700
4701 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4702 is a problem, otherwise false. */
4703
4704 static bool
4705 verify_gimple_goto (ggoto *stmt)
4706 {
4707 tree dest = gimple_goto_dest (stmt);
4708
4709 /* ??? We have two canonical forms of direct goto destinations, a
4710 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4711 if (TREE_CODE (dest) != LABEL_DECL
4712 && (!is_gimple_val (dest)
4713 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4714 {
4715 error ("goto destination is neither a label nor a pointer");
4716 return true;
4717 }
4718
4719 return false;
4720 }
4721
4722 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4723 is a problem, otherwise false. */
4724
4725 static bool
4726 verify_gimple_switch (gswitch *stmt)
4727 {
4728 unsigned int i, n;
4729 tree elt, prev_upper_bound = NULL_TREE;
4730 tree index_type, elt_type = NULL_TREE;
4731
4732 if (!is_gimple_val (gimple_switch_index (stmt)))
4733 {
4734 error ("invalid operand to switch statement");
4735 debug_generic_stmt (gimple_switch_index (stmt));
4736 return true;
4737 }
4738
4739 index_type = TREE_TYPE (gimple_switch_index (stmt));
4740 if (! INTEGRAL_TYPE_P (index_type))
4741 {
4742 error ("non-integral type in switch statement");
4743 debug_generic_expr (index_type);
4744 return true;
4745 }
4746
4747 elt = gimple_switch_label (stmt, 0);
4748 if (CASE_LOW (elt) != NULL_TREE
4749 || CASE_HIGH (elt) != NULL_TREE
4750 || CASE_CHAIN (elt) != NULL_TREE)
4751 {
4752 error ("invalid default case label in switch statement");
4753 debug_generic_expr (elt);
4754 return true;
4755 }
4756
4757 n = gimple_switch_num_labels (stmt);
4758 for (i = 1; i < n; i++)
4759 {
4760 elt = gimple_switch_label (stmt, i);
4761
4762 if (CASE_CHAIN (elt))
4763 {
4764 error ("invalid %<CASE_CHAIN%>");
4765 debug_generic_expr (elt);
4766 return true;
4767 }
4768 if (! CASE_LOW (elt))
4769 {
4770 error ("invalid case label in switch statement");
4771 debug_generic_expr (elt);
4772 return true;
4773 }
4774 if (CASE_HIGH (elt)
4775 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4776 {
4777 error ("invalid case range in switch statement");
4778 debug_generic_expr (elt);
4779 return true;
4780 }
4781
4782 if (elt_type)
4783 {
4784 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4785 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4786 {
4787 error ("type mismatch for case label in switch statement");
4788 debug_generic_expr (elt);
4789 return true;
4790 }
4791 }
4792 else
4793 {
4794 elt_type = TREE_TYPE (CASE_LOW (elt));
4795 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4796 {
4797 error ("type precision mismatch in switch statement");
4798 return true;
4799 }
4800 }
4801
4802 if (prev_upper_bound)
4803 {
4804 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4805 {
4806 error ("case labels not sorted in switch statement");
4807 return true;
4808 }
4809 }
4810
4811 prev_upper_bound = CASE_HIGH (elt);
4812 if (! prev_upper_bound)
4813 prev_upper_bound = CASE_LOW (elt);
4814 }
4815
4816 return false;
4817 }
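
/* As a concrete illustration, the function above accepts a switch of
   the shape (informal dump syntax)

     switch (i_1) <default: <L0>, case 1: <L1>, case 3 ... 7: <L2>>

   where the default label comes first and has no CASE_LOW, the other
   labels are sorted and non-overlapping, and all CASE_LOW/CASE_HIGH
   constants share one type whose precision does not exceed that of
   the index i_1.  */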
4818
4819 /* Verify a gimple debug statement STMT.
4820 Returns true if anything is wrong. */
4821
4822 static bool
4823 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4824 {
4825 /* There isn't much that could be wrong in a gimple debug stmt. A
4826 gimple debug bind stmt, for example, maps a tree (usually a
4827 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4828 or member of an aggregate type) to another tree, which can be an
4829 arbitrary expression. These stmts expand into debug insns, and
4830 are converted to debug notes by var-tracking.c. */
4831 return false;
4832 }
4833
4834 /* Verify a gimple label statement STMT.
4835 Returns true if anything is wrong. */
4836
4837 static bool
4838 verify_gimple_label (glabel *stmt)
4839 {
4840 tree decl = gimple_label_label (stmt);
4841 int uid;
4842 bool err = false;
4843
4844 if (TREE_CODE (decl) != LABEL_DECL)
4845 return true;
4846 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4847 && DECL_CONTEXT (decl) != current_function_decl)
4848 {
4849 error ("label context is not the current function declaration");
4850 err |= true;
4851 }
4852
4853 uid = LABEL_DECL_UID (decl);
4854 if (cfun->cfg
4855 && (uid == -1
4856 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4857 {
4858 error ("incorrect entry in %<label_to_block_map%>");
4859 err |= true;
4860 }
4861
4862 uid = EH_LANDING_PAD_NR (decl);
4863 if (uid)
4864 {
4865 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4866 if (decl != lp->post_landing_pad)
4867 {
4868 error ("incorrect setting of landing pad number");
4869 err |= true;
4870 }
4871 }
4872
4873 return err;
4874 }
4875
4876 /* Verify a gimple cond statement STMT.
4877 Returns true if anything is wrong. */
4878
4879 static bool
4880 verify_gimple_cond (gcond *stmt)
4881 {
4882 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4883 {
4884 error ("invalid comparison code in gimple cond");
4885 return true;
4886 }
4887 if (!(!gimple_cond_true_label (stmt)
4888 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4889 || !(!gimple_cond_false_label (stmt)
4890 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4891 {
4892 error ("invalid labels in gimple cond");
4893 return true;
4894 }
4895
4896 return verify_gimple_comparison (boolean_type_node,
4897 gimple_cond_lhs (stmt),
4898 gimple_cond_rhs (stmt),
4899 gimple_cond_code (stmt));
4900 }
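
/* E.g. a well-formed GIMPLE_COND is "if (a_1 < b_2)", possibly with a
   pair of LABEL_DECL targets; the comparison itself is validated like
   a boolean-valued comparison assignment.  */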
4901
4902 /* Verify the GIMPLE statement STMT. Returns true if there is an
4903 error, otherwise false. */
4904
4905 static bool
4906 verify_gimple_stmt (gimple *stmt)
4907 {
4908 switch (gimple_code (stmt))
4909 {
4910 case GIMPLE_ASSIGN:
4911 return verify_gimple_assign (as_a <gassign *> (stmt));
4912
4913 case GIMPLE_LABEL:
4914 return verify_gimple_label (as_a <glabel *> (stmt));
4915
4916 case GIMPLE_CALL:
4917 return verify_gimple_call (as_a <gcall *> (stmt));
4918
4919 case GIMPLE_COND:
4920 return verify_gimple_cond (as_a <gcond *> (stmt));
4921
4922 case GIMPLE_GOTO:
4923 return verify_gimple_goto (as_a <ggoto *> (stmt));
4924
4925 case GIMPLE_SWITCH:
4926 return verify_gimple_switch (as_a <gswitch *> (stmt));
4927
4928 case GIMPLE_RETURN:
4929 return verify_gimple_return (as_a <greturn *> (stmt));
4930
4931 case GIMPLE_ASM:
4932 return false;
4933
4934 case GIMPLE_TRANSACTION:
4935 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4936
4937 /* Tuples that do not have tree operands. */
4938 case GIMPLE_NOP:
4939 case GIMPLE_PREDICT:
4940 case GIMPLE_RESX:
4941 case GIMPLE_EH_DISPATCH:
4942 case GIMPLE_EH_MUST_NOT_THROW:
4943 return false;
4944
4945 CASE_GIMPLE_OMP:
4946 /* OpenMP directives are validated by the FE and never operated
4947 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4948 non-gimple expressions when the main index variable has had
4949 its address taken. This does not affect the loop itself
4950 because the header of a GIMPLE_OMP_FOR is merely used to determine
4951 how to set up the parallel iteration. */
4952 return false;
4953
4954 case GIMPLE_DEBUG:
4955 return verify_gimple_debug (stmt);
4956
4957 default:
4958 gcc_unreachable ();
4959 }
4960 }
4961
4962 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4963 and false otherwise. */
4964
4965 static bool
4966 verify_gimple_phi (gphi *phi)
4967 {
4968 bool err = false;
4969 unsigned i;
4970 tree phi_result = gimple_phi_result (phi);
4971 bool virtual_p;
4972
4973 if (!phi_result)
4974 {
4975 error ("invalid %<PHI%> result");
4976 return true;
4977 }
4978
4979 virtual_p = virtual_operand_p (phi_result);
4980 if (TREE_CODE (phi_result) != SSA_NAME
4981 || (virtual_p
4982 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4983 {
4984 error ("invalid %<PHI%> result");
4985 err = true;
4986 }
4987
4988 for (i = 0; i < gimple_phi_num_args (phi); i++)
4989 {
4990 tree t = gimple_phi_arg_def (phi, i);
4991
4992 if (!t)
4993 {
4994 error ("missing %<PHI%> def");
4995 err |= true;
4996 continue;
4997 }
4998 /* Addressable variables do have SSA_NAMEs but they
4999 are not considered gimple values. */
5000 else if ((TREE_CODE (t) == SSA_NAME
5001 && virtual_p != virtual_operand_p (t))
5002 || (virtual_p
5003 && (TREE_CODE (t) != SSA_NAME
5004 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5005 || (!virtual_p
5006 && !is_gimple_val (t)))
5007 {
5008 error ("invalid %<PHI%> argument");
5009 debug_generic_expr (t);
5010 err |= true;
5011 }
5012 #ifdef ENABLE_TYPES_CHECKING
5013 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5014 {
5015 error ("incompatible types in %<PHI%> argument %u", i);
5016 debug_generic_stmt (TREE_TYPE (phi_result));
5017 debug_generic_stmt (TREE_TYPE (t));
5018 err |= true;
5019 }
5020 #endif
5021 }
5022
5023 return err;
5024 }
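
/* For example, in the SSA form of

     int f (int a) { int x; if (a) x = 1; else x = 2; return x; }

   the join block contains "x_3 = PHI <1(2), 2(3)>" (block numbers are
   illustrative): the result is an SSA name and every argument is a
   GIMPLE value, so the checks above pass.  A virtual PHI instead has
   the virtual operand both as its result and as all of its arguments.  */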
5025
5026 /* Verify the GIMPLE statements inside the sequence STMTS. */
5027
5028 static bool
5029 verify_gimple_in_seq_2 (gimple_seq stmts)
5030 {
5031 gimple_stmt_iterator ittr;
5032 bool err = false;
5033
5034 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5035 {
5036 gimple *stmt = gsi_stmt (ittr);
5037
5038 switch (gimple_code (stmt))
5039 {
5040 case GIMPLE_BIND:
5041 err |= verify_gimple_in_seq_2 (
5042 gimple_bind_body (as_a <gbind *> (stmt)));
5043 break;
5044
5045 case GIMPLE_TRY:
5046 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5047 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5048 break;
5049
5050 case GIMPLE_EH_FILTER:
5051 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5052 break;
5053
5054 case GIMPLE_EH_ELSE:
5055 {
5056 geh_else *eh_else = as_a <geh_else *> (stmt);
5057 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5058 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5059 }
5060 break;
5061
5062 case GIMPLE_CATCH:
5063 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5064 as_a <gcatch *> (stmt)));
5065 break;
5066
5067 case GIMPLE_TRANSACTION:
5068 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5069 break;
5070
5071 default:
5072 {
5073 bool err2 = verify_gimple_stmt (stmt);
5074 if (err2)
5075 debug_gimple_stmt (stmt);
5076 err |= err2;
5077 }
5078 }
5079 }
5080
5081 return err;
5082 }
5083
5084 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5085 is a problem, otherwise false. */
5086
5087 static bool
5088 verify_gimple_transaction (gtransaction *stmt)
5089 {
5090 tree lab;
5091
5092 lab = gimple_transaction_label_norm (stmt);
5093 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5094 return true;
5095 lab = gimple_transaction_label_uninst (stmt);
5096 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5097 return true;
5098 lab = gimple_transaction_label_over (stmt);
5099 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5100 return true;
5101
5102 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5103 }
5104
5105
5106 /* Verify the GIMPLE statements inside the statement list STMTS. */
5107
5108 DEBUG_FUNCTION void
5109 verify_gimple_in_seq (gimple_seq stmts)
5110 {
5111 timevar_push (TV_TREE_STMT_VERIFY);
5112 if (verify_gimple_in_seq_2 (stmts))
5113 internal_error ("%<verify_gimple%> failed");
5114 timevar_pop (TV_TREE_STMT_VERIFY);
5115 }
5116
5117 /* Return true when T can be shared. */
5118
5119 static bool
5120 tree_node_can_be_shared (tree t)
5121 {
5122 if (IS_TYPE_OR_DECL_P (t)
5123 || TREE_CODE (t) == SSA_NAME
5124 || TREE_CODE (t) == IDENTIFIER_NODE
5125 || TREE_CODE (t) == CASE_LABEL_EXPR
5126 || is_gimple_min_invariant (t))
5127 return true;
5128
5129 if (t == error_mark_node)
5130 return true;
5131
5132 return false;
5133 }
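
/* E.g. the INTEGER_CST for zero, a VAR_DECL or an SSA_NAME may appear
   in any number of statements, while expression trees such as a
   MEM_REF must be unshared so that every statement owns its operand
   trees.  */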
5134
5135 /* Called via walk_tree. Verify tree sharing. */
5136
5137 static tree
5138 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5139 {
5140 hash_set<void *> *visited = (hash_set<void *> *) data;
5141
5142 if (tree_node_can_be_shared (*tp))
5143 {
5144 *walk_subtrees = false;
5145 return NULL;
5146 }
5147
5148 if (visited->add (*tp))
5149 return *tp;
5150
5151 return NULL;
5152 }
5153
5154 /* Called via walk_gimple_stmt. Verify tree sharing. */
5155
5156 static tree
5157 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5158 {
5159 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5160 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5161 }
5162
5163 static bool eh_error_found;
5164 bool
5165 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5166 hash_set<gimple *> *visited)
5167 {
5168 if (!visited->contains (stmt))
5169 {
5170 error ("dead statement in EH table");
5171 debug_gimple_stmt (stmt);
5172 eh_error_found = true;
5173 }
5174 return true;
5175 }
5176
5177 /* Verify that the block of location LOC is in BLOCKS. */
5178
5179 static bool
5180 verify_location (hash_set<tree> *blocks, location_t loc)
5181 {
5182 tree block = LOCATION_BLOCK (loc);
5183 if (block != NULL_TREE
5184 && !blocks->contains (block))
5185 {
5186 error ("location references block not in block tree");
5187 return true;
5188 }
5189 if (block != NULL_TREE)
5190 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5191 return false;
5192 }
5193
5194 /* Called via walk_tree. Verify that expressions have no blocks. */
5195
5196 static tree
5197 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5198 {
5199 if (!EXPR_P (*tp))
5200 {
5201 *walk_subtrees = false;
5202 return NULL;
5203 }
5204
5205 location_t loc = EXPR_LOCATION (*tp);
5206 if (LOCATION_BLOCK (loc) != NULL)
5207 return *tp;
5208
5209 return NULL;
5210 }
5211
5212 /* Called via walk_tree. Verify locations of expressions. */
5213
5214 static tree
5215 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5216 {
5217 hash_set<tree> *blocks = (hash_set<tree> *) data;
5218 tree t = *tp;
5219
5220 /* ??? This doesn't really belong here but there's no good place to
5221 stick this remainder of old verify_expr. */
5222 /* ??? This barfs on debug stmts which contain binds to vars with
5223 different function context. */
5224 #if 0
5225 if (VAR_P (t)
5226 || TREE_CODE (t) == PARM_DECL
5227 || TREE_CODE (t) == RESULT_DECL)
5228 {
5229 tree context = decl_function_context (t);
5230 if (context != cfun->decl
5231 && !SCOPE_FILE_SCOPE_P (context)
5232 && !TREE_STATIC (t)
5233 && !DECL_EXTERNAL (t))
5234 {
5235 error ("local declaration from a different function");
5236 return t;
5237 }
5238 }
5239 #endif
5240
5241 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5242 {
5243 tree x = DECL_DEBUG_EXPR (t);
5244 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5245 if (addr)
5246 return addr;
5247 }
5248 if ((VAR_P (t)
5249 || TREE_CODE (t) == PARM_DECL
5250 || TREE_CODE (t) == RESULT_DECL)
5251 && DECL_HAS_VALUE_EXPR_P (t))
5252 {
5253 tree x = DECL_VALUE_EXPR (t);
5254 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5255 if (addr)
5256 return addr;
5257 }
5258
5259 if (!EXPR_P (t))
5260 {
5261 *walk_subtrees = false;
5262 return NULL;
5263 }
5264
5265 location_t loc = EXPR_LOCATION (t);
5266 if (verify_location (blocks, loc))
5267 return t;
5268
5269 return NULL;
5270 }
5271
5272 /* Called via walk_gimple_op. Verify locations of expressions. */
5273
5274 static tree
5275 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5276 {
5277 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5278 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5279 }
5280
5281 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5282
5283 static void
5284 collect_subblocks (hash_set<tree> *blocks, tree block)
5285 {
5286 tree t;
5287 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5288 {
5289 blocks->add (t);
5290 collect_subblocks (blocks, t);
5291 }
5292 }
5293
5294 /* Disable warnings about missing quoting in GCC diagnostics for
5295 the verification errors. Their format strings don't follow
5296 GCC diagnostic conventions and trigger an ICE in the end. */
5297 #if __GNUC__ >= 10
5298 # pragma GCC diagnostic push
5299 # pragma GCC diagnostic ignored "-Wformat-diag"
5300 #endif
5301
5302 /* Verify the GIMPLE statements in the CFG of FN. */
5303
5304 DEBUG_FUNCTION void
5305 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5306 {
5307 basic_block bb;
5308 bool err = false;
5309
5310 timevar_push (TV_TREE_STMT_VERIFY);
5311 hash_set<void *> visited;
5312 hash_set<gimple *> visited_throwing_stmts;
5313
5314 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5315 hash_set<tree> blocks;
5316 if (DECL_INITIAL (fn->decl))
5317 {
5318 blocks.add (DECL_INITIAL (fn->decl));
5319 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5320 }
5321
5322 FOR_EACH_BB_FN (bb, fn)
5323 {
5324 gimple_stmt_iterator gsi;
5325 edge_iterator ei;
5326 edge e;
5327
5328 for (gphi_iterator gpi = gsi_start_phis (bb);
5329 !gsi_end_p (gpi);
5330 gsi_next (&gpi))
5331 {
5332 gphi *phi = gpi.phi ();
5333 bool err2 = false;
5334 unsigned i;
5335
5336 if (gimple_bb (phi) != bb)
5337 {
5338 error ("gimple_bb (phi) is set to a wrong basic block");
5339 err2 = true;
5340 }
5341
5342 err2 |= verify_gimple_phi (phi);
5343
5344 /* Only PHI arguments have locations. */
5345 if (gimple_location (phi) != UNKNOWN_LOCATION)
5346 {
5347 error ("PHI node with location");
5348 err2 = true;
5349 }
5350
5351 for (i = 0; i < gimple_phi_num_args (phi); i++)
5352 {
5353 tree arg = gimple_phi_arg_def (phi, i);
5354 tree addr = walk_tree (&arg, verify_node_sharing_1,
5355 &visited, NULL);
5356 if (addr)
5357 {
5358 error ("incorrect sharing of tree nodes");
5359 debug_generic_expr (addr);
5360 err2 |= true;
5361 }
5362 location_t loc = gimple_phi_arg_location (phi, i);
5363 if (virtual_operand_p (gimple_phi_result (phi))
5364 && loc != UNKNOWN_LOCATION)
5365 {
5366 error ("virtual PHI with argument locations");
5367 err2 = true;
5368 }
5369 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5370 if (addr)
5371 {
5372 debug_generic_expr (addr);
5373 err2 = true;
5374 }
5375 err2 |= verify_location (&blocks, loc);
5376 }
5377
5378 if (err2)
5379 debug_gimple_stmt (phi);
5380 err |= err2;
5381 }
5382
5383 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5384 {
5385 gimple *stmt = gsi_stmt (gsi);
5386 bool err2 = false;
5387 struct walk_stmt_info wi;
5388 tree addr;
5389 int lp_nr;
5390
5391 if (gimple_bb (stmt) != bb)
5392 {
5393 error ("gimple_bb (stmt) is set to a wrong basic block");
5394 err2 = true;
5395 }
5396
5397 err2 |= verify_gimple_stmt (stmt);
5398 err2 |= verify_location (&blocks, gimple_location (stmt));
5399
5400 memset (&wi, 0, sizeof (wi));
5401 wi.info = (void *) &visited;
5402 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5403 if (addr)
5404 {
5405 error ("incorrect sharing of tree nodes");
5406 debug_generic_expr (addr);
5407 err2 |= true;
5408 }
5409
5410 memset (&wi, 0, sizeof (wi));
5411 wi.info = (void *) &blocks;
5412 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5413 if (addr)
5414 {
5415 debug_generic_expr (addr);
5416 err2 |= true;
5417 }
5418
5419 /* If the statement is marked as part of an EH region, then it is
5420 expected that the statement could throw. Verify that when
5421 optimizations simplify a statement so that we can prove it
5422 cannot throw, the other data structures are updated to
5423 match. */
5424 lp_nr = lookup_stmt_eh_lp (stmt);
5425 if (lp_nr != 0)
5426 visited_throwing_stmts.add (stmt);
5427 if (lp_nr > 0)
5428 {
5429 if (!stmt_could_throw_p (cfun, stmt))
5430 {
5431 if (verify_nothrow)
5432 {
5433 error ("statement marked for throw, but doesn%'t");
5434 err2 |= true;
5435 }
5436 }
5437 else if (!gsi_one_before_end_p (gsi))
5438 {
5439 error ("statement marked for throw in middle of block");
5440 err2 |= true;
5441 }
5442 }
5443
5444 if (err2)
5445 debug_gimple_stmt (stmt);
5446 err |= err2;
5447 }
5448
5449 FOR_EACH_EDGE (e, ei, bb->succs)
5450 if (e->goto_locus != UNKNOWN_LOCATION)
5451 err |= verify_location (&blocks, e->goto_locus);
5452 }
5453
5454 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5455 eh_error_found = false;
5456 if (eh_table)
5457 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5458 (&visited_throwing_stmts);
5459
5460 if (err || eh_error_found)
5461 internal_error ("%<verify_gimple%> failed");
5462
5463 verify_histograms ();
5464 timevar_pop (TV_TREE_STMT_VERIFY);
5465 }
5466
5467
5468 /* Verifies that the flow information is OK. */
5469
5470 static int
5471 gimple_verify_flow_info (void)
5472 {
5473 int err = 0;
5474 basic_block bb;
5475 gimple_stmt_iterator gsi;
5476 gimple *stmt;
5477 edge e;
5478 edge_iterator ei;
5479
5480 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5481 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5482 {
5483 error ("ENTRY_BLOCK has IL associated with it");
5484 err = 1;
5485 }
5486
5487 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5488 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5489 {
5490 error ("EXIT_BLOCK has IL associated with it");
5491 err = 1;
5492 }
5493
5494 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5495 if (e->flags & EDGE_FALLTHRU)
5496 {
5497 error ("fallthru to exit from bb %d", e->src->index);
5498 err = 1;
5499 }
5500
5501 FOR_EACH_BB_FN (bb, cfun)
5502 {
5503 bool found_ctrl_stmt = false;
5504
5505 stmt = NULL;
5506
5507 /* Skip labels at the start of the basic block. */
5508 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5509 {
5510 tree label;
5511 gimple *prev_stmt = stmt;
5512
5513 stmt = gsi_stmt (gsi);
5514
5515 if (gimple_code (stmt) != GIMPLE_LABEL)
5516 break;
5517
5518 label = gimple_label_label (as_a <glabel *> (stmt));
5519 if (prev_stmt && DECL_NONLOCAL (label))
5520 {
5521 error ("nonlocal label ");
5522 print_generic_expr (stderr, label);
5523 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5524 bb->index);
5525 err = 1;
5526 }
5527
5528 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5529 {
5530 error ("EH landing pad label ");
5531 print_generic_expr (stderr, label);
5532 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5533 bb->index);
5534 err = 1;
5535 }
5536
5537 if (label_to_block (cfun, label) != bb)
5538 {
5539 error ("label ");
5540 print_generic_expr (stderr, label);
5541 fprintf (stderr, " to block does not match in bb %d",
5542 bb->index);
5543 err = 1;
5544 }
5545
5546 if (decl_function_context (label) != current_function_decl)
5547 {
5548 error ("label ");
5549 print_generic_expr (stderr, label);
5550 fprintf (stderr, " has incorrect context in bb %d",
5551 bb->index);
5552 err = 1;
5553 }
5554 }
5555
5556 /* Verify that the body of basic block BB is free of control flow. */
5557 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5558 {
5559 gimple *stmt = gsi_stmt (gsi);
5560
5561 if (found_ctrl_stmt)
5562 {
5563 error ("control flow in the middle of basic block %d",
5564 bb->index);
5565 err = 1;
5566 }
5567
5568 if (stmt_ends_bb_p (stmt))
5569 found_ctrl_stmt = true;
5570
5571 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5572 {
5573 error ("label ");
5574 print_generic_expr (stderr, gimple_label_label (label_stmt));
5575 fprintf (stderr, " in the middle of basic block %d", bb->index);
5576 err = 1;
5577 }
5578 }
5579
5580 gsi = gsi_last_nondebug_bb (bb);
5581 if (gsi_end_p (gsi))
5582 continue;
5583
5584 stmt = gsi_stmt (gsi);
5585
5586 if (gimple_code (stmt) == GIMPLE_LABEL)
5587 continue;
5588
5589 err |= verify_eh_edges (stmt);
5590
5591 if (is_ctrl_stmt (stmt))
5592 {
5593 FOR_EACH_EDGE (e, ei, bb->succs)
5594 if (e->flags & EDGE_FALLTHRU)
5595 {
5596 error ("fallthru edge after a control statement in bb %d",
5597 bb->index);
5598 err = 1;
5599 }
5600 }
5601
5602 if (gimple_code (stmt) != GIMPLE_COND)
5603 {
5604 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5605 after anything other than a GIMPLE_COND. */
5606 FOR_EACH_EDGE (e, ei, bb->succs)
5607 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5608 {
5609 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5610 bb->index);
5611 err = 1;
5612 }
5613 }
5614
5615 switch (gimple_code (stmt))
5616 {
5617 case GIMPLE_COND:
5618 {
5619 edge true_edge;
5620 edge false_edge;
5621
5622 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5623
5624 if (!true_edge
5625 || !false_edge
5626 || !(true_edge->flags & EDGE_TRUE_VALUE)
5627 || !(false_edge->flags & EDGE_FALSE_VALUE)
5628 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5629 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5630 || EDGE_COUNT (bb->succs) >= 3)
5631 {
5632 error ("wrong outgoing edge flags at end of bb %d",
5633 bb->index);
5634 err = 1;
5635 }
5636 }
5637 break;
5638
5639 case GIMPLE_GOTO:
5640 if (simple_goto_p (stmt))
5641 {
5642 error ("explicit goto at end of bb %d", bb->index);
5643 err = 1;
5644 }
5645 else
5646 {
5647 /* FIXME. We should double check that the labels in the
5648 destination blocks have their address taken. */
5649 FOR_EACH_EDGE (e, ei, bb->succs)
5650 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5651 | EDGE_FALSE_VALUE))
5652 || !(e->flags & EDGE_ABNORMAL))
5653 {
5654 error ("wrong outgoing edge flags at end of bb %d",
5655 bb->index);
5656 err = 1;
5657 }
5658 }
5659 break;
5660
5661 case GIMPLE_CALL:
5662 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5663 break;
5664 /* fallthru */
5665 case GIMPLE_RETURN:
5666 if (!single_succ_p (bb)
5667 || (single_succ_edge (bb)->flags
5668 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5669 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5670 {
5671 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5672 err = 1;
5673 }
5674 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5675 {
5676 error ("return edge does not point to exit in bb %d",
5677 bb->index);
5678 err = 1;
5679 }
5680 break;
5681
5682 case GIMPLE_SWITCH:
5683 {
5684 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5685 tree prev;
5686 edge e;
5687 size_t i, n;
5688
5689 n = gimple_switch_num_labels (switch_stmt);
5690
5691 /* Mark all the destination basic blocks. */
5692 for (i = 0; i < n; ++i)
5693 {
5694 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5695 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5696 label_bb->aux = (void *)1;
5697 }
5698
5699 /* Verify that the case labels are sorted. */
5700 prev = gimple_switch_label (switch_stmt, 0);
5701 for (i = 1; i < n; ++i)
5702 {
5703 tree c = gimple_switch_label (switch_stmt, i);
5704 if (!CASE_LOW (c))
5705 {
5706 error ("found default case not at the start of "
5707 "case vector");
5708 err = 1;
5709 continue;
5710 }
5711 if (CASE_LOW (prev)
5712 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5713 {
5714 error ("case labels not sorted: ");
5715 print_generic_expr (stderr, prev);
5716 fprintf (stderr, " is greater than ");
5717 print_generic_expr (stderr, c);
5718 fprintf (stderr, " but comes before it.\n");
5719 err = 1;
5720 }
5721 prev = c;
5722 }
5723 /* VRP will remove the default case if it can prove it will
5724 never be executed. So do not verify there always exists
5725 a default case here. */
5726
5727 FOR_EACH_EDGE (e, ei, bb->succs)
5728 {
5729 if (!e->dest->aux)
5730 {
5731 error ("extra outgoing edge %d->%d",
5732 bb->index, e->dest->index);
5733 err = 1;
5734 }
5735
5736 e->dest->aux = (void *)2;
5737 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5738 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5739 {
5740 error ("wrong outgoing edge flags at end of bb %d",
5741 bb->index);
5742 err = 1;
5743 }
5744 }
5745
5746 /* Check that we have all of them. */
5747 for (i = 0; i < n; ++i)
5748 {
5749 basic_block label_bb = gimple_switch_label_bb (cfun,
5750 switch_stmt, i);
5751
5752 if (label_bb->aux != (void *)2)
5753 {
5754 error ("missing edge %i->%i", bb->index, label_bb->index);
5755 err = 1;
5756 }
5757 }
5758
5759 FOR_EACH_EDGE (e, ei, bb->succs)
5760 e->dest->aux = (void *)0;
5761 }
5762 break;
5763
5764 case GIMPLE_EH_DISPATCH:
5765 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5766 break;
5767
5768 default:
5769 break;
5770 }
5771 }
5772
5773 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5774 verify_dominators (CDI_DOMINATORS);
5775
5776 return err;
5777 }
5778
5779 #if __GNUC__ >= 10
5780 # pragma GCC diagnostic pop
5781 #endif
5782
5783 /* Updates phi nodes after creating a forwarder block joined
5784 by edge FALLTHRU. */
5785
5786 static void
5787 gimple_make_forwarder_block (edge fallthru)
5788 {
5789 edge e;
5790 edge_iterator ei;
5791 basic_block dummy, bb;
5792 tree var;
5793 gphi_iterator gsi;
5794 bool forward_location_p;
5795
5796 dummy = fallthru->src;
5797 bb = fallthru->dest;
5798
5799 if (single_pred_p (bb))
5800 return;
5801
5802 /* We can forward location info if we have only one predecessor. */
5803 forward_location_p = single_pred_p (dummy);
5804
5805 /* If we redirected a branch we must create new PHI nodes at the
5806 start of BB. */
5807 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5808 {
5809 gphi *phi, *new_phi;
5810
5811 phi = gsi.phi ();
5812 var = gimple_phi_result (phi);
5813 new_phi = create_phi_node (var, bb);
5814 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5815 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5816 forward_location_p
5817 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
5818 }
5819
5820 /* Add the arguments we have stored on edges. */
5821 FOR_EACH_EDGE (e, ei, bb->preds)
5822 {
5823 if (e == fallthru)
5824 continue;
5825
5826 flush_pending_stmts (e);
5827 }
5828 }
5829
5830
5831 /* Return a non-special label in the head of basic block BLOCK.
5832 Create one if it doesn't exist. */
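/* (If a suitable label exists but is not the first statement, it is
 moved to the front so that it can serve as the block's canonical
 label.) */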
5833
5834 tree
5835 gimple_block_label (basic_block bb)
5836 {
5837 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5838 bool first = true;
5839 tree label;
5840 glabel *stmt;
5841
5842 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5843 {
5844 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5845 if (!stmt)
5846 break;
5847 label = gimple_label_label (stmt);
5848 if (!DECL_NONLOCAL (label))
5849 {
5850 if (!first)
5851 gsi_move_before (&i, &s);
5852 return label;
5853 }
5854 }
5855
5856 label = create_artificial_label (UNKNOWN_LOCATION);
5857 stmt = gimple_build_label (label);
5858 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5859 return label;
5860 }
5861
5862
5863 /* Attempt to perform edge redirection by replacing a possibly complex
5864 jump instruction by a goto or by removing the jump completely.
5865 This can apply only if all edges now point to the same block. The
5866 parameters and return values are equivalent to
5867 redirect_edge_and_branch. */
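/* For example (illustrative): if SRC ends in
 if (x_1 > 0) goto <bb A>; else goto <bb B>;
 and the successor edge other than E already reaches TARGET, redirecting
 E to TARGET makes both successors equal, so the GIMPLE_COND is removed
 and E becomes a plain EDGE_FALLTHRU edge. */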
5868
5869 static edge
5870 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5871 {
5872 basic_block src = e->src;
5873 gimple_stmt_iterator i;
5874 gimple *stmt;
5875
5876 /* We can replace or remove a complex jump only when we have exactly
5877 two edges. */
5878 if (EDGE_COUNT (src->succs) != 2
5879 /* Verify that all targets will be TARGET. Specifically, the
5880 edge that is not E must also go to TARGET. */
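 /* (The comparison EDGE_SUCC (src, 0) == e yields 0 or 1, which
 indexes the successor edge other than E.) */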
5881 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5882 return NULL;
5883
5884 i = gsi_last_bb (src);
5885 if (gsi_end_p (i))
5886 return NULL;
5887
5888 stmt = gsi_stmt (i);
5889
5890 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5891 {
5892 gsi_remove (&i, true);
5893 e = ssa_redirect_edge (e, target);
5894 e->flags = EDGE_FALLTHRU;
5895 return e;
5896 }
5897
5898 return NULL;
5899 }
5900
5901
5902 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5903 edge representing the redirected branch. */
5904
5905 static edge
5906 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5907 {
5908 basic_block bb = e->src;
5909 gimple_stmt_iterator gsi;
5910 edge ret;
5911 gimple *stmt;
5912
5913 if (e->flags & EDGE_ABNORMAL)
5914 return NULL;
5915
5916 if (e->dest == dest)
5917 return NULL;
5918
5919 if (e->flags & EDGE_EH)
5920 return redirect_eh_edge (e, dest);
5921
5922 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5923 {
5924 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5925 if (ret)
5926 return ret;
5927 }
5928
5929 gsi = gsi_last_nondebug_bb (bb);
5930 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5931
5932 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5933 {
5934 case GIMPLE_COND:
5935 /* For COND_EXPR, we only need to redirect the edge. */
5936 break;
5937
5938 case GIMPLE_GOTO:
5939 /* No non-abnormal edges should lead from a non-simple goto, and
5940 simple ones should be represented implicitly. */
5941 gcc_unreachable ();
5942
5943 case GIMPLE_SWITCH:
5944 {
5945 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5946 tree label = gimple_block_label (dest);
5947 tree cases = get_cases_for_edge (e, switch_stmt);
5948
5949 /* If we have a list of cases associated with E, then use it
5950 as it's a lot faster than walking the entire case vector. */
5951 if (cases)
5952 {
5953 edge e2 = find_edge (e->src, dest);
5954 tree last, first;
5955
5956 first = cases;
5957 while (cases)
5958 {
5959 last = cases;
5960 CASE_LABEL (cases) = label;
5961 cases = CASE_CHAIN (cases);
5962 }
5963
5964 /* If there was already an edge in the CFG, then we need
5965 to move all the cases associated with E to E2. */
5966 if (e2)
5967 {
5968 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5969
5970 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5971 CASE_CHAIN (cases2) = first;
5972 }
5973 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5974 }
5975 else
5976 {
5977 size_t i, n = gimple_switch_num_labels (switch_stmt);
5978
5979 for (i = 0; i < n; i++)
5980 {
5981 tree elt = gimple_switch_label (switch_stmt, i);
5982 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5983 CASE_LABEL (elt) = label;
5984 }
5985 }
5986 }
5987 break;
5988
5989 case GIMPLE_ASM:
5990 {
5991 gasm *asm_stmt = as_a <gasm *> (stmt);
5992 int i, n = gimple_asm_nlabels (asm_stmt);
5993 tree label = NULL;
5994
5995 for (i = 0; i < n; ++i)
5996 {
5997 tree cons = gimple_asm_label_op (asm_stmt, i);
5998 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
5999 {
6000 if (!label)
6001 label = gimple_block_label (dest);
6002 TREE_VALUE (cons) = label;
6003 }
6004 }
6005
6006 /* If we didn't find any label matching the former edge in the
6007 asm labels, we must be redirecting the fallthrough
6008 edge. */
6009 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6010 }
6011 break;
6012
6013 case GIMPLE_RETURN:
6014 gsi_remove (&gsi, true);
6015 e->flags |= EDGE_FALLTHRU;
6016 break;
6017
6018 case GIMPLE_OMP_RETURN:
6019 case GIMPLE_OMP_CONTINUE:
6020 case GIMPLE_OMP_SECTIONS_SWITCH:
6021 case GIMPLE_OMP_FOR:
6022 /* The edges from OMP constructs can be simply redirected. */
6023 break;
6024
6025 case GIMPLE_EH_DISPATCH:
6026 if (!(e->flags & EDGE_FALLTHRU))
6027 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6028 break;
6029
6030 case GIMPLE_TRANSACTION:
6031 if (e->flags & EDGE_TM_ABORT)
6032 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6033 gimple_block_label (dest));
6034 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6035 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6036 gimple_block_label (dest));
6037 else
6038 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6039 gimple_block_label (dest));
6040 break;
6041
6042 default:
6043 /* Otherwise it must be a fallthru edge, and we don't need to
6044 do anything besides redirecting it. */
6045 gcc_assert (e->flags & EDGE_FALLTHRU);
6046 break;
6047 }
6048
6049 /* Update/insert PHI nodes as necessary. */
6050
6051 /* Now update the edges in the CFG. */
6052 e = ssa_redirect_edge (e, dest);
6053
6054 return e;
6055 }
6056
6057 /* Returns true if it is possible to remove edge E by redirecting
6058 it to the destination of the other edge from E->src. */
6059
6060 static bool
6061 gimple_can_remove_branch_p (const_edge e)
6062 {
6063 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6064 return false;
6065
6066 return true;
6067 }
6068
6069 /* Simple wrapper, as we can always redirect fallthru edges. */
6070
6071 static basic_block
6072 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6073 {
6074 e = gimple_redirect_edge_and_branch (e, dest);
6075 gcc_assert (e);
6076
6077 return NULL;
6078 }
6079
6080
6081 /* Splits basic block BB after statement STMT (but at least after the
6082 labels). If STMT is NULL, BB is split just after the labels. */
6083
6084 static basic_block
6085 gimple_split_block (basic_block bb, void *stmt)
6086 {
6087 gimple_stmt_iterator gsi;
6088 gimple_stmt_iterator gsi_tgt;
6089 gimple_seq list;
6090 basic_block new_bb;
6091 edge e;
6092 edge_iterator ei;
6093
6094 new_bb = create_empty_bb (bb);
6095
6096 /* Redirect the outgoing edges. */
6097 new_bb->succs = bb->succs;
6098 bb->succs = NULL;
6099 FOR_EACH_EDGE (e, ei, new_bb->succs)
6100 e->src = new_bb;
6101
6102 /* Get a stmt iterator pointing to the first stmt to move. */
6103 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6104 gsi = gsi_after_labels (bb);
6105 else
6106 {
6107 gsi = gsi_for_stmt ((gimple *) stmt);
6108 gsi_next (&gsi);
6109 }
6110
6111 /* Move everything from GSI to the new basic block. */
6112 if (gsi_end_p (gsi))
6113 return new_bb;
6114
6115 /* Split the statement list - avoid creating new containers as this
6116 brings ugly quadratic memory consumption in the inliner.
6117 (We are still quadratic since we need to update stmt BB pointers,
6118 sadly.) */
6119 gsi_split_seq_before (&gsi, &list);
6120 set_bb_seq (new_bb, list);
6121 for (gsi_tgt = gsi_start (list);
6122 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6123 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6124
6125 return new_bb;
6126 }
6127
6128
6129 /* Moves basic block BB after block AFTER. */
6130
6131 static bool
6132 gimple_move_block_after (basic_block bb, basic_block after)
6133 {
6134 if (bb->prev_bb == after)
6135 return true;
6136
6137 unlink_block (bb);
6138 link_block (bb, after);
6139
6140 return true;
6141 }
6142
6143
6144 /* Return TRUE if block BB has no executable statements, otherwise return
6145 FALSE. */
6146
6147 static bool
6148 gimple_empty_block_p (basic_block bb)
6149 {
6150 /* BB must have no executable statements. */
6151 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6152 if (phi_nodes (bb))
6153 return false;
6154 while (!gsi_end_p (gsi))
6155 {
6156 gimple *stmt = gsi_stmt (gsi);
6157 if (is_gimple_debug (stmt))
6158 ;
6159 else if (gimple_code (stmt) == GIMPLE_NOP
6160 || gimple_code (stmt) == GIMPLE_PREDICT)
6161 ;
6162 else
6163 return false;
6164 gsi_next (&gsi);
6165 }
6166 return true;
6167 }
6168
6169
6170 /* Split a basic block if it ends with a conditional branch and if the
6171 other part of the block is not empty. */
6172
6173 static basic_block
6174 gimple_split_block_before_cond_jump (basic_block bb)
6175 {
6176 gimple *last, *split_point;
6177 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6178 if (gsi_end_p (gsi))
6179 return NULL;
6180 last = gsi_stmt (gsi);
6181 if (gimple_code (last) != GIMPLE_COND
6182 && gimple_code (last) != GIMPLE_SWITCH)
6183 return NULL;
6184 gsi_prev (&gsi);
6185 split_point = gsi_stmt (gsi);
6186 return split_block (bb, split_point)->dest;
6187 }
6188
6189
6190 /* Return true if basic block BB can be duplicated. */
6191
6192 static bool
6193 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6194 {
6195 return true;
6196 }
6197
6198 /* Create a duplicate of the basic block BB. NOTE: This does not
6199 preserve SSA form. */
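/* (Each new definition is registered via create_new_def_for, so callers
 are expected to repair the SSA web afterwards, e.g. with update_ssa
 or add_phi_args_after_copy.) */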
6200
6201 static basic_block
6202 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6203 {
6204 basic_block new_bb;
6205 gimple_stmt_iterator gsi_tgt;
6206
6207 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6208
6209 /* Copy the PHI nodes. We ignore PHI node arguments here because
6210 the incoming edges have not been setup yet. */
6211 for (gphi_iterator gpi = gsi_start_phis (bb);
6212 !gsi_end_p (gpi);
6213 gsi_next (&gpi))
6214 {
6215 gphi *phi, *copy;
6216 phi = gpi.phi ();
6217 copy = create_phi_node (NULL_TREE, new_bb);
6218 create_new_def_for (gimple_phi_result (phi), copy,
6219 gimple_phi_result_ptr (copy));
6220 gimple_set_uid (copy, gimple_uid (phi));
6221 }
6222
6223 gsi_tgt = gsi_start_bb (new_bb);
6224 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6225 !gsi_end_p (gsi);
6226 gsi_next (&gsi))
6227 {
6228 def_operand_p def_p;
6229 ssa_op_iter op_iter;
6230 tree lhs;
6231 gimple *stmt, *copy;
6232
6233 stmt = gsi_stmt (gsi);
6234 if (gimple_code (stmt) == GIMPLE_LABEL)
6235 continue;
6236
6237 /* Don't duplicate label debug stmts. */
6238 if (gimple_debug_bind_p (stmt)
6239 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6240 == LABEL_DECL)
6241 continue;
6242
6243 /* Create a new copy of STMT and duplicate STMT's virtual
6244 operands. */
6245 copy = gimple_copy (stmt);
6246 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6247
6248 maybe_duplicate_eh_stmt (copy, stmt);
6249 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6250
6251 /* When copying around a stmt writing into a local non-user
6252 aggregate, make sure it won't share a stack slot with other
6253 vars. */
6254 lhs = gimple_get_lhs (stmt);
6255 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6256 {
6257 tree base = get_base_address (lhs);
6258 if (base
6259 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6260 && DECL_IGNORED_P (base)
6261 && !TREE_STATIC (base)
6262 && !DECL_EXTERNAL (base)
6263 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6264 DECL_NONSHAREABLE (base) = 1;
6265 }
6266
6267 /* If requested, remap dependence info of cliques brought in
6268 via inlining. */
6269 if (id)
6270 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6271 {
6272 tree op = gimple_op (copy, i);
6273 if (!op)
6274 continue;
6275 if (TREE_CODE (op) == ADDR_EXPR
6276 || TREE_CODE (op) == WITH_SIZE_EXPR)
6277 op = TREE_OPERAND (op, 0);
6278 while (handled_component_p (op))
6279 op = TREE_OPERAND (op, 0);
6280 if ((TREE_CODE (op) == MEM_REF
6281 || TREE_CODE (op) == TARGET_MEM_REF)
6282 && MR_DEPENDENCE_CLIQUE (op) > 1
6283 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6284 {
6285 if (!id->dependence_map)
6286 id->dependence_map = new hash_map<dependence_hash,
6287 unsigned short>;
6288 bool existed;
6289 unsigned short &newc = id->dependence_map->get_or_insert
6290 (MR_DEPENDENCE_CLIQUE (op), &existed);
6291 if (!existed)
6292 {
6293 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6294 newc = ++cfun->last_clique;
6295 }
6296 MR_DEPENDENCE_CLIQUE (op) = newc;
6297 }
6298 }
6299
6300 /* Create new names for all the definitions created by COPY and
6301 add replacement mappings for each new name. */
6302 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6303 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6304 }
6305
6306 return new_bb;
6307 }
6308
6309 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6310
6311 static void
6312 add_phi_args_after_copy_edge (edge e_copy)
6313 {
6314 basic_block bb, bb_copy = e_copy->src, dest;
6315 edge e;
6316 edge_iterator ei;
6317 gphi *phi, *phi_copy;
6318 tree def;
6319 gphi_iterator psi, psi_copy;
6320
6321 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6322 return;
6323
6324 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6325
6326 if (e_copy->dest->flags & BB_DUPLICATED)
6327 dest = get_bb_original (e_copy->dest);
6328 else
6329 dest = e_copy->dest;
6330
6331 e = find_edge (bb, dest);
6332 if (!e)
6333 {
6334 /* During loop unrolling the target of the latch edge is copied.
6335 In this case we are not looking for the edge to DEST, but for
6336 the edge to the duplicated block whose original was DEST. */
6337 FOR_EACH_EDGE (e, ei, bb->succs)
6338 {
6339 if ((e->dest->flags & BB_DUPLICATED)
6340 && get_bb_original (e->dest) == dest)
6341 break;
6342 }
6343
6344 gcc_assert (e != NULL);
6345 }
6346
6347 for (psi = gsi_start_phis (e->dest),
6348 psi_copy = gsi_start_phis (e_copy->dest);
6349 !gsi_end_p (psi);
6350 gsi_next (&psi), gsi_next (&psi_copy))
6351 {
6352 phi = psi.phi ();
6353 phi_copy = psi_copy.phi ();
6354 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6355 add_phi_arg (phi_copy, def, e_copy,
6356 gimple_phi_arg_location_from_edge (phi, e));
6357 }
6358 }
6359
6360
6361 /* Basic block BB_COPY was created by code duplication. Add phi node
6362 arguments for edges going out of BB_COPY. The blocks that were
6363 duplicated have BB_DUPLICATED set. */
6364
6365 void
6366 add_phi_args_after_copy_bb (basic_block bb_copy)
6367 {
6368 edge e_copy;
6369 edge_iterator ei;
6370
6371 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6372 {
6373 add_phi_args_after_copy_edge (e_copy);
6374 }
6375 }
6376
6377 /* Blocks in REGION_COPY array of length N_REGION were created by
6378 duplication of basic blocks. Add phi node arguments for edges
6379 going from these blocks. If E_COPY is not NULL, also add
6380 phi node arguments for its destination. */
6381
6382 void
6383 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6384 edge e_copy)
6385 {
6386 unsigned i;
6387
6388 for (i = 0; i < n_region; i++)
6389 region_copy[i]->flags |= BB_DUPLICATED;
6390
6391 for (i = 0; i < n_region; i++)
6392 add_phi_args_after_copy_bb (region_copy[i]);
6393 if (e_copy)
6394 add_phi_args_after_copy_edge (e_copy);
6395
6396 for (i = 0; i < n_region; i++)
6397 region_copy[i]->flags &= ~BB_DUPLICATED;
6398 }
6399
6400 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6401 important exit edge EXIT. By important we mean that no SSA name defined
6402 inside region is live over the other exit edges of the region. All entry
6403 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6404 to the duplicate of the region. Dominance and loop information is
6405 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6406 UPDATE_DOMINANCE is false then we assume that the caller will update the
6407 dominance information after calling this function. The new basic
6408 blocks are stored to REGION_COPY in the same order as their originals in REGION,
6409 provided that REGION_COPY is not NULL.
6410 The function returns false if it is unable to copy the region,
6411 true otherwise. */
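/* For example, loop header copying (the primary use) duplicates the
 header region so that, schematically,

 while (cond) body; becomes if (cond) do body; while (cond);

 with ENTRY the preheader edge into the header and EXIT the in-region
 edge that becomes the new latch. (Illustrative sketch.) */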
6412
6413 bool
6414 gimple_duplicate_sese_region (edge entry, edge exit,
6415 basic_block *region, unsigned n_region,
6416 basic_block *region_copy,
6417 bool update_dominance)
6418 {
6419 unsigned i;
6420 bool free_region_copy = false, copying_header = false;
6421 class loop *loop = entry->dest->loop_father;
6422 edge exit_copy;
6423 vec<basic_block> doms = vNULL;
6424 edge redirected;
6425 profile_count total_count = profile_count::uninitialized ();
6426 profile_count entry_count = profile_count::uninitialized ();
6427
6428 if (!can_copy_bbs_p (region, n_region))
6429 return false;
6430
6431 /* Some sanity checking. Note that we do not check for all possible
6432 misuses of the functions. That is, if you ask to copy something weird,
6433 it will work, but the state of the structures probably will not be
6434 correct. */
6435 for (i = 0; i < n_region; i++)
6436 {
6437 /* We do not handle subloops, i.e. all the blocks must belong to the
6438 same loop. */
6439 if (region[i]->loop_father != loop)
6440 return false;
6441
6442 if (region[i] != entry->dest
6443 && region[i] == loop->header)
6444 return false;
6445 }
6446
6447 /* When the function is used for loop header copying (which is the primary
6448 use), ensure that EXIT and its copy will become the new latch and entry edges. */
6449 if (loop->header == entry->dest)
6450 {
6451 copying_header = true;
6452
6453 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6454 return false;
6455
6456 for (i = 0; i < n_region; i++)
6457 if (region[i] != exit->src
6458 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6459 return false;
6460 }
6461
6462 initialize_original_copy_tables ();
6463
6464 if (copying_header)
6465 set_loop_copy (loop, loop_outer (loop));
6466 else
6467 set_loop_copy (loop, loop);
6468
6469 if (!region_copy)
6470 {
6471 region_copy = XNEWVEC (basic_block, n_region);
6472 free_region_copy = true;
6473 }
6474
6475 /* Record blocks outside the region that are dominated by something
6476 inside. */
6477 if (update_dominance)
6478 {
6479 doms.create (0);
6480 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6481 }
6482
6483 if (entry->dest->count.initialized_p ())
6484 {
6485 total_count = entry->dest->count;
6486 entry_count = entry->count ();
6487 /* Fix up corner cases, to avoid division by zero or creation of negative
6488 frequencies. */
6489 if (entry_count > total_count)
6490 entry_count = total_count;
6491 }
6492
6493 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6494 split_edge_bb_loc (entry), update_dominance);
6495 if (total_count.initialized_p () && entry_count.initialized_p ())
6496 {
6497 scale_bbs_frequencies_profile_count (region, n_region,
6498 total_count - entry_count,
6499 total_count);
6500 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6501 total_count);
6502 }
6503
6504 if (copying_header)
6505 {
6506 loop->header = exit->dest;
6507 loop->latch = exit->src;
6508 }
6509
6510 /* Redirect the entry and add the phi node arguments. */
6511 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6512 gcc_assert (redirected != NULL);
6513 flush_pending_stmts (entry);
6514
6515 /* Concerning updating of dominators: We must recount dominators
6516 for entry block and its copy. Anything that is outside of the
6517 region, but was dominated by something inside needs recounting as
6518 well. */
6519 if (update_dominance)
6520 {
6521 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6522 doms.safe_push (get_bb_original (entry->dest));
6523 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6524 doms.release ();
6525 }
6526
6527 /* Add the other PHI node arguments. */
6528 add_phi_args_after_copy (region_copy, n_region, NULL);
6529
6530 if (free_region_copy)
6531 free (region_copy);
6532
6533 free_original_copy_tables ();
6534 return true;
6535 }
6536
6537 /* Checks if BB is part of the region defined by N_REGION BBS. */
6538 static bool
6539 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6540 {
6541 unsigned int n;
6542
6543 for (n = 0; n < n_region; n++)
6544 {
6545 if (bb == bbs[n])
6546 return true;
6547 }
6548 return false;
6549 }
6550
6551 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6552 are stored to REGION_COPY in the same order in which they appear
6553 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6554 the region, EXIT an exit from it. The condition guarding EXIT
6555 is moved to ENTRY. Returns true if duplication succeeds, false
6556 otherwise.
6557
6558 For example,
6559
6560 some_code;
6561 if (cond)
6562 A;
6563 else
6564 B;
6565
6566 is transformed to
6567
6568 if (cond)
6569 {
6570 some_code;
6571 A;
6572 }
6573 else
6574 {
6575 some_code;
6576 B;
6577 }
6578 */
6579
6580 bool
6581 gimple_duplicate_sese_tail (edge entry, edge exit,
6582 basic_block *region, unsigned n_region,
6583 basic_block *region_copy)
6584 {
6585 unsigned i;
6586 bool free_region_copy = false;
6587 class loop *loop = exit->dest->loop_father;
6588 class loop *orig_loop = entry->dest->loop_father;
6589 basic_block switch_bb, entry_bb, nentry_bb;
6590 vec<basic_block> doms;
6591 profile_count total_count = profile_count::uninitialized (),
6592 exit_count = profile_count::uninitialized ();
6593 edge exits[2], nexits[2], e;
6594 gimple_stmt_iterator gsi;
6595 gimple *cond_stmt;
6596 edge sorig, snew;
6597 basic_block exit_bb;
6598 gphi_iterator psi;
6599 gphi *phi;
6600 tree def;
6601 class loop *target, *aloop, *cloop;
6602
6603 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6604 exits[0] = exit;
6605 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
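 /* (The comparison EDGE_SUCC (exit->src, 0) == exit yields 0 or 1,
 selecting the successor edge other than EXIT.) */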
6606
6607 if (!can_copy_bbs_p (region, n_region))
6608 return false;
6609
6610 initialize_original_copy_tables ();
6611 set_loop_copy (orig_loop, loop);
6612
6613 target = loop;
6614 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6615 {
6616 if (bb_part_of_region_p (aloop->header, region, n_region))
6617 {
6618 cloop = duplicate_loop (aloop, target);
6619 duplicate_subloops (aloop, cloop);
6620 }
6621 }
6622
6623 if (!region_copy)
6624 {
6625 region_copy = XNEWVEC (basic_block, n_region);
6626 free_region_copy = true;
6627 }
6628
6629 gcc_assert (!need_ssa_update_p (cfun));
6630
6631 /* Record blocks outside the region that are dominated by something
6632 inside. */
6633 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6634
6635 total_count = exit->src->count;
6636 exit_count = exit->count ();
6637 /* Fix up corner cases, to avoid division by zero or creation of negative
6638 frequencies. */
6639 if (exit_count > total_count)
6640 exit_count = total_count;
6641
6642 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6643 split_edge_bb_loc (exit), true);
6644 if (total_count.initialized_p () && exit_count.initialized_p ())
6645 {
6646 scale_bbs_frequencies_profile_count (region, n_region,
6647 total_count - exit_count,
6648 total_count);
6649 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6650 total_count);
6651 }
6652
6653 /* Create the switch block, and put the exit condition to it. */
6654 entry_bb = entry->dest;
6655 nentry_bb = get_bb_copy (entry_bb);
6656 if (!last_stmt (entry->src)
6657 || !stmt_ends_bb_p (last_stmt (entry->src)))
6658 switch_bb = entry->src;
6659 else
6660 switch_bb = split_edge (entry);
6661 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6662
6663 gsi = gsi_last_bb (switch_bb);
6664 cond_stmt = last_stmt (exit->src);
6665 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6666 cond_stmt = gimple_copy (cond_stmt);
6667
6668 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6669
6670 sorig = single_succ_edge (switch_bb);
6671 sorig->flags = exits[1]->flags;
6672 sorig->probability = exits[1]->probability;
6673 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6674 snew->probability = exits[0]->probability;
6675
6677 /* Register the new edge from SWITCH_BB in loop exit lists. */
6678 rescan_loop_exit (snew, true, false);
6679
6680 /* Add the PHI node arguments. */
6681 add_phi_args_after_copy (region_copy, n_region, snew);
6682
6683 /* Get rid of now superfluous conditions and associated edges (and phi node
6684 arguments). */
6685 exit_bb = exit->dest;
6686
6687 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6688 PENDING_STMT (e) = NULL;
6689
6690 /* The latch of ORIG_LOOP was copied, and so was the backedge
6691 to the original header. We redirect this backedge to EXIT_BB. */
6692 for (i = 0; i < n_region; i++)
6693 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6694 {
6695 gcc_assert (single_succ_edge (region_copy[i]));
6696 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6697 PENDING_STMT (e) = NULL;
6698 for (psi = gsi_start_phis (exit_bb);
6699 !gsi_end_p (psi);
6700 gsi_next (&psi))
6701 {
6702 phi = psi.phi ();
6703 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6704 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6705 }
6706 }
6707 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6708 PENDING_STMT (e) = NULL;
6709
6710 /* Anything that is outside of the region, but was dominated by something
6711 inside needs to update dominance info. */
6712 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6713 doms.release ();
6714 /* Update the SSA web. */
6715 update_ssa (TODO_update_ssa);
6716
6717 if (free_region_copy)
6718 free (region_copy);
6719
6720 free_original_copy_tables ();
6721 return true;
6722 }
6723
6724 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6725 adding blocks when the dominator traversal reaches EXIT. This
6726 function silently assumes that ENTRY strictly dominates EXIT. */
6727
6728 void
6729 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6730 vec<basic_block> *bbs_p)
6731 {
6732 basic_block son;
6733
6734 for (son = first_dom_son (CDI_DOMINATORS, entry);
6735 son;
6736 son = next_dom_son (CDI_DOMINATORS, son))
6737 {
6738 bbs_p->safe_push (son);
6739 if (son != exit)
6740 gather_blocks_in_sese_region (son, exit, bbs_p);
6741 }
6742 }
6743
6744 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6745 The duplicates are recorded in VARS_MAP. */
6746
6747 static void
6748 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6749 tree to_context)
6750 {
6751 tree t = *tp, new_t;
6752 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6753
6754 if (DECL_CONTEXT (t) == to_context)
6755 return;
6756
6757 bool existed;
6758 tree &loc = vars_map->get_or_insert (t, &existed);
6759
6760 if (!existed)
6761 {
6762 if (SSA_VAR_P (t))
6763 {
6764 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6765 add_local_decl (f, new_t);
6766 }
6767 else
6768 {
6769 gcc_assert (TREE_CODE (t) == CONST_DECL);
6770 new_t = copy_node (t);
6771 }
6772 DECL_CONTEXT (new_t) = to_context;
6773
6774 loc = new_t;
6775 }
6776 else
6777 new_t = loc;
6778
6779 *tp = new_t;
6780 }
6781
6782
6783 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6784 VARS_MAP maps old ssa names and var_decls to the new ones. */
6785
6786 static tree
6787 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6788 tree to_context)
6789 {
6790 tree new_name;
6791
6792 gcc_assert (!virtual_operand_p (name));
6793
6794 tree *loc = vars_map->get (name);
6795
6796 if (!loc)
6797 {
6798 tree decl = SSA_NAME_VAR (name);
6799 if (decl)
6800 {
6801 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6802 replace_by_duplicate_decl (&decl, vars_map, to_context);
6803 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6804 decl, SSA_NAME_DEF_STMT (name));
6805 }
6806 else
6807 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6808 name, SSA_NAME_DEF_STMT (name));
6809
6810 /* Now that we've used the def stmt to define new_name, make sure it
6811 doesn't define name anymore. */
6812 SSA_NAME_DEF_STMT (name) = NULL;
6813
6814 vars_map->put (name, new_name);
6815 }
6816 else
6817 new_name = *loc;
6818
6819 return new_name;
6820 }
6821
6822 struct move_stmt_d
6823 {
6824 tree orig_block;
6825 tree new_block;
6826 tree from_context;
6827 tree to_context;
6828 hash_map<tree, tree> *vars_map;
6829 htab_t new_label_map;
6830 hash_map<void *, void *> *eh_map;
6831 bool remap_decls_p;
6832 };
6833
6834 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6835 contained in *TP if it was ORIG_BLOCK previously, and change the
6836 DECL_CONTEXT of every local variable referenced in *TP. */
6837
6838 static tree
6839 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6840 {
6841 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6842 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6843 tree t = *tp;
6844
6845 if (EXPR_P (t))
6846 {
6847 tree block = TREE_BLOCK (t);
6848 if (block == NULL_TREE)
6849 ;
6850 else if (block == p->orig_block
6851 || p->orig_block == NULL_TREE)
6852 {
6853 /* tree_node_can_be_shared says we can share invariant
6854 addresses but unshare_expr copies them anyway. Make sure
6855 to unshare before adjusting the block in place - we do not
6856 always see a copy here. */
6857 if (TREE_CODE (t) == ADDR_EXPR
6858 && is_gimple_min_invariant (t))
6859 *tp = t = unshare_expr (t);
6860 TREE_SET_BLOCK (t, p->new_block);
6861 }
6862 else if (flag_checking)
6863 {
6864 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6865 block = BLOCK_SUPERCONTEXT (block);
6866 gcc_assert (block == p->orig_block);
6867 }
6868 }
6869 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6870 {
6871 if (TREE_CODE (t) == SSA_NAME)
6872 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6873 else if (TREE_CODE (t) == PARM_DECL
6874 && gimple_in_ssa_p (cfun))
6875 *tp = *(p->vars_map->get (t));
6876 else if (TREE_CODE (t) == LABEL_DECL)
6877 {
6878 if (p->new_label_map)
6879 {
6880 struct tree_map in, *out;
6881 in.base.from = t;
6882 out = (struct tree_map *)
6883 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6884 if (out)
6885 *tp = t = out->to;
6886 }
6887
6888 /* For FORCED_LABELs we can end up with references from other
6889 functions if some SESE regions are outlined. It is UB to
6890 jump in between them, but they could be used just for printing
6891 addresses etc. In that case, DECL_CONTEXT on the label should
6892 be the function containing the glabel stmt with that LABEL_DECL,
6893 rather than whatever function last saw a reference to the
6894 label. */
6895 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6896 DECL_CONTEXT (t) = p->to_context;
6897 }
6898 else if (p->remap_decls_p)
6899 {
6900 /* Replace T with its duplicate. T should no longer appear in the
6901 parent function, so this looks wasteful; however, it may appear
6902 in referenced_vars, and more importantly, as virtual operands of
6903 statements, and in alias lists of other variables. It would be
6904 quite difficult to expunge it from all those places. ??? It might
6905 suffice to do this for addressable variables. */
6906 if ((VAR_P (t) && !is_global_var (t))
6907 || TREE_CODE (t) == CONST_DECL)
6908 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6909 }
6910 *walk_subtrees = 0;
6911 }
6912 else if (TYPE_P (t))
6913 *walk_subtrees = 0;
6914
6915 return NULL_TREE;
6916 }
6917
6918 /* Helper for move_stmt_r. Given an EH region number for the source
6919 function, map that to the duplicate EH region number in the dest. */
6920
6921 static int
6922 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6923 {
6924 eh_region old_r, new_r;
6925
6926 old_r = get_eh_region_from_number (old_nr);
6927 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6928
6929 return new_r->index;
6930 }
6931
6932 /* Similar, but operate on INTEGER_CSTs. */
6933
6934 static tree
6935 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6936 {
6937 int old_nr, new_nr;
6938
6939 old_nr = tree_to_shwi (old_t_nr);
6940 new_nr = move_stmt_eh_region_nr (old_nr, p);
6941
6942 return build_int_cst (integer_type_node, new_nr);
6943 }
6944
6945 /* Like move_stmt_op, but for gimple statements.
6946
6947 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6948 contained in the current statement in *GSI_P and change the
6949 DECL_CONTEXT of every local variable referenced in the current
6950 statement. */
6951
6952 static tree
6953 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6954 struct walk_stmt_info *wi)
6955 {
6956 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6957 gimple *stmt = gsi_stmt (*gsi_p);
6958 tree block = gimple_block (stmt);
6959
6960 if (block == p->orig_block
6961 || (p->orig_block == NULL_TREE
6962 && block != NULL_TREE))
6963 gimple_set_block (stmt, p->new_block);
6964
6965 switch (gimple_code (stmt))
6966 {
6967 case GIMPLE_CALL:
6968 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6969 {
6970 tree r, fndecl = gimple_call_fndecl (stmt);
6971 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6972 switch (DECL_FUNCTION_CODE (fndecl))
6973 {
6974 case BUILT_IN_EH_COPY_VALUES:
6975 r = gimple_call_arg (stmt, 1);
6976 r = move_stmt_eh_region_tree_nr (r, p);
6977 gimple_call_set_arg (stmt, 1, r);
6978 /* FALLTHRU */
6979
6980 case BUILT_IN_EH_POINTER:
6981 case BUILT_IN_EH_FILTER:
6982 r = gimple_call_arg (stmt, 0);
6983 r = move_stmt_eh_region_tree_nr (r, p);
6984 gimple_call_set_arg (stmt, 0, r);
6985 break;
6986
6987 default:
6988 break;
6989 }
6990 }
6991 break;
6992
6993 case GIMPLE_RESX:
6994 {
6995 gresx *resx_stmt = as_a <gresx *> (stmt);
6996 int r = gimple_resx_region (resx_stmt);
6997 r = move_stmt_eh_region_nr (r, p);
6998 gimple_resx_set_region (resx_stmt, r);
6999 }
7000 break;
7001
7002 case GIMPLE_EH_DISPATCH:
7003 {
7004 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7005 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7006 r = move_stmt_eh_region_nr (r, p);
7007 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7008 }
7009 break;
7010
7011 case GIMPLE_OMP_RETURN:
7012 case GIMPLE_OMP_CONTINUE:
7013 break;
7014
7015 case GIMPLE_LABEL:
7016 {
7017 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7018 so that such labels can be referenced from other regions.
7019 Make sure to update it when seeing a GIMPLE_LABEL though, since
7020 that statement is the owner of the label. */
7021 walk_gimple_op (stmt, move_stmt_op, wi);
7022 *handled_ops_p = true;
7023 tree label = gimple_label_label (as_a <glabel *> (stmt));
7024 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7025 DECL_CONTEXT (label) = p->to_context;
7026 }
7027 break;
7028
7029 default:
7030 if (is_gimple_omp (stmt))
7031 {
7032 /* Do not remap variables inside OMP directives. Variables
7033 referenced in clauses and directive header belong to the
7034 parent function and should not be moved into the child
7035 function. */
7036 bool save_remap_decls_p = p->remap_decls_p;
7037 p->remap_decls_p = false;
7038 *handled_ops_p = true;
7039
7040 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7041 move_stmt_op, wi);
7042
7043 p->remap_decls_p = save_remap_decls_p;
7044 }
7045 break;
7046 }
7047
7048 return NULL_TREE;
7049 }
7050
7051 /* Move basic block BB from function CFUN to function DEST_FN. The
7052 block is moved out of the original linked list and placed after
7053 block AFTER in the new list. Also, the block is removed from the
7054 original array of blocks and placed in DEST_FN's array of blocks.
7055 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7056 updated to reflect the moved edges.
7057
7058 The local variables are remapped to new instances, VARS_MAP is used
7059 to record the mapping. */
7060
7061 static void
7062 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7063 basic_block after, bool update_edge_count_p,
7064 struct move_stmt_d *d)
7065 {
7066 struct control_flow_graph *cfg;
7067 edge_iterator ei;
7068 edge e;
7069 gimple_stmt_iterator si;
7070 unsigned old_len, new_len;
7071
7072 /* Remove BB from dominance structures. */
7073 delete_from_dominance_info (CDI_DOMINATORS, bb);
7074
7075 /* Move BB from its current loop to the copy in the new function. */
7076 if (current_loops)
7077 {
7078 class loop *new_loop = (class loop *)bb->loop_father->aux;
7079 if (new_loop)
7080 bb->loop_father = new_loop;
7081 }
7082
7083 /* Link BB to the new linked list. */
7084 move_block_after (bb, after);
7085
7086 /* Update the edge count in the corresponding flowgraphs. */
7087 if (update_edge_count_p)
7088 FOR_EACH_EDGE (e, ei, bb->succs)
7089 {
7090 cfun->cfg->x_n_edges--;
7091 dest_cfun->cfg->x_n_edges++;
7092 }
7093
7094 /* Remove BB from the original basic block array. */
7095 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7096 cfun->cfg->x_n_basic_blocks--;
7097
7098 /* Grow DEST_CFUN's basic block array if needed. */
7099 cfg = dest_cfun->cfg;
7100 cfg->x_n_basic_blocks++;
7101 if (bb->index >= cfg->x_last_basic_block)
7102 cfg->x_last_basic_block = bb->index + 1;
7103
7104 old_len = vec_safe_length (cfg->x_basic_block_info);
7105 if ((unsigned) cfg->x_last_basic_block >= old_len)
7106 {
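 /* Grow by roughly an extra 25% so that repeated moves do not
 reallocate the array every time. */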
7107 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7108 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7109 }
7110
7111 (*cfg->x_basic_block_info)[bb->index] = bb;
7112
7113 /* Remap the variables in phi nodes. */
7114 for (gphi_iterator psi = gsi_start_phis (bb);
7115 !gsi_end_p (psi); )
7116 {
7117 gphi *phi = psi.phi ();
7118 use_operand_p use;
7119 tree op = PHI_RESULT (phi);
7120 ssa_op_iter oi;
7121 unsigned i;
7122
7123 if (virtual_operand_p (op))
7124 {
7125 /* Remove the phi nodes for virtual operands (alias analysis will be
7126 run for the new function, anyway). But replace all uses that
7127 might be outside of the region we move. */
7128 use_operand_p use_p;
7129 imm_use_iterator iter;
7130 gimple *use_stmt;
7131 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7132 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7133 SET_USE (use_p, SSA_NAME_VAR (op));
7134 remove_phi_node (&psi, true);
7135 continue;
7136 }
7137
7138 SET_PHI_RESULT (phi,
7139 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7140 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7141 {
7142 op = USE_FROM_PTR (use);
7143 if (TREE_CODE (op) == SSA_NAME)
7144 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7145 }
7146
7147 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7148 {
7149 location_t locus = gimple_phi_arg_location (phi, i);
7150 tree block = LOCATION_BLOCK (locus);
7151
7152 if (locus == UNKNOWN_LOCATION)
7153 continue;
7154 if (d->orig_block == NULL_TREE || block == d->orig_block)
7155 {
7156 locus = set_block (locus, d->new_block);
7157 gimple_phi_arg_set_location (phi, i, locus);
7158 }
7159 }
7160
7161 gsi_next (&psi);
7162 }
7163
7164 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7165 {
7166 gimple *stmt = gsi_stmt (si);
7167 struct walk_stmt_info wi;
7168
7169 memset (&wi, 0, sizeof (wi));
7170 wi.info = d;
7171 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7172
7173 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7174 {
7175 tree label = gimple_label_label (label_stmt);
7176 int uid = LABEL_DECL_UID (label);
7177
7178 gcc_assert (uid > -1);
7179
7180 old_len = vec_safe_length (cfg->x_label_to_block_map);
7181 if (old_len <= (unsigned) uid)
7182 {
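 /* Grow to 3 * uid / 2 + 1 so that moving many labels does not
 reallocate the map on each iteration. */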
7183 new_len = 3 * uid / 2 + 1;
7184 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7185 }
7186
7187 (*cfg->x_label_to_block_map)[uid] = bb;
7188 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7189
7190 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7191
7192 if (uid >= dest_cfun->cfg->last_label_uid)
7193 dest_cfun->cfg->last_label_uid = uid + 1;
7194 }
7195
7196 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7197 remove_stmt_from_eh_lp_fn (cfun, stmt);
7198
7199 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7200 gimple_remove_stmt_histograms (cfun, stmt);
7201
7202 /* We cannot leave any operands allocated from the operand caches of
7203 the current function. */
7204 free_stmt_operands (cfun, stmt);
7205 push_cfun (dest_cfun);
7206 update_stmt (stmt);
7207 pop_cfun ();
7208 }
7209
7210 FOR_EACH_EDGE (e, ei, bb->succs)
7211 if (e->goto_locus != UNKNOWN_LOCATION)
7212 {
7213 tree block = LOCATION_BLOCK (e->goto_locus);
7214 if (d->orig_block == NULL_TREE
7215 || block == d->orig_block)
7216 e->goto_locus = set_block (e->goto_locus, d->new_block);
7217 }
7218 }
7219
7220 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7221 the outermost EH region. Use REGION as the incoming base EH region.
7222 If there is no single outermost region, return NULL and set *ALL to
7223 true. */
7224
7225 static eh_region
7226 find_outermost_region_in_block (struct function *src_cfun,
7227 basic_block bb, eh_region region,
7228 bool *all)
7229 {
7230 gimple_stmt_iterator si;
7231
7232 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7233 {
7234 gimple *stmt = gsi_stmt (si);
7235 eh_region stmt_region;
7236 int lp_nr;
7237
7238 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7239 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7240 if (stmt_region)
7241 {
7242 if (region == NULL)
7243 region = stmt_region;
7244 else if (stmt_region != region)
7245 {
7246 region = eh_region_outermost (src_cfun, stmt_region, region);
7247 if (region == NULL)
7248 {
7249 *all = true;
7250 return NULL;
7251 }
7252 }
7253 }
7254 }
7255
7256 return region;
7257 }
7258
7259 static tree
7260 new_label_mapper (tree decl, void *data)
7261 {
7262 htab_t hash = (htab_t) data;
7263 struct tree_map *m;
7264 void **slot;
7265
7266 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7267
7268 m = XNEW (struct tree_map);
7269 m->hash = DECL_UID (decl);
7270 m->base.from = decl;
7271 m->to = create_artificial_label (UNKNOWN_LOCATION);
7272 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7273 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7274 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7275
7276 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7277 gcc_assert (*slot == NULL);
7278
7279 *slot = m;
7280
7281 return m->to;
7282 }
7283
7284 /* Tree walker to replace the decls used inside value expressions by
7285 duplicates. */
7286
7287 static tree
7288 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7289 {
7290 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7291
7292 switch (TREE_CODE (*tp))
7293 {
7294 case VAR_DECL:
7295 case PARM_DECL:
7296 case RESULT_DECL:
7297 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7298 break;
7299 default:
7300 break;
7301 }
7302
7303 if (IS_TYPE_OR_DECL_P (*tp))
7304 *walk_subtrees = false;
7305
7306 return NULL;
7307 }
7308
7309 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7310 subblocks. */
7311
7312 static void
7313 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7314 tree to_context)
7315 {
7316 tree *tp, t;
7317
7318 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7319 {
7320 t = *tp;
7321 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7322 continue;
7323 replace_by_duplicate_decl (&t, vars_map, to_context);
7324 if (t != *tp)
7325 {
7326 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7327 {
7328 tree x = DECL_VALUE_EXPR (*tp);
7329 struct replace_decls_d rd = { vars_map, to_context };
7330 x = unshare_expr (x);
7331 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7332 SET_DECL_VALUE_EXPR (t, x);
7333 DECL_HAS_VALUE_EXPR_P (t) = 1;
7334 }
7335 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7336 *tp = t;
7337 }
7338 }
7339
7340 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7341 replace_block_vars_by_duplicates (block, vars_map, to_context);
7342 }
7343
7344 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7345 from FN1 to FN2. */
7346
7347 static void
7348 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7349 class loop *loop)
7350 {
7351 /* Discard it from the old loop array. */
7352 (*get_loops (fn1))[loop->num] = NULL;
7353
7354 /* Place it in the new loop array, assigning it a new number. */
7355 loop->num = number_of_loops (fn2);
7356 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7357
7358 /* Recurse to children. */
7359 for (loop = loop->inner; loop; loop = loop->next)
7360 fixup_loop_arrays_after_move (fn1, fn2, loop);
7361 }
7362
7363 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7364 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7365
7366 DEBUG_FUNCTION void
7367 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7368 {
7369 basic_block bb;
7370 edge_iterator ei;
7371 edge e;
7372 bitmap bbs = BITMAP_ALLOC (NULL);
7373 int i;
7374
7375 gcc_assert (entry != NULL);
7376 gcc_assert (entry != exit);
7377 gcc_assert (bbs_p != NULL);
7378
7379 gcc_assert (bbs_p->length () > 0);
7380
7381 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7382 bitmap_set_bit (bbs, bb->index);
7383
7384 gcc_assert (bitmap_bit_p (bbs, entry->index));
7385 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7386
7387 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7388 {
7389 if (bb == entry)
7390 {
7391 gcc_assert (single_pred_p (entry));
7392 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7393 }
7394 else
7395 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7396 {
7397 e = ei_edge (ei);
7398 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7399 }
7400
7401 if (bb == exit)
7402 {
7403 gcc_assert (single_succ_p (exit));
7404 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7405 }
7406 else
7407 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7408 {
7409 e = ei_edge (ei);
7410 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7411 }
7412 }
7413
7414 BITMAP_FREE (bbs);
7415 }
7416
7417 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7418
7419 bool
7420 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7421 {
7422 bitmap release_names = (bitmap)data;
7423
7424 if (TREE_CODE (from) != SSA_NAME)
7425 return true;
7426
7427 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7428 return true;
7429 }
7430
7431 /* Return the IFN_LOOP_DIST_ALIAS call just before the GIMPLE_COND ending BB, if any. */
7432
7433 static gimple *
7434 find_loop_dist_alias (basic_block bb)
7435 {
7436 gimple *g = last_stmt (bb);
7437 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7438 return NULL;
7439
7440 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7441 gsi_prev (&gsi);
7442 if (gsi_end_p (gsi))
7443 return NULL;
7444
7445 g = gsi_stmt (gsi);
7446 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7447 return g;
7448 return NULL;
7449 }
7450
7451 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7452 to VALUE and update any immediate uses of its LHS. */
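/* For example (illustrative): folding _1 = .LOOP_VECTORIZED (1, 2) with
 VALUE boolean_false_node replaces the call by _1 = 0 and rewrites all
 uses of _1 to 0, after which CFG cleanup can delete the now-dead loop
 version guarded by it. */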
7453
7454 void
7455 fold_loop_internal_call (gimple *g, tree value)
7456 {
7457 tree lhs = gimple_call_lhs (g);
7458 use_operand_p use_p;
7459 imm_use_iterator iter;
7460 gimple *use_stmt;
7461 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7462
7463 update_call_from_tree (&gsi, value);
7464 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7465 {
7466 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7467 SET_USE (use_p, value);
7468 update_stmt (use_stmt);
7469 }
7470 }
7471
7472 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7473 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7474 single basic block in the original CFG and the new basic block is
7475 returned. DEST_CFUN must not have a CFG yet.
7476
7477 Note that the region need not be a pure SESE region. Blocks inside
7478 the region may contain calls to abort/exit. The only restriction
7479 is that ENTRY_BB should be the only entry point and it must
7480 dominate EXIT_BB.
7481
7482 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7483 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7484 to the new function.
7485
7486 All local variables referenced in the region are assumed to be in
7487 the corresponding BLOCK_VARS and unexpanded variable lists
7488 associated with DEST_CFUN.
7489
7490 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7491 reimplement move_sese_region_to_fn by duplicating the region rather than
7492 moving it. */
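/* (A prominent caller is OMP expansion, which uses this to outline e.g.
 the body of a GIMPLE_OMP_PARALLEL into its child function.) */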
7493
7494 basic_block
7495 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7496 basic_block exit_bb, tree orig_block)
7497 {
7498 vec<basic_block> bbs, dom_bbs;
7499 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7500 basic_block after, bb, *entry_pred, *exit_succ, abb;
7501 struct function *saved_cfun = cfun;
7502 int *entry_flag, *exit_flag;
7503 profile_probability *entry_prob, *exit_prob;
7504 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7505 edge e;
7506 edge_iterator ei;
7507 htab_t new_label_map;
7508 hash_map<void *, void *> *eh_map;
7509 class loop *loop = entry_bb->loop_father;
7510 class loop *loop0 = get_loop (saved_cfun, 0);
7511 struct move_stmt_d d;
7512
7513 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7514 region. */
7515 gcc_assert (entry_bb != exit_bb
7516 && (!exit_bb
7517 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7518
7519 /* Collect all the blocks in the region. Manually add ENTRY_BB
7520 because it won't be added by dfs_enumerate_from. */
7521 bbs.create (0);
7522 bbs.safe_push (entry_bb);
7523 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7524
7525 if (flag_checking)
7526 verify_sese (entry_bb, exit_bb, &bbs);
7527
7528 /* The blocks that used to be dominated by something in BBS will now be
7529 dominated by the new block. */
7530 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7531 bbs.address (),
7532 bbs.length ());
7533
7534 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7535 the predecessor edges to ENTRY_BB and the successor edges to
7536 EXIT_BB so that we can re-attach them to the new basic block that
7537 will replace the region. */
7538 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7539 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7540 entry_flag = XNEWVEC (int, num_entry_edges);
7541 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7542 i = 0;
7543 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7544 {
7545 entry_prob[i] = e->probability;
7546 entry_flag[i] = e->flags;
7547 entry_pred[i++] = e->src;
7548 remove_edge (e);
7549 }
7550
7551 if (exit_bb)
7552 {
7553 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7554 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7555 exit_flag = XNEWVEC (int, num_exit_edges);
7556 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7557 i = 0;
7558 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7559 {
7560 exit_prob[i] = e->probability;
7561 exit_flag[i] = e->flags;
7562 exit_succ[i++] = e->dest;
7563 remove_edge (e);
7564 }
7565 }
7566 else
7567 {
7568 num_exit_edges = 0;
7569 exit_succ = NULL;
7570 exit_flag = NULL;
7571 exit_prob = NULL;
7572 }
7573
7574 /* Switch context to the child function to initialize DEST_FN's CFG. */
7575 gcc_assert (dest_cfun->cfg == NULL);
7576 push_cfun (dest_cfun);
7577
7578 init_empty_tree_cfg ();
7579
7580 /* Initialize EH information for the new function. */
7581 eh_map = NULL;
7582 new_label_map = NULL;
7583 if (saved_cfun->eh)
7584 {
7585 eh_region region = NULL;
7586 bool all = false;
7587
7588 FOR_EACH_VEC_ELT (bbs, i, bb)
7589 {
7590 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7591 if (all)
7592 break;
7593 }
7594
7595 init_eh_for_function ();
7596 if (region != NULL || all)
7597 {
7598 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7599 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7600 new_label_mapper, new_label_map);
7601 }
7602 }
7603
7604 /* Initialize an empty loop tree. */
7605 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7606 init_loops_structure (dest_cfun, loops, 1);
7607 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7608 set_loops_for_fn (dest_cfun, loops);
7609
7610 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7611
7612 /* Move the outlined loop tree part. */
7613 num_nodes = bbs.length ();
7614 FOR_EACH_VEC_ELT (bbs, i, bb)
7615 {
7616 if (bb->loop_father->header == bb)
7617 {
7618 class loop *this_loop = bb->loop_father;
7619 class loop *outer = loop_outer (this_loop);
7620 if (outer == loop
7621 /* If the SESE region contains some bbs ending with
7622 a noreturn call, those are considered to belong
7623 to the outermost loop in saved_cfun, rather than
7624 the entry_bb's loop_father. */
7625 || outer == loop0)
7626 {
7627 if (outer != loop)
7628 num_nodes -= this_loop->num_nodes;
7629 flow_loop_tree_node_remove (bb->loop_father);
7630 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7631 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7632 }
7633 }
7634 else if (bb->loop_father == loop0 && loop0 != loop)
7635 num_nodes--;
7636
7637 /* Remove loop exits from the outlined region. */
7638 if (loops_for_fn (saved_cfun)->exits)
7639 FOR_EACH_EDGE (e, ei, bb->succs)
7640 {
7641 struct loops *l = loops_for_fn (saved_cfun);
7642 loop_exit **slot
7643 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7644 NO_INSERT);
7645 if (slot)
7646 l->exits->clear_slot (slot);
7647 }
7648 }
7649
7650 /* Adjust the number of blocks in the tree root of the outlined part. */
7651 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7652
7653 /* Setup a mapping to be used by move_block_to_fn. */
7654 loop->aux = current_loops->tree_root;
7655 loop0->aux = current_loops->tree_root;
7656
7657 /* Fix up orig_loop_num. If the loop referenced in it has been moved
7658 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7659 class loop *dloop;
7660 signed char *moved_orig_loop_num = NULL;
7661 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7662 if (dloop->orig_loop_num)
7663 {
7664 if (moved_orig_loop_num == NULL)
7665 moved_orig_loop_num
7666 = XCNEWVEC (signed char, vec_safe_length (larray));
7667 if ((*larray)[dloop->orig_loop_num] != NULL
7668 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7669 {
7670 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7671 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7672 moved_orig_loop_num[dloop->orig_loop_num]++;
7673 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7674 }
7675 else
7676 {
7677 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7678 dloop->orig_loop_num = 0;
7679 }
7680 }
7681 pop_cfun ();
7682
7683 if (moved_orig_loop_num)
7684 {
7685 FOR_EACH_VEC_ELT (bbs, i, bb)
7686 {
7687 gimple *g = find_loop_dist_alias (bb);
7688 if (g == NULL)
7689 continue;
7690
7691 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7692 gcc_assert (orig_loop_num
7693 && (unsigned) orig_loop_num < vec_safe_length (larray));
7694 if (moved_orig_loop_num[orig_loop_num] == 2)
7695 {
7696 /* If we have moved both loops with this orig_loop_num into
7697 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7698 too, update the first argument. */
7699 gcc_assert ((*larray)[dloop->orig_loop_num] != NULL
7700 && (get_loop (saved_cfun, dloop->orig_loop_num)
7701 == NULL));
7702 tree t = build_int_cst (integer_type_node,
7703 (*larray)[dloop->orig_loop_num]->num);
7704 gimple_call_set_arg (g, 0, t);
7705 update_stmt (g);
7706 /* Make sure the following loop will not update it. */
7707 moved_orig_loop_num[orig_loop_num] = 0;
7708 }
7709 else
7710 /* Otherwise at least one of the loops stayed in saved_cfun.
7711 Remove the LOOP_DIST_ALIAS call. */
7712 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7713 }
7714 FOR_EACH_BB_FN (bb, saved_cfun)
7715 {
7716 gimple *g = find_loop_dist_alias (bb);
7717 if (g == NULL)
7718 continue;
7719 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7720 gcc_assert (orig_loop_num
7721 && (unsigned) orig_loop_num < vec_safe_length (larray));
7722 if (moved_orig_loop_num[orig_loop_num])
7723 	    /* The LOOP_DIST_ALIAS call remained in saved_cfun; if at least one
7724 	       of the corresponding loops was moved, remove it.  */
7725 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7726 }
7727 XDELETEVEC (moved_orig_loop_num);
7728 }
7729 ggc_free (larray);
7730
7731 /* Move blocks from BBS into DEST_CFUN. */
7732 gcc_assert (bbs.length () >= 2);
7733 after = dest_cfun->cfg->x_entry_block_ptr;
7734 hash_map<tree, tree> vars_map;
7735
7736 memset (&d, 0, sizeof (d));
7737 d.orig_block = orig_block;
7738 d.new_block = DECL_INITIAL (dest_cfun->decl);
7739 d.from_context = cfun->decl;
7740 d.to_context = dest_cfun->decl;
7741 d.vars_map = &vars_map;
7742 d.new_label_map = new_label_map;
7743 d.eh_map = eh_map;
7744 d.remap_decls_p = true;
7745
7746 if (gimple_in_ssa_p (cfun))
7747 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7748 {
7749 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7750 set_ssa_default_def (dest_cfun, arg, narg);
7751 vars_map.put (arg, narg);
7752 }
7753
7754 FOR_EACH_VEC_ELT (bbs, i, bb)
7755 {
7756 	      /* No need to update edge counts on the last block.  They have
7757 		 already been updated earlier when we detached the region from
7758 		 the original CFG.  */
7759 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7760 after = bb;
7761 }
7762
7763 /* Adjust the maximum clique used. */
7764 dest_cfun->last_clique = saved_cfun->last_clique;
7765
7766 loop->aux = NULL;
7767 loop0->aux = NULL;
7768 /* Loop sizes are no longer correct, fix them up. */
7769 loop->num_nodes -= num_nodes;
7770 for (class loop *outer = loop_outer (loop);
7771 outer; outer = loop_outer (outer))
7772 outer->num_nodes -= num_nodes;
7773 loop0->num_nodes -= bbs.length () - num_nodes;
7774
7775 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7776 {
7777 class loop *aloop;
7778 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7779 if (aloop != NULL)
7780 {
7781 if (aloop->simduid)
7782 {
7783 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7784 d.to_context);
7785 dest_cfun->has_simduid_loops = true;
7786 }
7787 if (aloop->force_vectorize)
7788 dest_cfun->has_force_vectorize_loops = true;
7789 }
7790 }
7791
7792 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7793 if (orig_block)
7794 {
7795 tree block;
7796 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7797 == NULL_TREE);
7798 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7799 = BLOCK_SUBBLOCKS (orig_block);
7800 for (block = BLOCK_SUBBLOCKS (orig_block);
7801 block; block = BLOCK_CHAIN (block))
7802 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7803 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7804 }
7805
7806 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7807 &vars_map, dest_cfun->decl);
7808
7809 if (new_label_map)
7810 htab_delete (new_label_map);
7811 if (eh_map)
7812 delete eh_map;
7813
7814 if (gimple_in_ssa_p (cfun))
7815 {
7816 /* We need to release ssa-names in a defined order, so first find them,
7817 and then iterate in ascending version order. */
7818 bitmap release_names = BITMAP_ALLOC (NULL);
7819 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7820 bitmap_iterator bi;
7821 unsigned i;
7822 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7823 release_ssa_name (ssa_name (i));
7824 BITMAP_FREE (release_names);
7825 }
7826
7827 /* Rewire the entry and exit blocks. The successor to the entry
7828 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7829 the child function. Similarly, the predecessor of DEST_FN's
7830 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7831 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7832 	     various CFG manipulation functions get to the right CFG.
7833
7834 FIXME, this is silly. The CFG ought to become a parameter to
7835 these helpers. */
7836 push_cfun (dest_cfun);
7837 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7838 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7839 if (exit_bb)
7840 {
7841 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7842 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7843 }
7844 else
7845 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7846 pop_cfun ();
7847
7848 /* Back in the original function, the SESE region has disappeared,
7849 create a new basic block in its place. */
7850 bb = create_empty_bb (entry_pred[0]);
7851 if (current_loops)
7852 add_bb_to_loop (bb, loop);
7853 for (i = 0; i < num_entry_edges; i++)
7854 {
7855 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7856 e->probability = entry_prob[i];
7857 }
7858
7859 for (i = 0; i < num_exit_edges; i++)
7860 {
7861 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7862 e->probability = exit_prob[i];
7863 }
7864
7865 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7866 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7867 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7868 dom_bbs.release ();
7869
7870 if (exit_bb)
7871 {
7872 free (exit_prob);
7873 free (exit_flag);
7874 free (exit_succ);
7875 }
7876 free (entry_prob);
7877 free (entry_flag);
7878 free (entry_pred);
7879 bbs.release ();
7880
7881 return bb;
7882 }
7883
7884 /* Dump default def DEF to file FILE using FLAGS and indentation
7885 SPC. */
7886
7887 static void
7888 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7889 {
7890 for (int i = 0; i < spc; ++i)
7891 fprintf (file, " ");
7892 dump_ssaname_info_to_file (file, def, spc);
7893
7894 print_generic_expr (file, TREE_TYPE (def), flags);
7895 fprintf (file, " ");
7896 print_generic_expr (file, def, flags);
7897 fprintf (file, " = ");
7898 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7899 fprintf (file, ";\n");
7900 }
7901
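/* For example, for a parameter "int i" whose default definition is the
   SSA name i_1(D), a call such as

     dump_default_def (file, def, 2, TDF_NONE);

   prints roughly (preceded by whatever range/points-to annotations
   dump_ssaname_info_to_file emits for DEF):

     int i_1(D) = i;

   The exact output depends on FLAGS; this is an illustrative sketch,
   not a normative format.  */
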
7902 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7903
7904 static void
7905 print_no_sanitize_attr_value (FILE *file, tree value)
7906 {
7907 unsigned int flags = tree_to_uhwi (value);
7908 bool first = true;
7909 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7910 {
7911 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7912 {
7913 if (!first)
7914 fprintf (file, " | ");
7915 fprintf (file, "%s", sanitizer_opts[i].name);
7916 first = false;
7917 }
7918 }
7919 }
7920
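/* For instance, if VALUE has the bits for the "address" and "undefined"
   sanitizers set (names taken from the sanitizer_opts table), this
   prints:

     address | undefined

   A minimal caller sketch, assuming ATTR is a "no_sanitize" attribute
   whose TREE_VALUE holds the flag word:

     print_no_sanitize_attr_value (stderr, TREE_VALUE (attr));  */
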
7921 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7922 dumpfile.h).  */
7923
7924 void
7925 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7926 {
7927 tree arg, var, old_current_fndecl = current_function_decl;
7928 struct function *dsf;
7929 bool ignore_topmost_bind = false, any_var = false;
7930 basic_block bb;
7931 tree chain;
7932 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7933 && decl_is_tm_clone (fndecl));
7934 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7935
7936 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7937 {
7938 fprintf (file, "__attribute__((");
7939
7940 bool first = true;
7941 tree chain;
7942 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7943 first = false, chain = TREE_CHAIN (chain))
7944 {
7945 if (!first)
7946 fprintf (file, ", ");
7947
7948 tree name = get_attribute_name (chain);
7949 print_generic_expr (file, name, dump_flags);
7950 if (TREE_VALUE (chain) != NULL_TREE)
7951 {
7952 fprintf (file, " (");
7953
7954 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7955 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7956 else
7957 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7958 fprintf (file, ")");
7959 }
7960 }
7961
7962 fprintf (file, "))\n");
7963 }
7964
7965 current_function_decl = fndecl;
7966 if (flags & TDF_GIMPLE)
7967 {
7968 static bool hotness_bb_param_printed = false;
7969 if (profile_info != NULL
7970 && !hotness_bb_param_printed)
7971 {
7972 hotness_bb_param_printed = true;
7973 fprintf (file,
7974 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
7975 " */\n", get_hot_bb_threshold ());
7976 }
7977
7978 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7979 dump_flags | TDF_SLIM);
7980 fprintf (file, " __GIMPLE (%s",
7981 (fun->curr_properties & PROP_ssa) ? "ssa"
7982 : (fun->curr_properties & PROP_cfg) ? "cfg"
7983 : "");
7984
7985 if (cfun->cfg)
7986 {
7987 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7988 if (bb->count.initialized_p ())
7989 	    fprintf (file, ",%s(%" PRIu64 ")",
7990 profile_quality_as_string (bb->count.quality ()),
7991 bb->count.value ());
7992 fprintf (file, ")\n%s (", function_name (fun));
7993 }
7994 }
7995 else
7996 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7997
7998 arg = DECL_ARGUMENTS (fndecl);
7999 while (arg)
8000 {
8001 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8002 fprintf (file, " ");
8003 print_generic_expr (file, arg, dump_flags);
8004 if (DECL_CHAIN (arg))
8005 fprintf (file, ", ");
8006 arg = DECL_CHAIN (arg);
8007 }
8008 fprintf (file, ")\n");
8009
8010 dsf = DECL_STRUCT_FUNCTION (fndecl);
8011 if (dsf && (flags & TDF_EH))
8012 dump_eh_tree (file, dsf);
8013
8014 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8015 {
8016 dump_node (fndecl, TDF_SLIM | flags, file);
8017 current_function_decl = old_current_fndecl;
8018 return;
8019 }
8020
8021 /* When GIMPLE is lowered, the variables are no longer available in
8022 BIND_EXPRs, so display them separately. */
8023 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8024 {
8025 unsigned ix;
8026 ignore_topmost_bind = true;
8027
8028 fprintf (file, "{\n");
8029 if (gimple_in_ssa_p (fun)
8030 && (flags & TDF_ALIAS))
8031 {
8032 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8033 arg = DECL_CHAIN (arg))
8034 {
8035 tree def = ssa_default_def (fun, arg);
8036 if (def)
8037 dump_default_def (file, def, 2, flags);
8038 }
8039
8040 tree res = DECL_RESULT (fun->decl);
8041 if (res != NULL_TREE
8042 && DECL_BY_REFERENCE (res))
8043 {
8044 tree def = ssa_default_def (fun, res);
8045 if (def)
8046 dump_default_def (file, def, 2, flags);
8047 }
8048
8049 tree static_chain = fun->static_chain_decl;
8050 if (static_chain != NULL_TREE)
8051 {
8052 tree def = ssa_default_def (fun, static_chain);
8053 if (def)
8054 dump_default_def (file, def, 2, flags);
8055 }
8056 }
8057
8058 if (!vec_safe_is_empty (fun->local_decls))
8059 FOR_EACH_LOCAL_DECL (fun, ix, var)
8060 {
8061 print_generic_decl (file, var, flags);
8062 fprintf (file, "\n");
8063
8064 any_var = true;
8065 }
8066
8067 tree name;
8068
8069 if (gimple_in_ssa_p (cfun))
8070 FOR_EACH_SSA_NAME (ix, name, cfun)
8071 {
8072 if (!SSA_NAME_VAR (name))
8073 {
8074 fprintf (file, " ");
8075 print_generic_expr (file, TREE_TYPE (name), flags);
8076 fprintf (file, " ");
8077 print_generic_expr (file, name, flags);
8078 fprintf (file, ";\n");
8079
8080 any_var = true;
8081 }
8082 }
8083 }
8084
8085 if (fun && fun->decl == fndecl
8086 && fun->cfg
8087 && basic_block_info_for_fn (fun))
8088 {
8089 /* If the CFG has been built, emit a CFG-based dump. */
8090 if (!ignore_topmost_bind)
8091 fprintf (file, "{\n");
8092
8093 if (any_var && n_basic_blocks_for_fn (fun))
8094 fprintf (file, "\n");
8095
8096 FOR_EACH_BB_FN (bb, fun)
8097 dump_bb (file, bb, 2, flags);
8098
8099 fprintf (file, "}\n");
8100 }
8101 else if (fun->curr_properties & PROP_gimple_any)
8102 {
8103 /* The function is now in GIMPLE form but the CFG has not been
8104 built yet. Emit the single sequence of GIMPLE statements
8105 that make up its body. */
8106 gimple_seq body = gimple_body (fndecl);
8107
8108 if (gimple_seq_first_stmt (body)
8109 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8110 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8111 print_gimple_seq (file, body, 0, flags);
8112 else
8113 {
8114 if (!ignore_topmost_bind)
8115 fprintf (file, "{\n");
8116
8117 if (any_var)
8118 fprintf (file, "\n");
8119
8120 print_gimple_seq (file, body, 2, flags);
8121 fprintf (file, "}\n");
8122 }
8123 }
8124 else
8125 {
8126 int indent;
8127
8128       /* Make a tree-based dump.  */
8129 chain = DECL_SAVED_TREE (fndecl);
8130 if (chain && TREE_CODE (chain) == BIND_EXPR)
8131 {
8132 if (ignore_topmost_bind)
8133 {
8134 chain = BIND_EXPR_BODY (chain);
8135 indent = 2;
8136 }
8137 else
8138 indent = 0;
8139 }
8140 else
8141 {
8142 if (!ignore_topmost_bind)
8143 {
8144 fprintf (file, "{\n");
8145 /* No topmost bind, pretend it's ignored for later. */
8146 ignore_topmost_bind = true;
8147 }
8148 indent = 2;
8149 }
8150
8151 if (any_var)
8152 fprintf (file, "\n");
8153
8154 print_generic_stmt_indented (file, chain, flags, indent);
8155 if (ignore_topmost_bind)
8156 fprintf (file, "}\n");
8157 }
8158
8159 if (flags & TDF_ENUMERATE_LOCALS)
8160 dump_enumerated_decls (file, flags);
8161 fprintf (file, "\n\n");
8162
8163 current_function_decl = old_current_fndecl;
8164 }
8165
8166 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
8167
8168 DEBUG_FUNCTION void
8169 debug_function (tree fn, dump_flags_t flags)
8170 {
8171 dump_function_to_file (fn, stderr, flags);
8172 }
8173
8174
8175 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8176
8177 static void
8178 print_pred_bbs (FILE *file, basic_block bb)
8179 {
8180 edge e;
8181 edge_iterator ei;
8182
8183 FOR_EACH_EDGE (e, ei, bb->preds)
8184 fprintf (file, "bb_%d ", e->src->index);
8185 }
8186
8187
8188 /* Print on FILE the indexes for the successors of basic_block BB. */
8189
8190 static void
8191 print_succ_bbs (FILE *file, basic_block bb)
8192 {
8193 edge e;
8194 edge_iterator ei;
8195
8196 FOR_EACH_EDGE (e, ei, bb->succs)
8197 fprintf (file, "bb_%d ", e->dest->index);
8198 }
8199
8200 /* Print to FILE the basic block BB following the VERBOSITY level. */
8201
8202 void
8203 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8204 {
8205 char *s_indent = (char *) alloca ((size_t) indent + 1);
8206 memset ((void *) s_indent, ' ', (size_t) indent);
8207 s_indent[indent] = '\0';
8208
8209 /* Print basic_block's header. */
8210 if (verbosity >= 2)
8211 {
8212 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8213 print_pred_bbs (file, bb);
8214 fprintf (file, "}, succs = {");
8215 print_succ_bbs (file, bb);
8216 fprintf (file, "})\n");
8217 }
8218
8219 /* Print basic_block's body. */
8220 if (verbosity >= 3)
8221 {
8222 fprintf (file, "%s {\n", s_indent);
8223 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8224 fprintf (file, "%s }\n", s_indent);
8225 }
8226 }
8227
8228 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8229
8230 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8231 VERBOSITY level this outputs the contents of the loop, or just its
8232 structure. */
8233
8234 static void
8235 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8236 {
8237 char *s_indent;
8238 basic_block bb;
8239
8240 if (loop == NULL)
8241 return;
8242
8243 s_indent = (char *) alloca ((size_t) indent + 1);
8244 memset ((void *) s_indent, ' ', (size_t) indent);
8245 s_indent[indent] = '\0';
8246
8247 /* Print loop's header. */
8248 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8249 if (loop->header)
8250 fprintf (file, "header = %d", loop->header->index);
8251 else
8252 {
8253 fprintf (file, "deleted)\n");
8254 return;
8255 }
8256 if (loop->latch)
8257 fprintf (file, ", latch = %d", loop->latch->index);
8258 else
8259 fprintf (file, ", multiple latches");
8260 fprintf (file, ", niter = ");
8261 print_generic_expr (file, loop->nb_iterations);
8262
8263 if (loop->any_upper_bound)
8264 {
8265 fprintf (file, ", upper_bound = ");
8266 print_decu (loop->nb_iterations_upper_bound, file);
8267 }
8268 if (loop->any_likely_upper_bound)
8269 {
8270 fprintf (file, ", likely_upper_bound = ");
8271 print_decu (loop->nb_iterations_likely_upper_bound, file);
8272 }
8273
8274 if (loop->any_estimate)
8275 {
8276 fprintf (file, ", estimate = ");
8277 print_decu (loop->nb_iterations_estimate, file);
8278 }
8279 if (loop->unroll)
8280 fprintf (file, ", unroll = %d", loop->unroll);
8281 fprintf (file, ")\n");
8282
8283 /* Print loop's body. */
8284 if (verbosity >= 1)
8285 {
8286 fprintf (file, "%s{\n", s_indent);
8287 FOR_EACH_BB_FN (bb, cfun)
8288 if (bb->loop_father == loop)
8289 print_loops_bb (file, bb, indent, verbosity);
8290
8291 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8292 fprintf (file, "%s}\n", s_indent);
8293 }
8294 }
8295
8296 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8297 spaces. Following VERBOSITY level this outputs the contents of the
8298 loop, or just its structure. */
8299
8300 static void
8301 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8302 int verbosity)
8303 {
8304 if (loop == NULL)
8305 return;
8306
8307 print_loop (file, loop, indent, verbosity);
8308 print_loop_and_siblings (file, loop->next, indent, verbosity);
8309 }
8310
8311 /* Follow a CFG edge from the entry point of the program, and on entry
8312 of a loop, pretty print the loop structure on FILE. */
8313
8314 void
8315 print_loops (FILE *file, int verbosity)
8316 {
8317 basic_block bb;
8318
8319 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8320 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8321 if (bb && bb->loop_father)
8322 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8323 }
8324
8325 /* Dump a loop. */
8326
8327 DEBUG_FUNCTION void
8328 debug (class loop &ref)
8329 {
8330 print_loop (stderr, &ref, 0, /*verbosity*/0);
8331 }
8332
8333 DEBUG_FUNCTION void
8334 debug (class loop *ptr)
8335 {
8336 if (ptr)
8337 debug (*ptr);
8338 else
8339 fprintf (stderr, "<nil>\n");
8340 }
8341
8342 /* Dump a loop verbosely. */
8343
8344 DEBUG_FUNCTION void
8345 debug_verbose (class loop &ref)
8346 {
8347 print_loop (stderr, &ref, 0, /*verbosity*/3);
8348 }
8349
8350 DEBUG_FUNCTION void
8351 debug_verbose (class loop *ptr)
8352 {
8353 if (ptr)
8354     debug_verbose (*ptr);
8355 else
8356 fprintf (stderr, "<nil>\n");
8357 }
8358
8359
8360 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8361
8362 DEBUG_FUNCTION void
8363 debug_loops (int verbosity)
8364 {
8365 print_loops (stderr, verbosity);
8366 }
8367
8368 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8369
8370 DEBUG_FUNCTION void
8371 debug_loop (class loop *loop, int verbosity)
8372 {
8373 print_loop (stderr, loop, 0, verbosity);
8374 }
8375
8376 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8377 level. */
8378
8379 DEBUG_FUNCTION void
8380 debug_loop_num (unsigned num, int verbosity)
8381 {
8382 debug_loop (get_loop (cfun, num), verbosity);
8383 }
8384
8385 /* Return true if BB ends with a call, possibly followed by some
8386    instructions that must stay with the call.  Return false
8387    otherwise.  */
8388
8389 static bool
8390 gimple_block_ends_with_call_p (basic_block bb)
8391 {
8392 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8393 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8394 }
8395
8396
8397 /* Return true if BB ends with a conditional branch.  Return false
8398    otherwise.  */
8399
8400 static bool
8401 gimple_block_ends_with_condjump_p (const_basic_block bb)
8402 {
8403 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8404 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8405 }
8406
8407
8408 /* Return true if statement T may terminate execution of BB in ways not
8409    explicitly represented in the CFG.  */
8410
8411 bool
8412 stmt_can_terminate_bb_p (gimple *t)
8413 {
8414 tree fndecl = NULL_TREE;
8415 int call_flags = 0;
8416
8417   /* An EH exception not handled internally terminates execution of the
8418      whole function.  */
8419 if (stmt_can_throw_external (cfun, t))
8420 return true;
8421
8422 /* NORETURN and LONGJMP calls already have an edge to exit.
8423 CONST and PURE calls do not need one.
8424 We don't currently check for CONST and PURE here, although
8425 it would be a good idea, because those attributes are
8426 figured out from the RTL in mark_constant_function, and
8427 the counter incrementation code from -fprofile-arcs
8428 leads to different results from -fbranch-probabilities. */
8429 if (is_gimple_call (t))
8430 {
8431 fndecl = gimple_call_fndecl (t);
8432 call_flags = gimple_call_flags (t);
8433 }
8434
8435 if (is_gimple_call (t)
8436 && fndecl
8437 && fndecl_built_in_p (fndecl)
8438 && (call_flags & ECF_NOTHROW)
8439 && !(call_flags & ECF_RETURNS_TWICE)
8440 /* fork() doesn't really return twice, but the effect of
8441 wrapping it in __gcov_fork() which calls __gcov_dump() and
8442 __gcov_reset() and clears the counters before forking has the same
8443 effect as returning twice. Force a fake edge. */
8444 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8445 return false;
8446
8447 if (is_gimple_call (t))
8448 {
8449 edge_iterator ei;
8450 edge e;
8451 basic_block bb;
8452
8453 if (call_flags & (ECF_PURE | ECF_CONST)
8454 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8455 return false;
8456
8457       /* A function call may do longjmp, terminate the program or do other
8458 	 things.  Special-case noreturn calls that have non-abnormal edges out,
8459 	 as in this case the fact is sufficiently represented by the lack of edges out of T.  */
8460 if (!(call_flags & ECF_NORETURN))
8461 return true;
8462
8463 bb = gimple_bb (t);
8464 FOR_EACH_EDGE (e, ei, bb->succs)
8465 if ((e->flags & EDGE_FAKE) == 0)
8466 return true;
8467 }
8468
8469 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8470 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8471 return true;
8472
8473 return false;
8474 }
8475
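/* A minimal usage sketch, assuming BB is some basic_block in the current
   function: scan it for statements that may cut execution of the block
   short, much as gimple_flow_call_edges_add below does:

     gimple_stmt_iterator gsi;
     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       if (stmt_can_terminate_bb_p (gsi_stmt (gsi)))
         break;

   After the loop, GSI points at the first such statement, if any.  */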
8476
8477 /* Add fake edges to the function exit for any non constant and non
8478 noreturn calls (or noreturn calls with EH/abnormal edges),
8479 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8480 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8481 that were split.
8482
8483 The goal is to expose cases in which entering a basic block does
8484 not imply that all subsequent instructions must be executed. */
8485
8486 static int
8487 gimple_flow_call_edges_add (sbitmap blocks)
8488 {
8489 int i;
8490 int blocks_split = 0;
8491 int last_bb = last_basic_block_for_fn (cfun);
8492 bool check_last_block = false;
8493
8494 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8495 return 0;
8496
8497 if (! blocks)
8498 check_last_block = true;
8499 else
8500 check_last_block = bitmap_bit_p (blocks,
8501 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8502
8503 /* In the last basic block, before epilogue generation, there will be
8504 a fallthru edge to EXIT. Special care is required if the last insn
8505 of the last basic block is a call because make_edge folds duplicate
8506 edges, which would result in the fallthru edge also being marked
8507 fake, which would result in the fallthru edge being removed by
8508 remove_fake_edges, which would result in an invalid CFG.
8509
8510 Moreover, we can't elide the outgoing fake edge, since the block
8511 profiler needs to take this into account in order to solve the minimal
8512 spanning tree in the case that the call doesn't return.
8513
8514 Handle this by adding a dummy instruction in a new last basic block. */
8515 if (check_last_block)
8516 {
8517 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8518 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8519 gimple *t = NULL;
8520
8521 if (!gsi_end_p (gsi))
8522 t = gsi_stmt (gsi);
8523
8524 if (t && stmt_can_terminate_bb_p (t))
8525 {
8526 edge e;
8527
8528 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8529 if (e)
8530 {
8531 gsi_insert_on_edge (e, gimple_build_nop ());
8532 gsi_commit_edge_inserts ();
8533 }
8534 }
8535 }
8536
8537 /* Now add fake edges to the function exit for any non constant
8538 calls since there is no way that we can determine if they will
8539 return or not... */
8540 for (i = 0; i < last_bb; i++)
8541 {
8542 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8543 gimple_stmt_iterator gsi;
8544 gimple *stmt, *last_stmt;
8545
8546 if (!bb)
8547 continue;
8548
8549 if (blocks && !bitmap_bit_p (blocks, i))
8550 continue;
8551
8552 gsi = gsi_last_nondebug_bb (bb);
8553 if (!gsi_end_p (gsi))
8554 {
8555 last_stmt = gsi_stmt (gsi);
8556 do
8557 {
8558 stmt = gsi_stmt (gsi);
8559 if (stmt_can_terminate_bb_p (stmt))
8560 {
8561 edge e;
8562
8563 /* The handling above of the final block before the
8564 epilogue should be enough to verify that there is
8565 no edge to the exit block in CFG already.
8566 Calling make_edge in such case would cause us to
8567 mark that edge as fake and remove it later. */
8568 if (flag_checking && stmt == last_stmt)
8569 {
8570 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8571 gcc_assert (e == NULL);
8572 }
8573
8574 /* Note that the following may create a new basic block
8575 and renumber the existing basic blocks. */
8576 if (stmt != last_stmt)
8577 {
8578 e = split_block (bb, stmt);
8579 if (e)
8580 blocks_split++;
8581 }
8582 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8583 e->probability = profile_probability::guessed_never ();
8584 }
8585 gsi_prev (&gsi);
8586 }
8587 while (!gsi_end_p (gsi));
8588 }
8589 }
8590
8591 if (blocks_split)
8592 checking_verify_flow_info ();
8593
8594 return blocks_split;
8595 }
8596
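/* As a concrete illustration of the transformation above, a block

     bb_1: s1; call foo (); s2;

   where foo () may not return is split after the call, and the first
   half gets a fake edge to EXIT:

     bb_1: s1; call foo ();   succs: bb_2, EXIT (EDGE_FAKE)
     bb_2: s2;

   The fake edge carries profile_probability::guessed_never (), so it
   does not perturb the real profile, and remove_fake_edges can undo
   it.  */
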
8597 /* Removes edge E and all the blocks dominated by it, and updates dominance
8598 information. The IL in E->src needs to be updated separately.
8599    If dominance info is not available, only the edge E is removed.  */
8600
8601 void
8602 remove_edge_and_dominated_blocks (edge e)
8603 {
8604 vec<basic_block> bbs_to_remove = vNULL;
8605 vec<basic_block> bbs_to_fix_dom = vNULL;
8606 edge f;
8607 edge_iterator ei;
8608 bool none_removed = false;
8609 unsigned i;
8610 basic_block bb, dbb;
8611 bitmap_iterator bi;
8612
8613   /* If we are removing a path inside a non-root loop, that may change
8614      loop ownership of blocks or remove loops.  Mark loops for fixup.  */
8615 if (current_loops
8616 && loop_outer (e->src->loop_father) != NULL
8617 && e->src->loop_father == e->dest->loop_father)
8618 loops_state_set (LOOPS_NEED_FIXUP);
8619
8620 if (!dom_info_available_p (CDI_DOMINATORS))
8621 {
8622 remove_edge (e);
8623 return;
8624 }
8625
8626 /* No updating is needed for edges to exit. */
8627 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8628 {
8629 if (cfgcleanup_altered_bbs)
8630 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8631 remove_edge (e);
8632 return;
8633 }
8634
8635 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8636 that is not dominated by E->dest, then this set is empty. Otherwise,
8637 all the basic blocks dominated by E->dest are removed.
8638
8639 Also, to DF_IDOM we store the immediate dominators of the blocks in
8640 the dominance frontier of E (i.e., of the successors of the
8641 removed blocks, if there are any, and of E->dest otherwise). */
8642 FOR_EACH_EDGE (f, ei, e->dest->preds)
8643 {
8644 if (f == e)
8645 continue;
8646
8647 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8648 {
8649 none_removed = true;
8650 break;
8651 }
8652 }
8653
8654 auto_bitmap df, df_idom;
8655 if (none_removed)
8656 bitmap_set_bit (df_idom,
8657 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8658 else
8659 {
8660 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8661 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8662 {
8663 FOR_EACH_EDGE (f, ei, bb->succs)
8664 {
8665 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8666 bitmap_set_bit (df, f->dest->index);
8667 }
8668 }
8669 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8670 bitmap_clear_bit (df, bb->index);
8671
8672 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8673 {
8674 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8675 bitmap_set_bit (df_idom,
8676 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8677 }
8678 }
8679
8680 if (cfgcleanup_altered_bbs)
8681 {
8682 /* Record the set of the altered basic blocks. */
8683 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8684 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8685 }
8686
8687 /* Remove E and the cancelled blocks. */
8688 if (none_removed)
8689 remove_edge (e);
8690 else
8691 {
8692 /* Walk backwards so as to get a chance to substitute all
8693 released DEFs into debug stmts. See
8694 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8695 details. */
8696 for (i = bbs_to_remove.length (); i-- > 0; )
8697 delete_basic_block (bbs_to_remove[i]);
8698 }
8699
8700 /* Update the dominance information. The immediate dominator may change only
8701 for blocks whose immediate dominator belongs to DF_IDOM:
8702
8703 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8704 removal. Let Z the arbitrary block such that idom(Z) = Y and
8705 Z dominates X after the removal. Before removal, there exists a path P
8706 from Y to X that avoids Z. Let F be the last edge on P that is
8707 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8708 dominates W, and because of P, Z does not dominate W), and W belongs to
8709 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8710 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8711 {
8712 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8713 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8714 dbb;
8715 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8716 bbs_to_fix_dom.safe_push (dbb);
8717 }
8718
8719 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8720
8721 bbs_to_remove.release ();
8722 bbs_to_fix_dom.release ();
8723 }
8724
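/* A small worked example of the dominance update above: in the CFG

     A -> B, A -> C, B -> C

   removing the edge A->B leaves B with no predecessors, so B (the only
   block dominated by it) is deleted.  The dominance frontier of B is
   {C}, and idom(C) = A, so DF_IDOM = {A}; iterate_fix_dominators then
   only has to revisit A's remaining dominator children, here C.  */
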
8725 /* Purge dead EH edges from basic block BB. */
8726
8727 bool
8728 gimple_purge_dead_eh_edges (basic_block bb)
8729 {
8730 bool changed = false;
8731 edge e;
8732 edge_iterator ei;
8733 gimple *stmt = last_stmt (bb);
8734
8735 if (stmt && stmt_can_throw_internal (cfun, stmt))
8736 return false;
8737
8738 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8739 {
8740 if (e->flags & EDGE_EH)
8741 {
8742 remove_edge_and_dominated_blocks (e);
8743 changed = true;
8744 }
8745 else
8746 ei_next (&ei);
8747 }
8748
8749 return changed;
8750 }
8751
8752 /* Purge dead EH edges from basic blocks listed in BLOCKS.  */
8753
8754 bool
8755 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8756 {
8757 bool changed = false;
8758 unsigned i;
8759 bitmap_iterator bi;
8760
8761 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8762 {
8763 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8764
8765 /* Earlier gimple_purge_dead_eh_edges could have removed
8766 this basic block already. */
8767 gcc_assert (bb || changed);
8768 if (bb != NULL)
8769 changed |= gimple_purge_dead_eh_edges (bb);
8770 }
8771
8772 return changed;
8773 }
8774
8775 /* Purge dead abnormal call edges from basic block BB. */
8776
8777 bool
8778 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8779 {
8780 bool changed = false;
8781 edge e;
8782 edge_iterator ei;
8783 gimple *stmt = last_stmt (bb);
8784
8785 if (!cfun->has_nonlocal_label
8786 && !cfun->calls_setjmp)
8787 return false;
8788
8789 if (stmt && stmt_can_make_abnormal_goto (stmt))
8790 return false;
8791
8792 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8793 {
8794 if (e->flags & EDGE_ABNORMAL)
8795 {
8796 if (e->flags & EDGE_FALLTHRU)
8797 e->flags &= ~EDGE_ABNORMAL;
8798 else
8799 remove_edge_and_dominated_blocks (e);
8800 changed = true;
8801 }
8802 else
8803 ei_next (&ei);
8804 }
8805
8806 return changed;
8807 }
8808
8809 /* Purge dead abnormal call edges from basic blocks listed in BLOCKS.  */
8810
8811 bool
8812 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8813 {
8814 bool changed = false;
8815 unsigned i;
8816 bitmap_iterator bi;
8817
8818 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8819 {
8820 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8821
8822 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8823 this basic block already. */
8824 gcc_assert (bb || changed);
8825 if (bb != NULL)
8826 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8827 }
8828
8829 return changed;
8830 }
8831
8832 /* This function is called whenever a new edge is created or
8833 redirected. */
8834
8835 static void
8836 gimple_execute_on_growing_pred (edge e)
8837 {
8838 basic_block bb = e->dest;
8839
8840 if (!gimple_seq_empty_p (phi_nodes (bb)))
8841 reserve_phi_args_for_new_edge (bb);
8842 }
8843
8844 /* This function is called immediately before edge E is removed from
8845 the edge vector E->dest->preds. */
8846
8847 static void
8848 gimple_execute_on_shrinking_pred (edge e)
8849 {
8850 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8851 remove_phi_args (e);
8852 }
8853
8854 /*---------------------------------------------------------------------------
8855 Helper functions for Loop versioning
8856 ---------------------------------------------------------------------------*/
8857
8858 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
8859    of 'first'.  Both of them are dominated by 'new_head' basic block.  When
8860    'new_head' was created by splitting 'second's incoming edge, it received
8861    phi arguments on that edge from split_edge().  Later, an additional edge
8862    'e' was created to connect 'new_head' and 'first'.  Now this routine adds
8863    to edge 'e' the phi args that the new_head-to-second edge received as
8864    part of edge splitting.  */
8865
8866 static void
8867 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8868 basic_block new_head, edge e)
8869 {
8870 gphi *phi1, *phi2;
8871 gphi_iterator psi1, psi2;
8872 tree def;
8873 edge e2 = find_edge (new_head, second);
8874
8875 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8876 edge, we should always have an edge from NEW_HEAD to SECOND. */
8877 gcc_assert (e2 != NULL);
8878
8879 /* Browse all 'second' basic block phi nodes and add phi args to
8880 edge 'e' for 'first' head. PHI args are always in correct order. */
8881
8882 for (psi2 = gsi_start_phis (second),
8883 psi1 = gsi_start_phis (first);
8884 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8885 gsi_next (&psi2), gsi_next (&psi1))
8886 {
8887 phi1 = psi1.phi ();
8888 phi2 = psi2.phi ();
8889 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8890 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8891 }
8892 }
8893
8894
8895 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8896    SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8897    the destination of the ELSE part.  */
8898
8899 static void
8900 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8901 basic_block second_head ATTRIBUTE_UNUSED,
8902 basic_block cond_bb, void *cond_e)
8903 {
8904 gimple_stmt_iterator gsi;
8905 gimple *new_cond_expr;
8906 tree cond_expr = (tree) cond_e;
8907 edge e0;
8908
8909 /* Build new conditional expr */
8910 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8911 NULL_TREE, NULL_TREE);
8912
8913 /* Add new cond in cond_bb. */
8914 gsi = gsi_last_bb (cond_bb);
8915 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8916
8917 /* Adjust edges appropriately to connect new head with first head
8918 as well as second head. */
8919 e0 = single_succ_edge (cond_bb);
8920 e0->flags &= ~EDGE_FALLTHRU;
8921 e0->flags |= EDGE_FALSE_VALUE;
8922 }
8923
8924
8925 /* Do book-keeping of basic block BB for the profile consistency checker.
8926 Store the counting in RECORD. */
8927 static void
8928 gimple_account_profile_record (basic_block bb,
8929 struct profile_record *record)
8930 {
8931 gimple_stmt_iterator i;
8932 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8933 {
8934 record->size
8935 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8936 if (bb->count.initialized_p ())
8937 record->time
8938 += estimate_num_insns (gsi_stmt (i),
8939 &eni_time_weights) * bb->count.to_gcov_type ();
8940 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8941 record->time
8942 += estimate_num_insns (gsi_stmt (i),
8943 &eni_time_weights) * bb->count.to_frequency (cfun);
8944 }
8945 }
8946
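/* The bookkeeping above is plain arithmetic.  For a block whose three
   statements have estimated sizes 1, 1 and 2 (and the same time weights)
   and whose count is 100, the record is updated as

     record->size += 1 + 1 + 2;           i.e. 4 units of code size
     record->time += (1 + 1 + 2) * 100;   i.e. 400 weighted time units

   The weights come from estimate_num_insns with eni_size_weights and
   eni_time_weights, so the concrete numbers here are only
   illustrative.  */
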
8947 struct cfg_hooks gimple_cfg_hooks = {
8948 "gimple",
8949 gimple_verify_flow_info,
8950 gimple_dump_bb, /* dump_bb */
8951 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8952 create_bb, /* create_basic_block */
8953 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8954 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8955 gimple_can_remove_branch_p, /* can_remove_branch_p */
8956 remove_bb, /* delete_basic_block */
8957 gimple_split_block, /* split_block */
8958 gimple_move_block_after, /* move_block_after */
8959 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8960 gimple_merge_blocks, /* merge_blocks */
8961 gimple_predict_edge, /* predict_edge */
8962 gimple_predicted_by_p, /* predicted_by_p */
8963 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8964 gimple_duplicate_bb, /* duplicate_block */
8965 gimple_split_edge, /* split_edge */
8966 gimple_make_forwarder_block, /* make_forward_block */
8967 NULL, /* tidy_fallthru_edge */
8968 NULL, /* force_nonfallthru */
8969 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8970 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8971 gimple_flow_call_edges_add, /* flow_call_edges_add */
8972 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8973 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8974 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8975 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8976 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8977 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8978 flush_pending_stmts, /* flush_pending_stmts */
8979 gimple_empty_block_p, /* block_empty_p */
8980 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8981 gimple_account_profile_record,
8982 };
8983
8984
8985 /* Split all critical edges. Split some extra (not necessarily critical) edges
8986 if FOR_EDGE_INSERTION_P is true. */
8987
8988 unsigned int
8989 split_critical_edges (bool for_edge_insertion_p /* = false */)
8990 {
8991 basic_block bb;
8992 edge e;
8993 edge_iterator ei;
8994
8995 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8996 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8997 mappings around the calls to split_edge. */
8998 start_recording_case_labels ();
8999 FOR_ALL_BB_FN (bb, cfun)
9000 {
9001 FOR_EACH_EDGE (e, ei, bb->succs)
9002 {
9003 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9004 split_edge (e);
9005 /* PRE inserts statements to edges and expects that
9006 since split_critical_edges was done beforehand, committing edge
9007 insertions will not split more edges. In addition to critical
9008 	     edges we must split edges whose source has multiple successors
9009 	     and ends with a control flow statement, such as RESX.
9010 Go ahead and split them too. This matches the logic in
9011 gimple_find_edge_insert_loc. */
9012 else if (for_edge_insertion_p
9013 && (!single_pred_p (e->dest)
9014 || !gimple_seq_empty_p (phi_nodes (e->dest))
9015 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9016 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9017 && !(e->flags & EDGE_ABNORMAL))
9018 {
9019 gimple_stmt_iterator gsi;
9020
9021 gsi = gsi_last_bb (e->src);
9022 if (!gsi_end_p (gsi)
9023 && stmt_ends_bb_p (gsi_stmt (gsi))
9024 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9025 && !gimple_call_builtin_p (gsi_stmt (gsi),
9026 BUILT_IN_RETURN)))
9027 split_edge (e);
9028 }
9029 }
9030 }
9031 end_recording_case_labels ();
9032 return 0;
9033 }
9034
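/* Recall that an edge is critical when its source has more than one
   successor and its destination more than one predecessor; there is no
   place on such an edge to insert code.  A minimal sketch of splitting
   one by hand, mirroring the loop above:

     if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
       {
         basic_block new_bb = split_edge (e);
       }

   split_edge redirects E to a fresh empty block NEW_BB and adds a
   fallthru edge from NEW_BB to the old destination; new statements can
   then be inserted into NEW_BB.  */
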
9035 namespace {
9036
9037 const pass_data pass_data_split_crit_edges =
9038 {
9039 GIMPLE_PASS, /* type */
9040 "crited", /* name */
9041 OPTGROUP_NONE, /* optinfo_flags */
9042 TV_TREE_SPLIT_EDGES, /* tv_id */
9043 PROP_cfg, /* properties_required */
9044 PROP_no_crit_edges, /* properties_provided */
9045 0, /* properties_destroyed */
9046 0, /* todo_flags_start */
9047 0, /* todo_flags_finish */
9048 };
9049
9050 class pass_split_crit_edges : public gimple_opt_pass
9051 {
9052 public:
9053 pass_split_crit_edges (gcc::context *ctxt)
9054 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9055 {}
9056
9057 /* opt_pass methods: */
9058 virtual unsigned int execute (function *) { return split_critical_edges (); }
9059
9060 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9061 }; // class pass_split_crit_edges
9062
9063 } // anon namespace
9064
9065 gimple_opt_pass *
9066 make_pass_split_crit_edges (gcc::context *ctxt)
9067 {
9068 return new pass_split_crit_edges (ctxt);
9069 }
9070
9071
9072 /* Insert COND expression which is GIMPLE_COND after STMT
9073 in basic block BB with appropriate basic block split
9074 and creation of a new conditionally executed basic block.
9075 Update profile so the new bb is visited with probability PROB.
9076 Return created basic block. */
9077 basic_block
9078 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9079 profile_probability prob)
9080 {
9081 edge fall = split_block (bb, stmt);
9082 gimple_stmt_iterator iter = gsi_last_bb (bb);
9083 basic_block new_bb;
9084
9085 /* Insert cond statement. */
9086 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9087 if (gsi_end_p (iter))
9088 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9089 else
9090 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9091
9092 /* Create conditionally executed block. */
9093 new_bb = create_empty_bb (bb);
9094 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9095 e->probability = prob;
9096 new_bb->count = e->count ();
9097 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9098
9099 /* Fix edge for split bb. */
9100 fall->flags = EDGE_FALSE_VALUE;
9101 fall->probability -= e->probability;
9102
9103 /* Update dominance info. */
9104 if (dom_info_available_p (CDI_DOMINATORS))
9105 {
9106 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9107 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9108 }
9109
9110 /* Update loop info. */
9111 if (current_loops)
9112 add_bb_to_loop (new_bb, bb->loop_father);
9113
9114 return new_bb;
9115 }
9116
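/* A minimal usage sketch, assuming VAL is a gimple value available after
   STMT in BB: guard newly inserted code so it only runs when VAL is
   nonzero, giving the guarded block an even probability:

     gcond *cond = gimple_build_cond (NE_EXPR, val, integer_zero_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb
       = insert_cond_bb (bb, stmt, cond, profile_probability::even ());

   Statements added to THEN_BB then execute only on the EDGE_TRUE_VALUE
   path.  */
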
9117 /* Build a ternary operation and gimplify it. Emit code before GSI.
9118 Return the gimple_val holding the result. */
9119
9120 tree
9121 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9122 tree type, tree a, tree b, tree c)
9123 {
9124 tree ret;
9125 location_t loc = gimple_location (gsi_stmt (*gsi));
9126
9127 ret = fold_build3_loc (loc, code, type, a, b, c);
9128 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9129 GSI_SAME_STMT);
9130 }
9131
9132 /* Build a binary operation and gimplify it. Emit code before GSI.
9133 Return the gimple_val holding the result. */
9134
9135 tree
9136 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9137 tree type, tree a, tree b)
9138 {
9139 tree ret;
9140
9141 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9142 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9143 GSI_SAME_STMT);
9144 }
9145
9146 /* Build a unary operation and gimplify it. Emit code before GSI.
9147 Return the gimple_val holding the result. */
9148
9149 tree
9150 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9151 tree a)
9152 {
9153 tree ret;
9154
9155 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9156 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9157 GSI_SAME_STMT);
9158 }
9159
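/* A minimal usage sketch for these helpers, assuming A and B are gimple
   values of type TYPE and GSI points at the statement before which the
   computation should happen:

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree neg = gimplify_build1 (&gsi, NEGATE_EXPR, type, sum);

   Each call folds the expression first and then forces the result into
   a gimple value, emitting any needed statements before GSI.  */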
9160
9161 \f
9162 /* Given a basic block B which ends with a conditional and has
9163 precisely two successors, determine which of the edges is taken if
9164 the conditional is true and which is taken if the conditional is
9165 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9166
9167 void
9168 extract_true_false_edges_from_block (basic_block b,
9169 edge *true_edge,
9170 edge *false_edge)
9171 {
9172 edge e = EDGE_SUCC (b, 0);
9173
9174 if (e->flags & EDGE_TRUE_VALUE)
9175 {
9176 *true_edge = e;
9177 *false_edge = EDGE_SUCC (b, 1);
9178 }
9179 else
9180 {
9181 *false_edge = e;
9182 *true_edge = EDGE_SUCC (b, 1);
9183 }
9184 }
9185
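/* A minimal usage sketch, assuming COND_BB ends in a GIMPLE_COND whose
   two outgoing edges carry EDGE_TRUE_VALUE/EDGE_FALSE_VALUE as usual:

     edge true_e, false_e;
     extract_true_false_edges_from_block (cond_bb, &true_e, &false_e);

   true_e->dest is then the block reached when the condition holds.  */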
9186
9187 /* From a controlling predicate in the immediate dominator DOM of
9188 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9189 predicate evaluates to true and false and store them to
9190 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9191 they are non-NULL. Returns true if the edges can be determined,
9192 else return false. */
9193
9194 bool
9195 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9196 edge *true_controlled_edge,
9197 edge *false_controlled_edge)
9198 {
9199 basic_block bb = phiblock;
9200 edge true_edge, false_edge, tem;
9201 edge e0 = NULL, e1 = NULL;
9202
9203 /* We have to verify that one edge into the PHI node is dominated
9204 by the true edge of the predicate block and the other edge
9205 dominated by the false edge. This ensures that the PHI argument
9206 we are going to take is completely determined by the path we
9207 take from the predicate block.
9208 We can only use BB dominance checks below if the destination of
9209 the true/false edges are dominated by their edge, thus only
9210 have a single predecessor. */
9211 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9212 tem = EDGE_PRED (bb, 0);
9213 if (tem == true_edge
9214 || (single_pred_p (true_edge->dest)
9215 && (tem->src == true_edge->dest
9216 || dominated_by_p (CDI_DOMINATORS,
9217 tem->src, true_edge->dest))))
9218 e0 = tem;
9219 else if (tem == false_edge
9220 || (single_pred_p (false_edge->dest)
9221 && (tem->src == false_edge->dest
9222 || dominated_by_p (CDI_DOMINATORS,
9223 tem->src, false_edge->dest))))
9224 e1 = tem;
9225 else
9226 return false;
9227 tem = EDGE_PRED (bb, 1);
9228 if (tem == true_edge
9229 || (single_pred_p (true_edge->dest)
9230 && (tem->src == true_edge->dest
9231 || dominated_by_p (CDI_DOMINATORS,
9232 tem->src, true_edge->dest))))
9233 e0 = tem;
9234 else if (tem == false_edge
9235 || (single_pred_p (false_edge->dest)
9236 && (tem->src == false_edge->dest
9237 || dominated_by_p (CDI_DOMINATORS,
9238 tem->src, false_edge->dest))))
9239 e1 = tem;
9240 else
9241 return false;
9242 if (!e0 || !e1)
9243 return false;
9244
9245 if (true_controlled_edge)
9246 *true_controlled_edge = e0;
9247 if (false_controlled_edge)
9248 *false_controlled_edge = e1;
9249
9250 return true;
9251 }
9252
9253 /* Generate a range test LHS <= RHS that determines whether INDEX is in the
9254    range [low, high].  Place associated stmts before the last stmt of BB.  */
9255
9256 void
9257 generate_range_test (basic_block bb, tree index, tree low, tree high,
9258 tree *lhs, tree *rhs)
9259 {
9260 tree type = TREE_TYPE (index);
9261 tree utype = range_check_type (type);
9262
9263 low = fold_convert (utype, low);
9264 high = fold_convert (utype, high);
9265
9266 gimple_seq seq = NULL;
9267 index = gimple_convert (&seq, utype, index);
9268 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9269 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9270
9271 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9272 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9273 }
9274
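/* Worked example: for the range [3, 10], the test is rewritten in the
   unsigned type UTYPE as

     *lhs = (utype) index - 3
     *rhs = 10 - 3 = 7

   and the caller emits *LHS <= *RHS.  For index < 3 the unsigned
   subtraction wraps around to a huge value, so this single unsigned
   comparison checks both bounds at once.  */
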
9275 /* Return the basic block that belongs to label numbered INDEX
9276 of a switch statement. */
9277
9278 basic_block
9279 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9280 {
9281 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9282 }
9283
9284 /* Return the default basic block of a switch statement. */
9285
9286 basic_block
9287 gimple_switch_default_bb (function *ifun, gswitch *gs)
9288 {
9289 return gimple_switch_label_bb (ifun, gs, 0);
9290 }
9291
9292 /* Return the edge that belongs to label numbered INDEX
9293 of a switch statement. */
9294
9295 edge
9296 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9297 {
9298 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9299 }
9300
9301 /* Return the default edge of a switch statement. */
9302
9303 edge
9304 gimple_switch_default_edge (function *ifun, gswitch *gs)
9305 {
9306 return gimple_switch_edge (ifun, gs, 0);
9307 }
9308
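/* A minimal usage sketch, assuming SWTCH is a gswitch in the current
   function.  Label 0 is the default, so the case edges proper are

     for (unsigned i = 1; i < gimple_switch_num_labels (swtch); ++i)
       {
         edge e = gimple_switch_edge (cfun, swtch, i);
         basic_block case_bb = e->dest;
       }

   while gimple_switch_default_edge (cfun, swtch) yields the default.  */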
9309
9310 /* Emit return warnings. */
9311
9312 namespace {
9313
9314 const pass_data pass_data_warn_function_return =
9315 {
9316 GIMPLE_PASS, /* type */
9317 "*warn_function_return", /* name */
9318 OPTGROUP_NONE, /* optinfo_flags */
9319 TV_NONE, /* tv_id */
9320 PROP_cfg, /* properties_required */
9321 0, /* properties_provided */
9322 0, /* properties_destroyed */
9323 0, /* todo_flags_start */
9324 0, /* todo_flags_finish */
9325 };
9326
9327 class pass_warn_function_return : public gimple_opt_pass
9328 {
9329 public:
9330 pass_warn_function_return (gcc::context *ctxt)
9331 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9332 {}
9333
9334 /* opt_pass methods: */
9335 virtual unsigned int execute (function *);
9336
9337 }; // class pass_warn_function_return
9338
9339 unsigned int
9340 pass_warn_function_return::execute (function *fun)
9341 {
9342 location_t location;
9343 gimple *last;
9344 edge e;
9345 edge_iterator ei;
9346
9347 if (!targetm.warn_func_return (fun->decl))
9348 return 0;
9349
9350 /* If we have a path to EXIT, then we do return. */
9351 if (TREE_THIS_VOLATILE (fun->decl)
9352 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9353 {
9354 location = UNKNOWN_LOCATION;
9355 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9356 (e = ei_safe_edge (ei)); )
9357 {
9358 last = last_stmt (e->src);
9359 if ((gimple_code (last) == GIMPLE_RETURN
9360 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9361 && location == UNKNOWN_LOCATION
9362 && ((location = LOCATION_LOCUS (gimple_location (last)))
9363 != UNKNOWN_LOCATION)
9364 && !optimize)
9365 break;
9366 /* When optimizing, replace return stmts in noreturn functions
9367 with __builtin_unreachable () call. */
9368 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9369 {
9370 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9371 gimple *new_stmt = gimple_build_call (fndecl, 0);
9372 gimple_set_location (new_stmt, gimple_location (last));
9373 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9374 gsi_replace (&gsi, new_stmt, true);
9375 remove_edge (e);
9376 }
9377 else
9378 ei_next (&ei);
9379 }
9380 if (location == UNKNOWN_LOCATION)
9381 location = cfun->function_end_locus;
9382 warning_at (location, 0, "%<noreturn%> function does return");
9383 }
9384
9385 /* If we see "return;" in some basic block, then we do reach the end
9386 without returning a value. */
9387 else if (warn_return_type > 0
9388 && !TREE_NO_WARNING (fun->decl)
9389 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9390 {
9391 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9392 {
9393 gimple *last = last_stmt (e->src);
9394 greturn *return_stmt = dyn_cast <greturn *> (last);
9395 if (return_stmt
9396 && gimple_return_retval (return_stmt) == NULL
9397 && !gimple_no_warning_p (last))
9398 {
9399 location = gimple_location (last);
9400 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9401 location = fun->function_end_locus;
9402 if (warning_at (location, OPT_Wreturn_type,
9403 "control reaches end of non-void function"))
9404 TREE_NO_WARNING (fun->decl) = 1;
9405 break;
9406 }
9407 }
9408       /* The C++ FE turns fallthrough from the end of a non-void function
9409 	 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9410 	 Recognize those too.  */
9411 basic_block bb;
9412 if (!TREE_NO_WARNING (fun->decl))
9413 FOR_EACH_BB_FN (bb, fun)
9414 if (EDGE_COUNT (bb->succs) == 0)
9415 {
9416 gimple *last = last_stmt (bb);
9417 const enum built_in_function ubsan_missing_ret
9418 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9419 if (last
9420 && ((LOCATION_LOCUS (gimple_location (last))
9421 == BUILTINS_LOCATION
9422 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9423 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9424 {
9425 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9426 gsi_prev_nondebug (&gsi);
9427 gimple *prev = gsi_stmt (gsi);
9428 if (prev == NULL)
9429 location = UNKNOWN_LOCATION;
9430 else
9431 location = gimple_location (prev);
9432 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9433 location = fun->function_end_locus;
9434 if (warning_at (location, OPT_Wreturn_type,
9435 "control reaches end of non-void function"))
9436 TREE_NO_WARNING (fun->decl) = 1;
9437 break;
9438 }
9439 }
9440 }
9441 return 0;
9442 }
9443
9444 } // anon namespace
9445
9446 gimple_opt_pass *
9447 make_pass_warn_function_return (gcc::context *ctxt)
9448 {
9449 return new pass_warn_function_return (ctxt);
9450 }
9451
9452 /* Walk a gimplified function and warn for functions whose return value is
9453 ignored and attribute((warn_unused_result)) is set. This is done before
9454 inlining, so we don't have to worry about that. */
9455
9456 static void
9457 do_warn_unused_result (gimple_seq seq)
9458 {
9459 tree fdecl, ftype;
9460 gimple_stmt_iterator i;
9461
9462 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9463 {
9464 gimple *g = gsi_stmt (i);
9465
9466 switch (gimple_code (g))
9467 {
9468 case GIMPLE_BIND:
9469 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9470 break;
9471 case GIMPLE_TRY:
9472 do_warn_unused_result (gimple_try_eval (g));
9473 do_warn_unused_result (gimple_try_cleanup (g));
9474 break;
9475 case GIMPLE_CATCH:
9476 do_warn_unused_result (gimple_catch_handler (
9477 as_a <gcatch *> (g)));
9478 break;
9479 case GIMPLE_EH_FILTER:
9480 do_warn_unused_result (gimple_eh_filter_failure (g));
9481 break;
9482
9483 case GIMPLE_CALL:
9484 if (gimple_call_lhs (g))
9485 break;
9486 if (gimple_call_internal_p (g))
9487 break;
9488
9489 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9490 LHS. All calls whose value is ignored should be
9491 represented like this. Look for the attribute. */
9492 fdecl = gimple_call_fndecl (g);
9493 ftype = gimple_call_fntype (g);
9494
9495 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9496 {
9497 location_t loc = gimple_location (g);
9498
9499 if (fdecl)
9500 warning_at (loc, OPT_Wunused_result,
9501 "ignoring return value of %qD "
9502 "declared with attribute %<warn_unused_result%>",
9503 fdecl);
9504 else
9505 warning_at (loc, OPT_Wunused_result,
9506 "ignoring return value of function "
9507 "declared with attribute %<warn_unused_result%>");
9508 }
9509 break;
9510
9511 default:
9512 /* Not a container, not a call, or a call whose value is used. */
9513 break;
9514 }
9515 }
9516 }
9517
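/* A source-level sketch of what this warns about (must_check is an
   illustrative name, not a real declaration):

     __attribute__ ((warn_unused_result)) int must_check (void);

     void
     f (void)
     {
       must_check ();        <- warning: return value ignored
       if (must_check ())    <- no warning: value is used
         return;
     }

   The first call becomes a naked GIMPLE_CALL without an LHS, which is
   exactly the pattern do_warn_unused_result looks for.  */
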
9518 namespace {
9519
9520 const pass_data pass_data_warn_unused_result =
9521 {
9522 GIMPLE_PASS, /* type */
9523 "*warn_unused_result", /* name */
9524 OPTGROUP_NONE, /* optinfo_flags */
9525 TV_NONE, /* tv_id */
9526 PROP_gimple_any, /* properties_required */
9527 0, /* properties_provided */
9528 0, /* properties_destroyed */
9529 0, /* todo_flags_start */
9530 0, /* todo_flags_finish */
9531 };
9532
9533 class pass_warn_unused_result : public gimple_opt_pass
9534 {
9535 public:
9536 pass_warn_unused_result (gcc::context *ctxt)
9537 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9538 {}
9539
9540 /* opt_pass methods: */
9541 virtual bool gate (function *) { return flag_warn_unused_result; }
9542 virtual unsigned int execute (function *)
9543 {
9544 do_warn_unused_result (gimple_body (current_function_decl));
9545 return 0;
9546 }
9547
9548 }; // class pass_warn_unused_result
9549
9550 } // anon namespace
9551
9552 gimple_opt_pass *
9553 make_pass_warn_unused_result (gcc::context *ctxt)
9554 {
9555 return new pass_warn_unused_result (ctxt);
9556 }
9557
9558 /* IPA passes, compilation of earlier functions or inlining
9559    might have changed some properties, such as marking functions nothrow,
9560    pure, const or noreturn.
9561    Remove redundant edges and basic blocks, and create new ones if necessary.
9562 
9563    This pass can't be executed as a standalone pass from the pass manager,
9564    because in between inlining and this fixup verify_flow_info would fail.  */
9565
9566 unsigned int
9567 execute_fixup_cfg (void)
9568 {
9569 basic_block bb;
9570 gimple_stmt_iterator gsi;
9571 int todo = 0;
9572 cgraph_node *node = cgraph_node::get (current_function_decl);
9573 /* Same scaling is also done by ipa_merge_profiles. */
9574 profile_count num = node->count;
9575 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9576 bool scale = num.initialized_p () && !(num == den);
9577
9578 if (scale)
9579 {
9580 profile_count::adjust_for_ipa_scaling (&num, &den);
9581 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9582 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9583 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9584 }
9585
9586 FOR_EACH_BB_FN (bb, cfun)
9587 {
9588 if (scale)
9589 bb->count = bb->count.apply_scale (num, den);
9590 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9591 {
9592 gimple *stmt = gsi_stmt (gsi);
9593 tree decl = is_gimple_call (stmt)
9594 ? gimple_call_fndecl (stmt)
9595 : NULL;
9596 if (decl)
9597 {
9598 int flags = gimple_call_flags (stmt);
9599 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9600 {
9601 if (gimple_purge_dead_abnormal_call_edges (bb))
9602 todo |= TODO_cleanup_cfg;
9603
9604 if (gimple_in_ssa_p (cfun))
9605 {
9606 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9607 update_stmt (stmt);
9608 }
9609 }
9610
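/* A call now known never to return should not keep an LHS or be
   followed by live code; fixup_noreturn_call cleans this up and
   returns true if anything changed.  */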
9611 if (flags & ECF_NORETURN
9612 && fixup_noreturn_call (stmt))
9613 todo |= TODO_cleanup_cfg;
9614 }
9615
9616 /* Remove stores to variables we marked write-only.
9617 Keep the access when the store has side effects, i.e. when
9618 the source is volatile. */
9619 if (gimple_store_p (stmt)
9620 && !gimple_has_side_effects (stmt)
9621 && !optimize_debug)
9622 {
9623 tree lhs = get_base_address (gimple_get_lhs (stmt));
9624
9625 if (VAR_P (lhs)
9626 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9627 && varpool_node::get (lhs)->writeonly)
9628 {
9629 unlink_stmt_vdef (stmt);
9630 gsi_remove (&gsi, true);
9631 release_defs (stmt);
9632 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9633 continue;
9634 }
9635 }
9636 /* For calls we can simply remove the LHS when it is known
9637 to be write-only. */
9638 if (is_gimple_call (stmt)
9639 && gimple_get_lhs (stmt))
9640 {
9641 tree lhs = get_base_address (gimple_get_lhs (stmt));
9642
9643 if (VAR_P (lhs)
9644 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9645 && varpool_node::get (lhs)->writeonly)
9646 {
9647 gimple_call_set_lhs (stmt, NULL);
9648 update_stmt (stmt);
9649 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9650 }
9651 }
9652
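/* A statement that was assumed to throw may no longer throw (for
   instance its callee became nothrow); drop its stale EH region and
   purge any EH edges that died with it.  */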
9653 if (maybe_clean_eh_stmt (stmt)
9654 && gimple_purge_dead_eh_edges (bb))
9655 todo |= TODO_cleanup_cfg;
9656 gsi_next (&gsi);
9657 }
9658
9659 /* If we have a basic block with no successors that does not
9660 end with a control statement or a noreturn call, end it with
9661 a call to __builtin_unreachable (sketch after this function).
9662 This can occur when inlining a noreturn call that does in fact return. */
9663 if (EDGE_COUNT (bb->succs) == 0)
9664 {
9665 gimple *stmt = last_stmt (bb);
9666 if (!stmt
9667 || (!is_ctrl_stmt (stmt)
9668 && (!is_gimple_call (stmt)
9669 || !gimple_call_noreturn_p (stmt))))
9670 {
9671 if (stmt && is_gimple_call (stmt))
9672 gimple_call_set_ctrl_altering (stmt, false);
9673 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9674 stmt = gimple_build_call (fndecl, 0);
9675 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9676 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9677 if (!cfun->after_inlining)
9678 {
9679 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9680 node->create_edge (cgraph_node::get_create (fndecl),
9681 call_stmt, bb->count);
9682 }
9683 }
9684 }
9685 }
9686 if (scale)
9687 {
9688 update_max_bb_count ();
9689 compute_function_frequency ();
9690 }
9691
9692 if (current_loops
9693 && (todo & TODO_cleanup_cfg))
9694 loops_state_set (LOOPS_NEED_FIXUP);
9695
9696 return todo;
9697 }
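
/* Sketch of the no-successor case handled above, using hypothetical
   user code (not part of this file):

     __attribute__ ((noreturn)) void fatal (void);
     void fatal (void) { }              <-- declared noreturn, yet returns
     void caller (void) { fatal (); }

   Calls to noreturn functions get no fallthrough edge, so once
   fatal () is inlined into caller (), its body ends in a block with
   no successors and no control statement; the loop above terminates
   such a block with __builtin_unreachable ().  */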
9698
9699 namespace {
9700
9701 const pass_data pass_data_fixup_cfg =
9702 {
9703 GIMPLE_PASS, /* type */
9704 "fixup_cfg", /* name */
9705 OPTGROUP_NONE, /* optinfo_flags */
9706 TV_NONE, /* tv_id */
9707 PROP_cfg, /* properties_required */
9708 0, /* properties_provided */
9709 0, /* properties_destroyed */
9710 0, /* todo_flags_start */
9711 0, /* todo_flags_finish */
9712 };
9713
9714 class pass_fixup_cfg : public gimple_opt_pass
9715 {
9716 public:
9717 pass_fixup_cfg (gcc::context *ctxt)
9718 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9719 {}
9720
9721 /* opt_pass methods: */
9722 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9723 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9724
9725 }; // class pass_fixup_cfg
9726
9727 } // anon namespace
9728
9729 gimple_opt_pass *
9730 make_pass_fixup_cfg (gcc::context *ctxt)
9731 {
9732 return new pass_fixup_cfg (ctxt);
9733 }
9734
9735 /* Garbage collection support for edge_def. */
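
/* These walkers mark everything GC-reachable from an edge -- its
   source and destination blocks, any pending statements or insns on
   the edge, and the BLOCK of its goto locus -- so the collector keeps
   them live.  Which member of the insns union is valid depends on
   whether we are in GIMPLE or RTL.  */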
9736
9737 extern void gt_ggc_mx (tree&);
9738 extern void gt_ggc_mx (gimple *&);
9739 extern void gt_ggc_mx (rtx&);
9740 extern void gt_ggc_mx (basic_block&);
9741
9742 static void
9743 gt_ggc_mx (rtx_insn *& x)
9744 {
9745 if (x)
9746 gt_ggc_mx_rtx_def ((void *) x);
9747 }
9748
9749 void
9750 gt_ggc_mx (edge_def *e)
9751 {
9752 tree block = LOCATION_BLOCK (e->goto_locus);
9753 gt_ggc_mx (e->src);
9754 gt_ggc_mx (e->dest);
9755 if (current_ir_type () == IR_GIMPLE)
9756 gt_ggc_mx (e->insns.g);
9757 else
9758 gt_ggc_mx (e->insns.r);
9759 gt_ggc_mx (block);
9760 }
9761
9762 /* PCH support for edge_def. */
9763
9764 extern void gt_pch_nx (tree&);
9765 extern void gt_pch_nx (gimple *&);
9766 extern void gt_pch_nx (rtx&);
9767 extern void gt_pch_nx (basic_block&);
9768
9769 static void
9770 gt_pch_nx (rtx_insn *& x)
9771 {
9772 if (x)
9773 gt_pch_nx_rtx_def ((void *) x);
9774 }
9775
9776 void
9777 gt_pch_nx (edge_def *e)
9778 {
9779 tree block = LOCATION_BLOCK (e->goto_locus);
9780 gt_pch_nx (e->src);
9781 gt_pch_nx (e->dest);
9782 if (current_ir_type () == IR_GIMPLE)
9783 gt_pch_nx (e->insns.g);
9784 else
9785 gt_pch_nx (e->insns.r);
9786 gt_pch_nx (block);
9787 }
9788
9789 void
9790 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9791 {
9792 tree block = LOCATION_BLOCK (e->goto_locus);
9793 op (&(e->src), cookie);
9794 op (&(e->dest), cookie);
9795 if (current_ir_type () == IR_GIMPLE)
9796 op (&(e->insns.g), cookie);
9797 else
9798 op (&(e->insns.r), cookie);
9799 op (&(block), cookie);
9800 }
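
/* The variant above is used when writing a PCH image: OP is applied
   to each pointer field in place, typically to relocate addresses.  */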
9801
9802 #if CHECKING_P
9803
9804 namespace selftest {
9805
9806 /* Helper function for CFG selftests: create a dummy function decl
9807 and push it as cfun. */
9808
9809 static tree
9810 push_fndecl (const char *name)
9811 {
9812 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9813 /* FIXME: this uses input_location: */
9814 tree fndecl = build_fn_decl (name, fn_type);
9815 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9816 NULL_TREE, integer_type_node);
9817 DECL_RESULT (fndecl) = retval;
9818 push_struct_function (fndecl);
9819 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9820 ASSERT_TRUE (fun != NULL);
9821 init_empty_tree_cfg_for_function (fun);
9822 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9823 ASSERT_EQ (0, n_edges_for_fn (fun));
9824 return fndecl;
9825 }
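
/* The two blocks asserted above are just ENTRY and EXIT;
   init_empty_tree_cfg_for_function creates no others and no edges,
   which is why each test below builds its own blocks and edges.  */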
9826
9827 /* These tests directly create CFGs.
9828 Compare with the static functions within tree-cfg.c:
9829 - build_gimple_cfg
9830 - make_blocks: calls create_basic_block (seq, bb);
9831 - make_edges. */
9832
9833 /* Verify a simple cfg of the form:
9834 ENTRY -> A -> B -> C -> EXIT. */
9835
9836 static void
9837 test_linear_chain ()
9838 {
9839 gimple_register_cfg_hooks ();
9840
9841 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9842 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9843
9844 /* Create some empty blocks. */
9845 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9846 basic_block bb_b = create_empty_bb (bb_a);
9847 basic_block bb_c = create_empty_bb (bb_b);
9848
9849 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9850 ASSERT_EQ (0, n_edges_for_fn (fun));
9851
9852 /* Create some edges: a simple linear chain of BBs. */
9853 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9854 make_edge (bb_a, bb_b, 0);
9855 make_edge (bb_b, bb_c, 0);
9856 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9857
9858 /* Verify the edges. */
9859 ASSERT_EQ (4, n_edges_for_fn (fun));
9860 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9861 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9862 ASSERT_EQ (1, bb_a->preds->length ());
9863 ASSERT_EQ (1, bb_a->succs->length ());
9864 ASSERT_EQ (1, bb_b->preds->length ());
9865 ASSERT_EQ (1, bb_b->succs->length ());
9866 ASSERT_EQ (1, bb_c->preds->length ());
9867 ASSERT_EQ (1, bb_c->succs->length ());
9868 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9869 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9870
9871 /* Verify the dominance information.
9872 Each BB in our simple chain should be dominated by the one before
9873 it. */
9874 calculate_dominance_info (CDI_DOMINATORS);
9875 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9876 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9877 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9878 ASSERT_EQ (1, dom_by_b.length ());
9879 ASSERT_EQ (bb_c, dom_by_b[0]);
9880 free_dominance_info (CDI_DOMINATORS);
9881 dom_by_b.release ();
9882
9883 /* Similarly for post-dominance: each BB in our chain is post-dominated
9884 by the one after it. */
9885 calculate_dominance_info (CDI_POST_DOMINATORS);
9886 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9887 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9888 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9889 ASSERT_EQ (1, postdom_by_b.length ());
9890 ASSERT_EQ (bb_a, postdom_by_b[0]);
9891 free_dominance_info (CDI_POST_DOMINATORS);
9892 postdom_by_b.release ();
9893
9894 pop_cfun ();
9895 }
9896
9897 /* Verify a simple CFG of the form:
9898 ENTRY
9899 |
9900 A
9901 / \
9902 /t \f
9903 B C
9904 \ /
9905 \ /
9906 D
9907 |
9908 EXIT. */
9909
9910 static void
9911 test_diamond ()
9912 {
9913 gimple_register_cfg_hooks ();
9914
9915 tree fndecl = push_fndecl ("cfg_test_diamond");
9916 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9917
9918 /* Create some empty blocks. */
9919 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9920 basic_block bb_b = create_empty_bb (bb_a);
9921 basic_block bb_c = create_empty_bb (bb_a);
9922 basic_block bb_d = create_empty_bb (bb_b);
9923
9924 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9925 ASSERT_EQ (0, n_edges_for_fn (fun));
9926
9927 /* Create the edges. */
9928 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9929 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9930 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9931 make_edge (bb_b, bb_d, 0);
9932 make_edge (bb_c, bb_d, 0);
9933 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9934
9935 /* Verify the edges. */
9936 ASSERT_EQ (6, n_edges_for_fn (fun));
9937 ASSERT_EQ (1, bb_a->preds->length ());
9938 ASSERT_EQ (2, bb_a->succs->length ());
9939 ASSERT_EQ (1, bb_b->preds->length ());
9940 ASSERT_EQ (1, bb_b->succs->length ());
9941 ASSERT_EQ (1, bb_c->preds->length ());
9942 ASSERT_EQ (1, bb_c->succs->length ());
9943 ASSERT_EQ (2, bb_d->preds->length ());
9944 ASSERT_EQ (1, bb_d->succs->length ());
9945
9946 /* Verify the dominance information. */
9947 calculate_dominance_info (CDI_DOMINATORS);
9948 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9949 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9950 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9951 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9952 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9953 dom_by_a.release ();
9954 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9955 ASSERT_EQ (0, dom_by_b.length ());
9956 dom_by_b.release ();
9957 free_dominance_info (CDI_DOMINATORS);
9958
9959 /* Similarly for post-dominance. */
9960 calculate_dominance_info (CDI_POST_DOMINATORS);
9961 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9962 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9963 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9964 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9965 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9966 postdom_by_d.release ();
9967 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9968 ASSERT_EQ (0, postdom_by_b.length ());
9969 postdom_by_b.release ();
9970 free_dominance_info (CDI_POST_DOMINATORS);
9971
9972 pop_cfun ();
9973 }
9974
9975 /* Verify that we can handle a CFG containing a "complete", i.e.
9976 fully-connected, subgraph (where each of A, B, C and D below has
9977 edges pointing to every other node, and also to itself).
9978 e.g.:
9979 ENTRY EXIT
9980 | ^
9981 | /
9982 | /
9983 | /
9984 V/
9985 A<--->B
9986 ^^ ^^
9987 | \ / |
9988 | X |
9989 | / \ |
9990 VV VV
9991 C<--->D
9992 */
9993
9994 static void
9995 test_fully_connected ()
9996 {
9997 gimple_register_cfg_hooks ();
9998
9999 tree fndecl = push_fndecl ("cfg_fully_connected");
10000 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10001
10002 const int n = 4;
10003
10004 /* Create some empty blocks. */
10005 auto_vec <basic_block> subgraph_nodes;
10006 for (int i = 0; i < n; i++)
10007 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10008
10009 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10010 ASSERT_EQ (0, n_edges_for_fn (fun));
10011
10012 /* Create the edges. */
10013 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10014 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10015 for (int i = 0; i < n; i++)
10016 for (int j = 0; j < n; j++)
10017 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10018
10019 /* Verify the edges. */
10020 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10021 /* The first one is linked to ENTRY/EXIT as well as itself and
10022 everything else. */
10023 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10024 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10025 /* The other ones in the subgraph are linked to everything in
10026 the subgraph (including themselves). */
10027 for (int i = 1; i < n; i++)
10028 {
10029 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10030 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10031 }
10032
10033 /* Verify the dominance information. */
10034 calculate_dominance_info (CDI_DOMINATORS);
10035 /* The initial block in the subgraph should be dominated by ENTRY. */
10036 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10037 get_immediate_dominator (CDI_DOMINATORS,
10038 subgraph_nodes[0]));
10039 /* Every other block in the subgraph should be dominated by the
10040 initial block. */
10041 for (int i = 1; i < n; i++)
10042 ASSERT_EQ (subgraph_nodes[0],
10043 get_immediate_dominator (CDI_DOMINATORS,
10044 subgraph_nodes[i]));
10045 free_dominance_info (CDI_DOMINATORS);
10046
10047 /* Similarly for post-dominance. */
10048 calculate_dominance_info (CDI_POST_DOMINATORS);
10049 /* The initial block in the subgraph should be postdominated by EXIT. */
10050 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10051 get_immediate_dominator (CDI_POST_DOMINATORS,
10052 subgraph_nodes[0]));
10053 /* Every other block in the subgraph should be postdominated by the
10054 initial block, since that leads to EXIT. */
10055 for (int i = 1; i < n; i++)
10056 ASSERT_EQ (subgraph_nodes[0],
10057 get_immediate_dominator (CDI_POST_DOMINATORS,
10058 subgraph_nodes[i]));
10059 free_dominance_info (CDI_POST_DOMINATORS);
10060
10061 pop_cfun ();
10062 }
10063
10064 /* Run all of the selftests within this file. */
10065
10066 void
10067 tree_cfg_c_tests ()
10068 {
10069 test_linear_chain ();
10070 test_diamond ();
10071 test_fully_connected ();
10072 }
10073
10074 } // namespace selftest
10075
10076 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10077 - loop
10078 - nested loops
10079 - switch statement (a block with many out-edges)
10080 - something that jumps to itself
10081 - etc */
10082
10083 #endif /* CHECKING_P */