gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001-2019 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-general.h"
#include "omp-expand.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"
#include "opts.h"
#include "asan.h"
#include "profile.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  int location_line;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return item->location_line;
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);

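/* Set up an empty CFG for function FN: just the fixed ENTRY and EXIT
   blocks, linked directly to each other.  */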
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

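/* Likewise, but for the current function CFUN.  */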
void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}

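/* Pass driver: build the CFG for the current function's gimple body,
   drop the body (its statements now live in the CFG), clean up, and
   initialize the loop structures.  */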
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable ().  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
	 as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
	  && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  /* Look for debug markers right before labels, and move the debug
     stmts after the labels.  Accepting labels among debug markers
     adds no value, just complexity; if we wanted to annotate labels
     with view numbers (so sequencing among markers would matter) or
     somesuch, we're probably better off still moving the labels, but
     adding other debug annotations in their original positions or
     emitting nonbind or bind markers associated with the labels in
     the original position of the labels.

     Moving labels would probably be simpler, but we can't do that:
     moving labels assigns label ids to them, and doing so because of
     debug markers makes for -fcompare-debug and possibly even codegen
     differences.  So, we have to move the debug stmts instead.  To
     that end, we scan SEQ backwards, marking the position of the
     latest (earliest we find) label, and moving debug stmts that are
     not separated from it by nondebug nonlabel stmts after the
     label.  */
  if (MAY_HAVE_DEBUG_MARKER_STMTS)
    {
      gimple_stmt_iterator label = gsi_none ();

      for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
	{
	  gimple *stmt = gsi_stmt (i);

	  /* If this is the first label we encounter (latest in SEQ)
	     before nondebug stmts, record its position.  */
	  if (is_a <glabel *> (stmt))
	    {
	      if (gsi_end_p (label))
		label = i;
	      continue;
	    }

	  /* Without a recorded label position to move debug stmts to,
	     there's nothing to do.  */
	  if (gsi_end_p (label))
	    continue;

	  /* Move the debug stmt at I after LABEL.  */
	  if (is_gimple_debug (stmt))
	    {
	      gcc_assert (gimple_debug_nonbind_marker_p (stmt));
	      /* As STMT is removed, I advances to the stmt after
		 STMT, so the gsi_prev in the for "increment"
		 expression gets us to the stmt we're to visit after
		 STMT.  LABEL, however, would advance to the moved
		 stmt if we passed it to gsi_move_after, so pass it a
		 copy instead, so as to keep LABEL pointing to the
		 LABEL.  */
	      gimple_stmt_iterator copy = label;
	      gsi_move_after (&i, &copy);
	      continue;
	    }

	  /* There aren't any (more?) debug stmts before label, so
	     there isn't anything else to move after it.  */
	  label = gsi_none ();
	}
    }

  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
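      /* Grow by roughly 25% (rounding up), so repeated block creation
	 amortizes the cost of reallocating the array.  */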
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block containing
   an IFN_ABNORMAL_DISPATCHER internal call, return the dispatcher's
   basic block; otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block containing an
   ABNORMAL_DISPATCHER internal call if needed, and create abnormal
   edges from BBS to it and from it to FOR_BB if COMPUTED_GOTO is
   false; otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
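	  /* The dispatcher call's argument records whether it sits inside
	     an OpenMP region (a nonzero bb_to_omp_idx).  */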
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Create outgoing edges for BB.  Return 1 when it ends with a
   computed goto, 2 when it ends with a statement that might return
   to this function via a nonlocal goto, and 0 otherwise.  Update
   *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}

/* Add SEQ after GSI.  Start a new bb after GSI, and create further
   bbs as needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      edge e;
      edge_iterator ei;
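      /* Sum the incoming edge counts, noting whether every edge had an
	 initialized count; only then (or with a read profile) is the sum
	 used as the block count below.  */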
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for source line LINE.
   The discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, expanded_location *from, location_t locus2)
{
  expanded_location to;

  if (locus1 == locus2)
    return true;

  to = expand_location (locus2);

  if (from->line != to.line)
    return false;
  if (from->file == to.file)
    return true;
  return (from->file != NULL
	  && to.file != NULL
	  && filename_cmp (from->file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
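	      /* If the successor already has a discriminator, give BB one
		 instead, so the two blocks sharing the line stay
		 distinguishable.  */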
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (cfun, dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (cfun, label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (cfun, label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;

  default_bb = gimple_switch_default_bb (cfun, stmt);

  /* Look for possible opportunities to merge cases.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (cfun, CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
1747 if (base_bb == NULL || base_bb == default_bb)
1748 {
1749 i++;
1750 continue;
1751 }
1752
1753 base_high = CASE_HIGH (base_case)
1754 ? CASE_HIGH (base_case)
1755 : CASE_LOW (base_case);
1756 next_index = i + 1;
1757
1758 /* Try to merge case labels. Break out when we reach the end
1759 of the label vector or when we cannot merge the next case
1760 label with the current one. */
1761 while (next_index < old_size)
1762 {
1763 tree merge_case = gimple_switch_label (stmt, next_index);
1764 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1765 wide_int bhp1 = wi::to_wide (base_high) + 1;
1766
1767 /* Merge the cases if they jump to the same place,
1768 and their ranges are consecutive. */
1769 if (merge_bb == base_bb
1770 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1771 {
1772 base_high = CASE_HIGH (merge_case) ?
1773 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1774 CASE_HIGH (base_case) = base_high;
1775 next_index++;
1776 }
1777 else
1778 break;
1779 }
1780
1781 /* Discard cases that have an unreachable destination block. */
1782 if (EDGE_COUNT (base_bb->succs) == 0
1783 && gimple_seq_unreachable_p (bb_seq (base_bb))
1784 /* Don't optimize this if __builtin_unreachable () is the
1785 implicitly added one by the C++ FE too early, before
1786 -Wreturn-type can be diagnosed. We'll optimize it later
1787 during switchconv pass or any other cfg cleanup. */
1788 && (gimple_in_ssa_p (cfun)
1789 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1790 != BUILTINS_LOCATION)))
1791 {
1792 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1793 if (base_edge != NULL)
1794 remove_edge_and_dominated_blocks (base_edge);
1795 i = next_index;
1796 continue;
1797 }
1798
1799 if (new_size < i)
1800 gimple_switch_set_label (stmt, new_size,
1801 gimple_switch_label (stmt, i));
1802 i = next_index;
1803 new_size++;
1804 }
1805
1806 gcc_assert (new_size <= old_size);
1807
1808 if (new_size < old_size)
1809 gimple_switch_set_num_labels (stmt, new_size);
1810
1811 return new_size < old_size;
1812 }
1813
1814 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1815 and scan the sorted vector of cases. Combine the ones jumping to the
1816 same label. */
1817
1818 bool
1819 group_case_labels (void)
1820 {
1821 basic_block bb;
1822 bool changed = false;
1823
1824 FOR_EACH_BB_FN (bb, cfun)
1825 {
1826 gimple *stmt = last_stmt (bb);
1827 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1828 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1829 }
1830
1831 return changed;
1832 }
1833
1834 /* Checks whether we can merge block B into block A. */
1835
1836 static bool
1837 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1838 {
1839 gimple *stmt;
1840
1841 if (!single_succ_p (a))
1842 return false;
1843
1844 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1845 return false;
1846
1847 if (single_succ (a) != b)
1848 return false;
1849
1850 if (!single_pred_p (b))
1851 return false;
1852
1853 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1854 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1855 return false;
1856
1857 /* If A ends by a statement causing exceptions or something similar, we
1858 cannot merge the blocks. */
1859 stmt = last_stmt (a);
1860 if (stmt && stmt_ends_bb_p (stmt))
1861 return false;
1862
1863 /* Do not allow a block with only a non-local label to be merged. */
1864 if (stmt)
1865 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1866 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1867 return false;
1868
1869 /* Examine the labels at the beginning of B. */
1870 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1871 gsi_next (&gsi))
1872 {
1873 tree lab;
1874 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1875 if (!label_stmt)
1876 break;
1877 lab = gimple_label_label (label_stmt);
1878
1879 /* Do not remove user forced labels or for -O0 any user labels. */
1880 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1881 return false;
1882 }
1883
1884 /* Protect simple loop latches. We only want to avoid merging
1885 the latch with the loop header or with a block in another
1886 loop in this case. */
1887 if (current_loops
1888 && b->loop_father->latch == b
1889 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1890 && (b->loop_father->header == a
1891 || b->loop_father != a->loop_father))
1892 return false;
1893
1894 /* It must be possible to eliminate all phi nodes in B. If ssa form
1895 is not up-to-date and a name-mapping is registered, we cannot eliminate
1896 any phis. Symbols marked for renaming are never a problem though. */
1897 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1898 gsi_next (&gsi))
1899 {
1900 gphi *phi = gsi.phi ();
1901 /* Technically only new names matter. */
1902 if (name_registered_for_update_p (PHI_RESULT (phi)))
1903 return false;
1904 }
1905
1906 /* When not optimizing, don't merge if we'd lose goto_locus. */
1907 if (!optimize
1908 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1909 {
1910 location_t goto_locus = single_succ_edge (a)->goto_locus;
1911 gimple_stmt_iterator prev, next;
1912 prev = gsi_last_nondebug_bb (a);
1913 next = gsi_after_labels (b);
1914 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1915 gsi_next_nondebug (&next);
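/* Merging is still OK if either the last real statement of A or the
   first real statement of B already carries the goto_locus.  */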
1916 if ((gsi_end_p (prev)
1917 || gimple_location (gsi_stmt (prev)) != goto_locus)
1918 && (gsi_end_p (next)
1919 || gimple_location (gsi_stmt (next)) != goto_locus))
1920 return false;
1921 }
1922
1923 return true;
1924 }
1925
1926 /* Replaces all uses of NAME by VAL. */
1927
1928 void
1929 replace_uses_by (tree name, tree val)
1930 {
1931 imm_use_iterator imm_iter;
1932 use_operand_p use;
1933 gimple *stmt;
1934 edge e;
1935
1936 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1937 {
1938 /* Mark the block if we change the last stmt in it. */
1939 if (cfgcleanup_altered_bbs
1940 && stmt_ends_bb_p (stmt))
1941 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1942
1943 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1944 {
1945 replace_exp (use, val);
1946
1947 if (gimple_code (stmt) == GIMPLE_PHI)
1948 {
1949 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1950 PHI_ARG_INDEX_FROM_USE (use));
1951 if (e->flags & EDGE_ABNORMAL
1952 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1953 {
1954 /* This can only occur for virtual operands, since
1955 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1956 would prevent replacement. */
1957 gcc_checking_assert (virtual_operand_p (name));
1958 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1959 }
1960 }
1961 }
1962
1963 if (gimple_code (stmt) != GIMPLE_PHI)
1964 {
1965 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1966 gimple *orig_stmt = stmt;
1967 size_t i;
1968
1969 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1970 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1971 only change something from non-invariant to invariant, and only
1972 when propagating constants. */
1973 if (is_gimple_min_invariant (val))
1974 for (i = 0; i < gimple_num_ops (stmt); i++)
1975 {
1976 tree op = gimple_op (stmt, i);
1977 /* Operands may be empty here. For example, the labels
1978 of a GIMPLE_COND are nulled out following the creation
1979 of the corresponding CFG edges. */
1980 if (op && TREE_CODE (op) == ADDR_EXPR)
1981 recompute_tree_invariant_for_addr_expr (op);
1982 }
1983
1984 if (fold_stmt (&gsi))
1985 stmt = gsi_stmt (gsi);
1986
1987 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1988 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1989
1990 update_stmt (stmt);
1991 }
1992 }
1993
1994 gcc_checking_assert (has_zero_uses (name));
1995
1996 /* Also update the trees stored in loop structures. */
1997 if (current_loops)
1998 {
1999 struct loop *loop;
2000
2001 FOR_EACH_LOOP (loop, 0)
2002 {
2003 substitute_in_loop_info (loop, name, val);
2004 }
2005 }
2006 }
2007
2008 /* Merge block B into block A. */
2009
2010 static void
2011 gimple_merge_blocks (basic_block a, basic_block b)
2012 {
2013 gimple_stmt_iterator last, gsi;
2014 gphi_iterator psi;
2015
2016 if (dump_file)
2017 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2018
2019 /* Remove all single-valued PHI nodes from block B of the form
2020 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2021 gsi = gsi_last_bb (a);
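/* GSI points at the end of A; copies replacing non-propagatable PHIs
   are appended there.  */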
2022 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2023 {
2024 gimple *phi = gsi_stmt (psi);
2025 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2026 gimple *copy;
2027 bool may_replace_uses = (virtual_operand_p (def)
2028 || may_propagate_copy (def, use));
2029
2030 /* If we maintain loop closed ssa form, do not propagate arguments
2031 of loop exit phi nodes. */
2032 if (current_loops
2033 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2034 && !virtual_operand_p (def)
2035 && TREE_CODE (use) == SSA_NAME
2036 && a->loop_father != b->loop_father)
2037 may_replace_uses = false;
2038
2039 if (!may_replace_uses)
2040 {
2041 gcc_assert (!virtual_operand_p (def));
2042
2043 /* Note that just emitting the copies is fine -- there is no problem
2044 with ordering of phi nodes. This is because A is the single
2045 predecessor of B, therefore results of the phi nodes cannot
2046 appear as arguments of the phi nodes. */
2047 copy = gimple_build_assign (def, use);
2048 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2049 remove_phi_node (&psi, false);
2050 }
2051 else
2052 {
2053 /* If we deal with a PHI for virtual operands, we can simply
2054 propagate these without fussing with folding or updating
2055 the stmt. */
2056 if (virtual_operand_p (def))
2057 {
2058 imm_use_iterator iter;
2059 use_operand_p use_p;
2060 gimple *stmt;
2061
2062 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2063 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2064 SET_USE (use_p, use);
2065
2066 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2067 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2068 }
2069 else
2070 replace_uses_by (def, use);
2071
2072 remove_phi_node (&psi, true);
2073 }
2074 }
2075
2076 /* Ensure that B follows A. */
2077 move_block_after (b, a);
2078
2079 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2080 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2081
2082 /* Remove labels from B and set gimple_bb to A for other statements. */
2083 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2084 {
2085 gimple *stmt = gsi_stmt (gsi);
2086 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2087 {
2088 tree label = gimple_label_label (label_stmt);
2089 int lp_nr;
2090
2091 gsi_remove (&gsi, false);
2092
2093 /* Now that we can thread computed gotos, we might have
2094 a situation where we have a forced label in block B.
2095 However, the label at the start of block B might still be
2096 used in other ways (think about the runtime checking for
2097 Fortran assigned gotos). So we cannot just delete the
2098 label. Instead we move the label to the start of block A. */
2099 if (FORCED_LABEL (label))
2100 {
2101 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2102 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2103 }
2104 /* Other user labels are kept around in the form of a debug stmt. */
2105 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2106 {
2107 gimple *dbg = gimple_build_debug_bind (label,
2108 integer_zero_node,
2109 stmt);
2110 gimple_debug_bind_reset_value (dbg);
2111 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2112 }
2113
2114 lp_nr = EH_LANDING_PAD_NR (label);
2115 if (lp_nr)
2116 {
2117 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2118 lp->post_landing_pad = NULL;
2119 }
2120 }
2121 else
2122 {
2123 gimple_set_bb (stmt, a);
2124 gsi_next (&gsi);
2125 }
2126 }
2127
2128 /* When merging two BBs, if their counts are different, the larger count
2129 is selected as the new bb count. This is to handle inconsistent
2130 profiles. */
2131 if (a->loop_father == b->loop_father)
2132 {
2133 a->count = a->count.merge (b->count);
2134 }
2135
2136 /* Merge the sequences. */
2137 last = gsi_last_bb (a);
2138 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2139 set_bb_seq (b, NULL);
2140
2141 if (cfgcleanup_altered_bbs)
2142 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2143 }
2144
2145
2146 /* Return the one of BB's two successors that is not reached by a
2147 complex edge, if there is one. Otherwise, return BB. We use
2148 this in optimizations that use post-dominators for their heuristics,
2149 to catch the cases in C++ where function calls are involved. */
2150
2151 basic_block
2152 single_noncomplex_succ (basic_block bb)
2153 {
2154 edge e0, e1;
2155 if (EDGE_COUNT (bb->succs) != 2)
2156 return bb;
2157
2158 e0 = EDGE_SUCC (bb, 0);
2159 e1 = EDGE_SUCC (bb, 1);
2160 if (e0->flags & EDGE_COMPLEX)
2161 return e1->dest;
2162 if (e1->flags & EDGE_COMPLEX)
2163 return e0->dest;
2164
2165 return bb;
2166 }
2167
2168 /* CALL is a GIMPLE_CALL. Set the current_function_calls_* flags. */
2169
2170 void
2171 notice_special_calls (gcall *call)
2172 {
2173 int flags = gimple_call_flags (call);
2174
2175 if (flags & ECF_MAY_BE_ALLOCA)
2176 cfun->calls_alloca = true;
2177 if (flags & ECF_RETURNS_TWICE)
2178 cfun->calls_setjmp = true;
2179 }
2180
2181
2182 /* Clear flags set by notice_special_calls. Used by dead code removal
2183 to update the flags. */
2184
2185 void
2186 clear_special_calls (void)
2187 {
2188 cfun->calls_alloca = false;
2189 cfun->calls_setjmp = false;
2190 }
2191
2192 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2193
2194 static void
2195 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2196 {
2197 /* Since this block is no longer reachable, we can just delete all
2198 of its PHI nodes. */
2199 remove_phi_nodes (bb);
2200
2201 /* Remove edges to BB's successors. */
2202 while (EDGE_COUNT (bb->succs) > 0)
2203 remove_edge (EDGE_SUCC (bb, 0));
2204 }
2205
2206
2207 /* Remove statements of basic block BB. */
2208
2209 static void
2210 remove_bb (basic_block bb)
2211 {
2212 gimple_stmt_iterator i;
2213
2214 if (dump_file)
2215 {
2216 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2217 if (dump_flags & TDF_DETAILS)
2218 {
2219 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2220 fprintf (dump_file, "\n");
2221 }
2222 }
2223
2224 if (current_loops)
2225 {
2226 struct loop *loop = bb->loop_father;
2227
2228 /* If a loop gets removed, clean up the information associated
2229 with it. */
2230 if (loop->latch == bb
2231 || loop->header == bb)
2232 free_numbers_of_iterations_estimates (loop);
2233 }
2234
2235 /* Remove all the instructions in the block. */
2236 if (bb_seq (bb) != NULL)
2237 {
2238 /* Walk backwards so as to get a chance to substitute all
2239 released DEFs into debug stmts. See
2240 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2241 details. */
2242 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2243 {
2244 gimple *stmt = gsi_stmt (i);
2245 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2246 if (label_stmt
2247 && (FORCED_LABEL (gimple_label_label (label_stmt))
2248 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2249 {
2250 basic_block new_bb;
2251 gimple_stmt_iterator new_gsi;
2252
2253 /* A non-reachable non-local label may still be referenced.
2254 But it no longer needs to carry the extra semantics of
2255 non-locality. */
2256 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2257 {
2258 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2259 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2260 }
2261
2262 new_bb = bb->prev_bb;
2263 /* Don't move any labels into ENTRY block. */
2264 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2265 {
2266 new_bb = single_succ (new_bb);
2267 gcc_assert (new_bb != bb);
2268 }
2269 new_gsi = gsi_after_labels (new_bb);
2270 gsi_remove (&i, false);
2271 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2272 }
2273 else
2274 {
2275 /* Release SSA definitions. */
2276 release_defs (stmt);
2277 gsi_remove (&i, true);
2278 }
2279
2280 if (gsi_end_p (i))
2281 i = gsi_last_bb (bb);
2282 else
2283 gsi_prev (&i);
2284 }
2285 }
2286
2287 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2288 bb->il.gimple.seq = NULL;
2289 bb->il.gimple.phi_nodes = NULL;
2290 }
2291
2292
2293 /* Given a basic block BB and a value VAL for use in the final statement
2294 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2295 the edge that will be taken out of the block.
2296 If VAL is NULL_TREE, then the current value of the final statement's
2297 predicate or index is used.
2298 If the value does not match a unique edge, NULL is returned. */
2299
2300 edge
2301 find_taken_edge (basic_block bb, tree val)
2302 {
2303 gimple *stmt;
2304
2305 stmt = last_stmt (bb);
2306
2307 /* Handle ENTRY and EXIT. */
2308 if (!stmt)
2309 return NULL;
2310
2311 if (gimple_code (stmt) == GIMPLE_COND)
2312 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2313
2314 if (gimple_code (stmt) == GIMPLE_SWITCH)
2315 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2316
2317 if (computed_goto_p (stmt))
2318 {
2319 /* Only optimize if the argument is a label; if the argument is
2320 not a label then we cannot construct a proper CFG.
2321
2322 It may be the case that we only need to allow the LABEL_REF to
2323 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2324 appear inside a LABEL_EXPR just to be safe. */
2325 if (val
2326 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2327 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2328 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2329 }
2330
2331 /* Otherwise we only know the taken successor edge if it's unique. */
2332 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2333 }
2334
2335 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2336 statement, determine which of the outgoing edges will be taken out of the
2337 block. Return NULL if either edge may be taken. */
2338
2339 static edge
2340 find_taken_edge_computed_goto (basic_block bb, tree val)
2341 {
2342 basic_block dest;
2343 edge e = NULL;
2344
2345 dest = label_to_block (cfun, val);
2346 if (dest)
2347 e = find_edge (bb, dest);
2348
2349 /* It's possible for find_edge to return NULL here on invalid code
2350 that abuses the labels-as-values extension (e.g. code that attempts to
2351 jump *between* functions via stored labels-as-values; PR 84136).
2352 If so, then we simply return that NULL for the edge.
2353 We don't currently have a way of detecting such invalid code, so we
2354 can't assert that it was the case when a NULL edge occurs here. */
2355
2356 return e;
2357 }
2358
2359 /* Given COND_STMT and a constant value VAL for use as the predicate,
2360 determine which of the two edges will be taken out of
2361 the statement's block. Return NULL if either edge may be taken.
2362 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2363 is used. */
2364
2365 static edge
2366 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2367 {
2368 edge true_edge, false_edge;
2369
2370 if (val == NULL_TREE)
2371 {
2372 /* Use the current value of the predicate. */
2373 if (gimple_cond_true_p (cond_stmt))
2374 val = integer_one_node;
2375 else if (gimple_cond_false_p (cond_stmt))
2376 val = integer_zero_node;
2377 else
2378 return NULL;
2379 }
2380 else if (TREE_CODE (val) != INTEGER_CST)
2381 return NULL;
2382
2383 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2384 &true_edge, &false_edge);
2385
2386 return (integer_zerop (val) ? false_edge : true_edge);
2387 }
2388
2389 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2390 which edge will be taken out of the statement's block. Return NULL if any
2391 edge may be taken.
2392 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2393 is used. */
2394
2395 edge
2396 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2397 {
2398 basic_block dest_bb;
2399 edge e;
2400 tree taken_case;
2401
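/* A switch with only the default label trivially takes it.  */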
2402 if (gimple_switch_num_labels (switch_stmt) == 1)
2403 taken_case = gimple_switch_default_label (switch_stmt);
2404 else
2405 {
2406 if (val == NULL_TREE)
2407 val = gimple_switch_index (switch_stmt);
2408 if (TREE_CODE (val) != INTEGER_CST)
2409 return NULL;
2410 else
2411 taken_case = find_case_label_for_value (switch_stmt, val);
2412 }
2413 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2414
2415 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2416 gcc_assert (e);
2417 return e;
2418 }
2419
2420
2421 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2422 We can make optimal use here of the fact that the case labels are
2423 sorted: We can do a binary search for a case matching VAL. */
2424
2425 tree
2426 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2427 {
2428 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2429 tree default_case = gimple_switch_default_label (switch_stmt);
2430
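/* The default label sits at index 0 and has no CASE_LOW; it acts as a
   sentinel here because the probe index I below is always at least 1
   while HIGH - LOW > 1 with LOW starting at 0.  */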
2431 for (low = 0, high = n; high - low > 1; )
2432 {
2433 size_t i = (high + low) / 2;
2434 tree t = gimple_switch_label (switch_stmt, i);
2435 int cmp;
2436
2437 /* Cache the result of comparing CASE_LOW and val. */
2438 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2439
2440 if (cmp > 0)
2441 high = i;
2442 else
2443 low = i;
2444
2445 if (CASE_HIGH (t) == NULL)
2446 {
2447 /* A single-valued case label. */
2448 if (cmp == 0)
2449 return t;
2450 }
2451 else
2452 {
2453 /* A case range. We can only handle integer ranges. */
2454 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2455 return t;
2456 }
2457 }
2458
2459 return default_case;
2460 }
2461
2462
2463 /* Dump a basic block on stderr. */
2464
2465 void
2466 gimple_debug_bb (basic_block bb)
2467 {
2468 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2469 }
2470
2471
2472 /* Dump basic block with index N on stderr. */
2473
2474 basic_block
2475 gimple_debug_bb_n (int n)
2476 {
2477 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2478 return BASIC_BLOCK_FOR_FN (cfun, n);
2479 }
2480
2481
2482 /* Dump the CFG on stderr.
2483
2484 FLAGS are the same used by the tree dumping functions
2485 (see TDF_* in dumpfile.h). */
2486
2487 void
2488 gimple_debug_cfg (dump_flags_t flags)
2489 {
2490 gimple_dump_cfg (stderr, flags);
2491 }
2492
2493
2494 /* Dump the program showing basic block boundaries on the given FILE.
2495
2496 FLAGS are the same used by the tree dumping functions (see TDF_* in
2497 tree.h). */
2498
2499 void
2500 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2501 {
2502 if (flags & TDF_DETAILS)
2503 {
2504 dump_function_header (file, current_function_decl, flags);
2505 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2506 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2507 last_basic_block_for_fn (cfun));
2508
2509 brief_dump_cfg (file, flags);
2510 fprintf (file, "\n");
2511 }
2512
2513 if (flags & TDF_STATS)
2514 dump_cfg_stats (file);
2515
2516 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2517 }
2518
2519
2520 /* Dump CFG statistics on FILE. */
2521
2522 void
2523 dump_cfg_stats (FILE *file)
2524 {
2525 static long max_num_merged_labels = 0;
2526 unsigned long size, total = 0;
2527 long num_edges;
2528 basic_block bb;
2529 const char * const fmt_str = "%-30s%-13s%12s\n";
2530 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2531 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2532 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2533 const char *funcname = current_function_name ();
2534
2535 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2536
2537 fprintf (file, "---------------------------------------------------------\n");
2538 fprintf (file, fmt_str, "", " Number of ", "Memory");
2539 fprintf (file, fmt_str, "", " instances ", "used ");
2540 fprintf (file, "---------------------------------------------------------\n");
2541
2542 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2543 total += size;
2544 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2545 SIZE_AMOUNT (size));
2546
2547 num_edges = 0;
2548 FOR_EACH_BB_FN (bb, cfun)
2549 num_edges += EDGE_COUNT (bb->succs);
2550 size = num_edges * sizeof (struct edge_def);
2551 total += size;
2552 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2553
2554 fprintf (file, "---------------------------------------------------------\n");
2555 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2556 SIZE_AMOUNT (total));
2557 fprintf (file, "---------------------------------------------------------\n");
2558 fprintf (file, "\n");
2559
2560 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2561 max_num_merged_labels = cfg_stats.num_merged_labels;
2562
2563 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2564 cfg_stats.num_merged_labels, max_num_merged_labels);
2565
2566 fprintf (file, "\n");
2567 }
2568
2569
2570 /* Dump CFG statistics on stderr. Keep extern so that it's always
2571 linked in the final executable. */
2572
2573 DEBUG_FUNCTION void
2574 debug_cfg_stats (void)
2575 {
2576 dump_cfg_stats (stderr);
2577 }
2578
2579 /*---------------------------------------------------------------------------
2580 Miscellaneous helpers
2581 ---------------------------------------------------------------------------*/
2582
2583 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2584 flow. Transfers of control flow associated with EH are excluded. */
2585
2586 static bool
2587 call_can_make_abnormal_goto (gimple *t)
2588 {
2589 /* If the function has no non-local labels, then a call cannot make an
2590 abnormal transfer of control. */
2591 if (!cfun->has_nonlocal_label
2592 && !cfun->calls_setjmp)
2593 return false;
2594
2595 /* Likewise if the call has no side effects. */
2596 if (!gimple_has_side_effects (t))
2597 return false;
2598
2599 /* Likewise if the called function is leaf. */
2600 if (gimple_call_flags (t) & ECF_LEAF)
2601 return false;
2602
2603 return true;
2604 }
2605
2606
2607 /* Return true if T can make an abnormal transfer of control flow.
2608 Transfers of control flow associated with EH are excluded. */
2609
2610 bool
2611 stmt_can_make_abnormal_goto (gimple *t)
2612 {
2613 if (computed_goto_p (t))
2614 return true;
2615 if (is_gimple_call (t))
2616 return call_can_make_abnormal_goto (t);
2617 return false;
2618 }
2619
2620
2621 /* Return true if T represents a stmt that always transfers control. */
2622
2623 bool
2624 is_ctrl_stmt (gimple *t)
2625 {
2626 switch (gimple_code (t))
2627 {
2628 case GIMPLE_COND:
2629 case GIMPLE_SWITCH:
2630 case GIMPLE_GOTO:
2631 case GIMPLE_RETURN:
2632 case GIMPLE_RESX:
2633 return true;
2634 default:
2635 return false;
2636 }
2637 }
2638
2639
2640 /* Return true if T is a statement that may alter the flow of control
2641 (e.g., a call to a non-returning function). */
2642
2643 bool
2644 is_ctrl_altering_stmt (gimple *t)
2645 {
2646 gcc_assert (t);
2647
2648 switch (gimple_code (t))
2649 {
2650 case GIMPLE_CALL:
2651 /* Per stmt call flag indicates whether the call could alter
2652 control flow. */
2653 if (gimple_call_ctrl_altering_p (t))
2654 return true;
2655 break;
2656
2657 case GIMPLE_EH_DISPATCH:
2658 /* EH_DISPATCH branches to the individual catch handlers at
2659 this level of a try or allowed-exceptions region. It can
2660 fallthru to the next statement as well. */
2661 return true;
2662
2663 case GIMPLE_ASM:
2664 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2665 return true;
2666 break;
2667
2668 CASE_GIMPLE_OMP:
2669 /* OpenMP directives alter control flow. */
2670 return true;
2671
2672 case GIMPLE_TRANSACTION:
2673 /* A transaction start alters control flow. */
2674 return true;
2675
2676 default:
2677 break;
2678 }
2679
2680 /* If a statement can throw, it alters control flow. */
2681 return stmt_can_throw_internal (cfun, t);
2682 }
2683
2684
2685 /* Return true if T is a simple local goto. */
2686
2687 bool
2688 simple_goto_p (gimple *t)
2689 {
2690 return (gimple_code (t) == GIMPLE_GOTO
2691 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2692 }
2693
2694
2695 /* Return true if STMT should start a new basic block. PREV_STMT is
2696 the statement preceding STMT. It is used when STMT is a label or a
2697 case label. Labels should only start a new basic block if their
2698 previous statement wasn't a label. Otherwise, a sequence of labels
2699 would generate unnecessary basic blocks that only contain a single
2700 label. */
2701
2702 static inline bool
2703 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2704 {
2705 if (stmt == NULL)
2706 return false;
2707
2708 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2709 any nondebug stmts in the block. We don't want to start another
2710 block in this case: the debug stmt will already have started the
2711 one STMT would start if we weren't outputting debug stmts. */
2712 if (prev_stmt && is_gimple_debug (prev_stmt))
2713 return false;
2714
2715 /* Labels start a new basic block only if the preceding statement
2716 wasn't a label of the same type. This prevents the creation of
2717 consecutive blocks that have nothing but a single label. */
2718 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2719 {
2720 /* Nonlocal and computed GOTO targets always start a new block. */
2721 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2722 || FORCED_LABEL (gimple_label_label (label_stmt)))
2723 return true;
2724
2725 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2726 {
2727 if (DECL_NONLOCAL (gimple_label_label (
2728 as_a <glabel *> (prev_stmt))))
2729 return true;
2730
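/* Two adjacent ordinary labels end up in the same block; count
   the merge for the CFG statistics.  */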
2731 cfg_stats.num_merged_labels++;
2732 return false;
2733 }
2734 else
2735 return true;
2736 }
2737 else if (gimple_code (stmt) == GIMPLE_CALL)
2738 {
2739 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2740 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2741 start a new block. */
2742 return true;
2743 if (gimple_call_internal_p (stmt, IFN_PHI)
2744 && prev_stmt
2745 && gimple_code (prev_stmt) != GIMPLE_LABEL
2746 && (gimple_code (prev_stmt) != GIMPLE_CALL
2747 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2748 /* PHI nodes start a new block unless preceded by a label
2749 or another PHI. */
2750 return true;
2751 }
2752
2753 return false;
2754 }
2755
2756
2757 /* Return true if T should end a basic block. */
2758
2759 bool
2760 stmt_ends_bb_p (gimple *t)
2761 {
2762 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2763 }
2764
2765 /* Remove block annotations and other data structures. */
2766
2767 void
2768 delete_tree_cfg_annotations (struct function *fn)
2769 {
2770 vec_free (label_to_block_map_for_fn (fn));
2771 }
2772
2773 /* Return the virtual phi in BB. */
2774
2775 gphi *
2776 get_virtual_phi (basic_block bb)
2777 {
2778 for (gphi_iterator gsi = gsi_start_phis (bb);
2779 !gsi_end_p (gsi);
2780 gsi_next (&gsi))
2781 {
2782 gphi *phi = gsi.phi ();
2783
2784 if (virtual_operand_p (PHI_RESULT (phi)))
2785 return phi;
2786 }
2787
2788 return NULL;
2789 }
2790
2791 /* Return the first statement in basic block BB. */
2792
2793 gimple *
2794 first_stmt (basic_block bb)
2795 {
2796 gimple_stmt_iterator i = gsi_start_bb (bb);
2797 gimple *stmt = NULL;
2798
2799 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2800 {
2801 gsi_next (&i);
2802 stmt = NULL;
2803 }
2804 return stmt;
2805 }
2806
2807 /* Return the first non-label statement in basic block BB. */
2808
2809 static gimple *
2810 first_non_label_stmt (basic_block bb)
2811 {
2812 gimple_stmt_iterator i = gsi_start_bb (bb);
2813 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2814 gsi_next (&i);
2815 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2816 }
2817
2818 /* Return the last statement in basic block BB. */
2819
2820 gimple *
2821 last_stmt (basic_block bb)
2822 {
2823 gimple_stmt_iterator i = gsi_last_bb (bb);
2824 gimple *stmt = NULL;
2825
2826 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2827 {
2828 gsi_prev (&i);
2829 stmt = NULL;
2830 }
2831 return stmt;
2832 }
2833
2834 /* Return the last statement of an otherwise empty block. Return NULL
2835 if the block is totally empty, or if it contains more than one
2836 statement. */
2837
2838 gimple *
2839 last_and_only_stmt (basic_block bb)
2840 {
2841 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2842 gimple *last, *prev;
2843
2844 if (gsi_end_p (i))
2845 return NULL;
2846
2847 last = gsi_stmt (i);
2848 gsi_prev_nondebug (&i);
2849 if (gsi_end_p (i))
2850 return last;
2851
2852 /* Empty statements should no longer appear in the instruction stream.
2853 Everything that might have appeared before should be deleted by
2854 remove_useless_stmts, and the optimizers should just gsi_remove
2855 instead of smashing with build_empty_stmt.
2856
2857 Thus the only thing that should appear here in a block containing
2858 one executable statement is a label. */
2859 prev = gsi_stmt (i);
2860 if (gimple_code (prev) == GIMPLE_LABEL)
2861 return last;
2862 else
2863 return NULL;
2864 }
2865
2866 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2867
2868 static void
2869 reinstall_phi_args (edge new_edge, edge old_edge)
2870 {
2871 edge_var_map *vm;
2872 int i;
2873 gphi_iterator phis;
2874
2875 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2876 if (!v)
2877 return;
2878
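/* The queued maps were saved in the same order as the PHI nodes of
   the destination block, so walk both in lockstep; the assert below
   checks that the pairing still holds.  */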
2879 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2880 v->iterate (i, &vm) && !gsi_end_p (phis);
2881 i++, gsi_next (&phis))
2882 {
2883 gphi *phi = phis.phi ();
2884 tree result = redirect_edge_var_map_result (vm);
2885 tree arg = redirect_edge_var_map_def (vm);
2886
2887 gcc_assert (result == gimple_phi_result (phi));
2888
2889 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2890 }
2891
2892 redirect_edge_var_map_clear (old_edge);
2893 }
2894
2895 /* Returns the basic block after which the new basic block created
2896 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2897 near its "logical" location. This is of most help to humans looking
2898 at debugging dumps. */
2899
2900 basic_block
2901 split_edge_bb_loc (edge edge_in)
2902 {
2903 basic_block dest = edge_in->dest;
2904 basic_block dest_prev = dest->prev_bb;
2905
2906 if (dest_prev)
2907 {
2908 edge e = find_edge (dest_prev, dest);
2909 if (e && !(e->flags & EDGE_COMPLEX))
2910 return edge_in->src;
2911 }
2912 return dest_prev;
2913 }
2914
2915 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2916 Abort on abnormal edges. */
2917
2918 static basic_block
2919 gimple_split_edge (edge edge_in)
2920 {
2921 basic_block new_bb, after_bb, dest;
2922 edge new_edge, e;
2923
2924 /* Abnormal edges cannot be split. */
2925 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2926
2927 dest = edge_in->dest;
2928
2929 after_bb = split_edge_bb_loc (edge_in);
2930
2931 new_bb = create_empty_bb (after_bb);
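/* The new block is executed exactly as often as the edge it splits.  */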
2932 new_bb->count = edge_in->count ();
2933
2934 e = redirect_edge_and_branch (edge_in, new_bb);
2935 gcc_assert (e == edge_in);
2936
2937 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2938 reinstall_phi_args (new_edge, e);
2939
2940 return new_bb;
2941 }
2942
2943
2944 /* Verify properties of the address expression T whose base should be
2945 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. Returns true on error. */
2946
2947 static bool
2948 verify_address (tree t, bool verify_addressable)
2949 {
2950 bool old_constant;
2951 bool old_side_effects;
2952 bool new_constant;
2953 bool new_side_effects;
2954
2955 old_constant = TREE_CONSTANT (t);
2956 old_side_effects = TREE_SIDE_EFFECTS (t);
2957
2958 recompute_tree_invariant_for_addr_expr (t);
2959 new_side_effects = TREE_SIDE_EFFECTS (t);
2960 new_constant = TREE_CONSTANT (t);
2961
2962 if (old_constant != new_constant)
2963 {
2964 error ("constant not recomputed when ADDR_EXPR changed");
2965 return true;
2966 }
2967 if (old_side_effects != new_side_effects)
2968 {
2969 error ("side effects not recomputed when ADDR_EXPR changed");
2970 return true;
2971 }
2972
2973 tree base = TREE_OPERAND (t, 0);
2974 while (handled_component_p (base))
2975 base = TREE_OPERAND (base, 0);
2976
2977 if (!(VAR_P (base)
2978 || TREE_CODE (base) == PARM_DECL
2979 || TREE_CODE (base) == RESULT_DECL))
2980 return false;
2981
2982 if (DECL_GIMPLE_REG_P (base))
2983 {
2984 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2985 return true;
2986 }
2987
2988 if (verify_addressable && !TREE_ADDRESSABLE (base))
2989 {
2990 error ("address taken, but ADDRESSABLE bit not set");
2991 return true;
2992 }
2993
2994 return false;
2995 }
2996
2997
2998 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2999 Returns true if there is an error, otherwise false. */
3000
3001 static bool
3002 verify_types_in_gimple_min_lval (tree expr)
3003 {
3004 tree op;
3005
3006 if (is_gimple_id (expr))
3007 return false;
3008
3009 if (TREE_CODE (expr) != TARGET_MEM_REF
3010 && TREE_CODE (expr) != MEM_REF)
3011 {
3012 error ("invalid expression for min lvalue");
3013 return true;
3014 }
3015
3016 /* TARGET_MEM_REFs are strange beasts. */
3017 if (TREE_CODE (expr) == TARGET_MEM_REF)
3018 return false;
3019
3020 op = TREE_OPERAND (expr, 0);
3021 if (!is_gimple_val (op))
3022 {
3023 error ("invalid operand in indirect reference");
3024 debug_generic_stmt (op);
3025 return true;
3026 }
3027 /* Memory references now generally can involve a value conversion. */
3028
3029 return false;
3030 }
3031
3032 /* Verify if EXPR is a valid GIMPLE reference expression. If
3033 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3034 if there is an error, otherwise false. */
3035
3036 static bool
3037 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3038 {
3039 if (TREE_CODE (expr) == REALPART_EXPR
3040 || TREE_CODE (expr) == IMAGPART_EXPR
3041 || TREE_CODE (expr) == BIT_FIELD_REF)
3042 {
3043 tree op = TREE_OPERAND (expr, 0);
3044 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3045 {
3046 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3047 return true;
3048 }
3049
3050 if (TREE_CODE (expr) == BIT_FIELD_REF)
3051 {
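/* Operand 1 is the width in bits and operand 2 the bit position;
   both must be constants compatible with bitsizetype.  */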
3052 tree t1 = TREE_OPERAND (expr, 1);
3053 tree t2 = TREE_OPERAND (expr, 2);
3054 poly_uint64 size, bitpos;
3055 if (!poly_int_tree_p (t1, &size)
3056 || !poly_int_tree_p (t2, &bitpos)
3057 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3058 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3059 {
3060 error ("invalid position or size operand to BIT_FIELD_REF");
3061 return true;
3062 }
3063 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3064 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3065 {
3066 error ("integral result type precision does not match "
3067 "field size of BIT_FIELD_REF");
3068 return true;
3069 }
3070 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3071 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3072 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3073 size))
3074 {
3075 error ("mode size of non-integral result does not "
3076 "match field size of BIT_FIELD_REF");
3077 return true;
3078 }
3079 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3080 && !type_has_mode_precision_p (TREE_TYPE (op)))
3081 {
3082 error ("BIT_FIELD_REF of non-mode-precision operand");
3083 return true;
3084 }
3085 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3086 && maybe_gt (size + bitpos,
3087 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3088 {
3089 error ("position plus size exceeds size of referenced object in "
3090 "BIT_FIELD_REF");
3091 return true;
3092 }
3093 }
3094
3095 if ((TREE_CODE (expr) == REALPART_EXPR
3096 || TREE_CODE (expr) == IMAGPART_EXPR)
3097 && !useless_type_conversion_p (TREE_TYPE (expr),
3098 TREE_TYPE (TREE_TYPE (op))))
3099 {
3100 error ("type mismatch in real/imagpart reference");
3101 debug_generic_stmt (TREE_TYPE (expr));
3102 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3103 return true;
3104 }
3105 expr = op;
3106 }
3107
3108 while (handled_component_p (expr))
3109 {
3110 if (TREE_CODE (expr) == REALPART_EXPR
3111 || TREE_CODE (expr) == IMAGPART_EXPR
3112 || TREE_CODE (expr) == BIT_FIELD_REF)
3113 {
3114 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3115 return true;
3116 }
3117
3118 tree op = TREE_OPERAND (expr, 0);
3119
3120 if (TREE_CODE (expr) == ARRAY_REF
3121 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3122 {
3123 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3124 || (TREE_OPERAND (expr, 2)
3125 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3126 || (TREE_OPERAND (expr, 3)
3127 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3128 {
3129 error ("invalid operands to array reference");
3130 debug_generic_stmt (expr);
3131 return true;
3132 }
3133 }
3134
3135 /* Verify if the reference array element types are compatible. */
3136 if (TREE_CODE (expr) == ARRAY_REF
3137 && !useless_type_conversion_p (TREE_TYPE (expr),
3138 TREE_TYPE (TREE_TYPE (op))))
3139 {
3140 error ("type mismatch in array reference");
3141 debug_generic_stmt (TREE_TYPE (expr));
3142 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3143 return true;
3144 }
3145 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3146 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3147 TREE_TYPE (TREE_TYPE (op))))
3148 {
3149 error ("type mismatch in array range reference");
3150 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3151 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3152 return true;
3153 }
3154
3155 if (TREE_CODE (expr) == COMPONENT_REF)
3156 {
3157 if (TREE_OPERAND (expr, 2)
3158 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3159 {
3160 error ("invalid COMPONENT_REF offset operator");
3161 return true;
3162 }
3163 if (!useless_type_conversion_p (TREE_TYPE (expr),
3164 TREE_TYPE (TREE_OPERAND (expr, 1))))
3165 {
3166 error ("type mismatch in component reference");
3167 debug_generic_stmt (TREE_TYPE (expr));
3168 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3169 return true;
3170 }
3171 }
3172
3173 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3174 {
3175 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3176 that their operand is not an SSA name or an invariant when
3177 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3178 bug). Otherwise there is nothing to verify, gross mismatches at
3179 most invoke undefined behavior. */
3180 if (require_lvalue
3181 && (TREE_CODE (op) == SSA_NAME
3182 || is_gimple_min_invariant (op)))
3183 {
3184 error ("conversion of an SSA_NAME on the left hand side");
3185 debug_generic_stmt (expr);
3186 return true;
3187 }
3188 else if (TREE_CODE (op) == SSA_NAME
3189 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3190 {
3191 error ("conversion of register to a different size");
3192 debug_generic_stmt (expr);
3193 return true;
3194 }
3195 else if (!handled_component_p (op))
3196 return false;
3197 }
3198
3199 expr = op;
3200 }
3201
3202 if (TREE_CODE (expr) == MEM_REF)
3203 {
3204 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3205 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3206 && verify_address (TREE_OPERAND (expr, 0), false)))
3207 {
3208 error ("invalid address operand in MEM_REF");
3209 debug_generic_stmt (expr);
3210 return true;
3211 }
3212 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3213 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3214 {
3215 error ("invalid offset operand in MEM_REF");
3216 debug_generic_stmt (expr);
3217 return true;
3218 }
3219 }
3220 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3221 {
3222 if (!TMR_BASE (expr)
3223 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3224 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3225 && verify_address (TMR_BASE (expr), false)))
3226 {
3227 error ("invalid address operand in TARGET_MEM_REF");
3228 return true;
3229 }
3230 if (!TMR_OFFSET (expr)
3231 || !poly_int_tree_p (TMR_OFFSET (expr))
3232 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3233 {
3234 error ("invalid offset operand in TARGET_MEM_REF");
3235 debug_generic_stmt (expr);
3236 return true;
3237 }
3238 }
3239 else if (TREE_CODE (expr) == INDIRECT_REF)
3240 {
3241 error ("INDIRECT_REF in gimple IL");
3242 debug_generic_stmt (expr);
3243 return true;
3244 }
3245
3246 return ((require_lvalue || !is_gimple_min_invariant (expr))
3247 && verify_types_in_gimple_min_lval (expr));
3248 }
3249
3250 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3251 list of pointer-to types that is trivially convertible to DEST. */
3252
3253 static bool
3254 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3255 {
3256 tree src;
3257
3258 if (!TYPE_POINTER_TO (src_obj))
3259 return true;
3260
3261 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3262 if (useless_type_conversion_p (dest, src))
3263 return true;
3264
3265 return false;
3266 }
3267
3268 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3269 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3270
3271 static bool
3272 valid_fixed_convert_types_p (tree type1, tree type2)
3273 {
3274 return (FIXED_POINT_TYPE_P (type1)
3275 && (INTEGRAL_TYPE_P (type2)
3276 || SCALAR_FLOAT_TYPE_P (type2)
3277 || FIXED_POINT_TYPE_P (type2)));
3278 }
3279
3280 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3281 is a problem, otherwise false. */
3282
3283 static bool
3284 verify_gimple_call (gcall *stmt)
3285 {
3286 tree fn = gimple_call_fn (stmt);
3287 tree fntype, fndecl;
3288 unsigned i;
3289
3290 if (gimple_call_internal_p (stmt))
3291 {
3292 if (fn)
3293 {
3294 error ("gimple call has two targets");
3295 debug_generic_stmt (fn);
3296 return true;
3297 }
3298 }
3299 else
3300 {
3301 if (!fn)
3302 {
3303 error ("gimple call has no target");
3304 return true;
3305 }
3306 }
3307
3308 if (fn && !is_gimple_call_addr (fn))
3309 {
3310 error ("invalid function in gimple call");
3311 debug_generic_stmt (fn);
3312 return true;
3313 }
3314
3315 if (fn
3316 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3317 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3318 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3319 {
3320 error ("non-function in gimple call");
3321 return true;
3322 }
3323
3324 fndecl = gimple_call_fndecl (stmt);
3325 if (fndecl
3326 && TREE_CODE (fndecl) == FUNCTION_DECL
3327 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3328 && !DECL_PURE_P (fndecl)
3329 && !TREE_READONLY (fndecl))
3330 {
3331 error ("invalid pure const state for function");
3332 return true;
3333 }
3334
3335 tree lhs = gimple_call_lhs (stmt);
3336 if (lhs
3337 && (!is_gimple_lvalue (lhs)
3338 || verify_types_in_gimple_reference (lhs, true)))
3339 {
3340 error ("invalid LHS in gimple call");
3341 return true;
3342 }
3343
3344 if (gimple_call_ctrl_altering_p (stmt)
3345 && gimple_call_noreturn_p (stmt)
3346 && should_remove_lhs_p (lhs))
3347 {
3348 error ("LHS in noreturn call");
3349 return true;
3350 }
3351
3352 fntype = gimple_call_fntype (stmt);
3353 if (fntype
3354 && lhs
3355 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3356 /* ??? At least C++ misses conversions at assignments from
3357 void * call results.
3358 For now simply allow arbitrary pointer type conversions. */
3359 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3360 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3361 {
3362 error ("invalid conversion in gimple call");
3363 debug_generic_stmt (TREE_TYPE (lhs));
3364 debug_generic_stmt (TREE_TYPE (fntype));
3365 return true;
3366 }
3367
3368 if (gimple_call_chain (stmt)
3369 && !is_gimple_val (gimple_call_chain (stmt)))
3370 {
3371 error ("invalid static chain in gimple call");
3372 debug_generic_stmt (gimple_call_chain (stmt));
3373 return true;
3374 }
3375
3376 /* If there is a static chain argument, the call should either be
3377 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3378 if (gimple_call_chain (stmt)
3379 && fndecl
3380 && !DECL_STATIC_CHAIN (fndecl))
3381 {
3382 error ("static chain with function that doesn%'t use one");
3383 return true;
3384 }
3385
3386 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3387 {
3388 switch (DECL_FUNCTION_CODE (fndecl))
3389 {
3390 case BUILT_IN_UNREACHABLE:
3391 case BUILT_IN_TRAP:
3392 if (gimple_call_num_args (stmt) > 0)
3393 {
3394 /* Built-in unreachable with parameters might not be caught by
3395 undefined behavior sanitizer. Front-ends do check that users do
3396 not call them that way, but we also produce calls to
3397 __builtin_unreachable internally, for example when IPA figures
3398 out a call cannot happen in a legal program. In such cases,
3399 we must make sure arguments are stripped off. */
3400 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3401 "with arguments");
3402 return true;
3403 }
3404 break;
3405 default:
3406 break;
3407 }
3408 }
3409
3410 /* ??? The C frontend passes unpromoted arguments in case it
3411 didn't see a function declaration before the call. So for now
3412 leave the call arguments mostly unverified. Once we gimplify
3413 unit-at-a-time we have a chance to fix this. */
3414
3415 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3416 {
3417 tree arg = gimple_call_arg (stmt, i);
3418 if ((is_gimple_reg_type (TREE_TYPE (arg))
3419 && !is_gimple_val (arg))
3420 || (!is_gimple_reg_type (TREE_TYPE (arg))
3421 && !is_gimple_lvalue (arg)))
3422 {
3423 error ("invalid argument to gimple call");
3424 debug_generic_expr (arg);
3425 return true;
3426 }
3427 }
3428
3429 return false;
3430 }
3431
3432 /* Verifies the gimple comparison with the result type TYPE and
3433 the operands OP0 and OP1; the comparison code is CODE. */
3434
3435 static bool
3436 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3437 {
3438 tree op0_type = TREE_TYPE (op0);
3439 tree op1_type = TREE_TYPE (op1);
3440
3441 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3442 {
3443 error ("invalid operands in gimple comparison");
3444 return true;
3445 }
3446
3447 /* For comparisons we do not have the operation's type as the
3448 effective type the comparison is carried out in. Instead
3449 we require that either the first operand is trivially
3450 convertible into the second, or the other way around.
3451 Because we special-case pointers to void we allow
3452 comparisons of pointers with the same mode as well. */
3453 if (!useless_type_conversion_p (op0_type, op1_type)
3454 && !useless_type_conversion_p (op1_type, op0_type)
3455 && (!POINTER_TYPE_P (op0_type)
3456 || !POINTER_TYPE_P (op1_type)
3457 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3458 {
3459 error ("mismatching comparison operand types");
3460 debug_generic_expr (op0_type);
3461 debug_generic_expr (op1_type);
3462 return true;
3463 }
3464
3465 /* The resulting type of a comparison may be an effective boolean type. */
3466 if (INTEGRAL_TYPE_P (type)
3467 && (TREE_CODE (type) == BOOLEAN_TYPE
3468 || TYPE_PRECISION (type) == 1))
3469 {
3470 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3471 || TREE_CODE (op1_type) == VECTOR_TYPE)
3472 && code != EQ_EXPR && code != NE_EXPR
3473 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3474 && !VECTOR_INTEGER_TYPE_P (op0_type))
3475 {
3476 error ("unsupported operation or type for vector comparison"
3477 " returning a boolean");
3478 debug_generic_expr (op0_type);
3479 debug_generic_expr (op1_type);
3480 return true;
3481 }
3482 }
3483 /* Or a boolean vector type with the same element count
3484 as the comparison operand types. */
3485 else if (TREE_CODE (type) == VECTOR_TYPE
3486 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3487 {
3488 if (TREE_CODE (op0_type) != VECTOR_TYPE
3489 || TREE_CODE (op1_type) != VECTOR_TYPE)
3490 {
3491 error ("non-vector operands in vector comparison");
3492 debug_generic_expr (op0_type);
3493 debug_generic_expr (op1_type);
3494 return true;
3495 }
3496
3497 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3498 TYPE_VECTOR_SUBPARTS (op0_type)))
3499 {
3500 error ("invalid vector comparison resulting type");
3501 debug_generic_expr (type);
3502 return true;
3503 }
3504 }
3505 else
3506 {
3507 error ("bogus comparison result type");
3508 debug_generic_expr (type);
3509 return true;
3510 }
3511
3512 return false;
3513 }
3514
3515 /* Verify a gimple assignment statement STMT with a unary rhs.
3516 Returns true if anything is wrong. */
3517
3518 static bool
3519 verify_gimple_assign_unary (gassign *stmt)
3520 {
3521 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3522 tree lhs = gimple_assign_lhs (stmt);
3523 tree lhs_type = TREE_TYPE (lhs);
3524 tree rhs1 = gimple_assign_rhs1 (stmt);
3525 tree rhs1_type = TREE_TYPE (rhs1);
3526
3527 if (!is_gimple_reg (lhs))
3528 {
3529 error ("non-register as LHS of unary operation");
3530 return true;
3531 }
3532
3533 if (!is_gimple_val (rhs1))
3534 {
3535 error ("invalid operand in unary operation");
3536 return true;
3537 }
3538
3539 /* First handle conversions. */
3540 switch (rhs_code)
3541 {
3542 CASE_CONVERT:
3543 {
3544 /* Allow conversions from pointer type to integral type only if
3545 there is no sign or zero extension involved.
3546 For targets where the precision of ptrofftype doesn't match that
3547 of pointers we need to allow arbitrary conversions to ptrofftype. */
3548 if ((POINTER_TYPE_P (lhs_type)
3549 && INTEGRAL_TYPE_P (rhs1_type))
3550 || (POINTER_TYPE_P (rhs1_type)
3551 && INTEGRAL_TYPE_P (lhs_type)
3552 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3553 || ptrofftype_p (lhs_type))))
3554 return false;
3555
3556 /* Allow conversion from integral to offset type and vice versa. */
3557 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3558 && INTEGRAL_TYPE_P (rhs1_type))
3559 || (INTEGRAL_TYPE_P (lhs_type)
3560 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3561 return false;
3562
3563 /* Otherwise assert we are converting between types of the
3564 same kind. */
3565 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3566 {
3567 error ("invalid types in nop conversion");
3568 debug_generic_expr (lhs_type);
3569 debug_generic_expr (rhs1_type);
3570 return true;
3571 }
3572
3573 return false;
3574 }
3575
3576 case ADDR_SPACE_CONVERT_EXPR:
3577 {
3578 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3579 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3580 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3581 {
3582 error ("invalid types in address space conversion");
3583 debug_generic_expr (lhs_type);
3584 debug_generic_expr (rhs1_type);
3585 return true;
3586 }
3587
3588 return false;
3589 }
3590
3591 case FIXED_CONVERT_EXPR:
3592 {
3593 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3594 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3595 {
3596 error ("invalid types in fixed-point conversion");
3597 debug_generic_expr (lhs_type);
3598 debug_generic_expr (rhs1_type);
3599 return true;
3600 }
3601
3602 return false;
3603 }
3604
3605 case FLOAT_EXPR:
3606 {
3607 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3608 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3609 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3610 {
3611 error ("invalid types in conversion to floating point");
3612 debug_generic_expr (lhs_type);
3613 debug_generic_expr (rhs1_type);
3614 return true;
3615 }
3616
3617 return false;
3618 }
3619
3620 case FIX_TRUNC_EXPR:
3621 {
3622 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3623 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3624 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3625 {
3626 error ("invalid types in conversion to integer");
3627 debug_generic_expr (lhs_type);
3628 debug_generic_expr (rhs1_type);
3629 return true;
3630 }
3631
3632 return false;
3633 }
3634
3635 case VEC_UNPACK_HI_EXPR:
3636 case VEC_UNPACK_LO_EXPR:
3637 case VEC_UNPACK_FLOAT_HI_EXPR:
3638 case VEC_UNPACK_FLOAT_LO_EXPR:
3639 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3640 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3641 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3642 || TREE_CODE (lhs_type) != VECTOR_TYPE
3643 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3644 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3645 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3646 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3647 || ((rhs_code == VEC_UNPACK_HI_EXPR
3648 || rhs_code == VEC_UNPACK_LO_EXPR)
3649 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3650 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3651 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3652 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3653 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3654 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3655 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3656 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3657 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3658 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3659 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3660 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3661 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3662 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3663 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3664 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3665 {
3666 error ("type mismatch in vector unpack expression");
3667 debug_generic_expr (lhs_type);
3668 debug_generic_expr (rhs1_type);
3669 return true;
3670 }
3671
3672 return false;
3673
3674 case NEGATE_EXPR:
3675 case ABS_EXPR:
3676 case BIT_NOT_EXPR:
3677 case PAREN_EXPR:
3678 case CONJ_EXPR:
3679 break;
3680
3681 case ABSU_EXPR:
3682 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3683 || !TYPE_UNSIGNED (lhs_type)
3684 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3685 || TYPE_UNSIGNED (rhs1_type)
3686 || element_precision (lhs_type) != element_precision (rhs1_type))
3687 {
3688 error ("invalid types for ABSU_EXPR");
3689 debug_generic_expr (lhs_type);
3690 debug_generic_expr (rhs1_type);
3691 return true;
3692 }
3693 return false;
3694
3695 case VEC_DUPLICATE_EXPR:
3696 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3697 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3698 {
3699 error ("vec_duplicate should be from a scalar to a like vector");
3700 debug_generic_expr (lhs_type);
3701 debug_generic_expr (rhs1_type);
3702 return true;
3703 }
3704 return false;
3705
3706 default:
3707 gcc_unreachable ();
3708 }
3709
3710 /* For the remaining codes assert there is no conversion involved. */
3711 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3712 {
3713 error ("non-trivial conversion in unary operation");
3714 debug_generic_expr (lhs_type);
3715 debug_generic_expr (rhs1_type);
3716 return true;
3717 }
3718
3719 return false;
3720 }
3721
3722 /* Verify a gimple assignment statement STMT with a binary rhs.
3723 Returns true if anything is wrong. */
3724
3725 static bool
3726 verify_gimple_assign_binary (gassign *stmt)
3727 {
3728 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3729 tree lhs = gimple_assign_lhs (stmt);
3730 tree lhs_type = TREE_TYPE (lhs);
3731 tree rhs1 = gimple_assign_rhs1 (stmt);
3732 tree rhs1_type = TREE_TYPE (rhs1);
3733 tree rhs2 = gimple_assign_rhs2 (stmt);
3734 tree rhs2_type = TREE_TYPE (rhs2);
3735
3736 if (!is_gimple_reg (lhs))
3737 {
3738 error ("non-register as LHS of binary operation");
3739 return true;
3740 }
3741
3742 if (!is_gimple_val (rhs1)
3743 || !is_gimple_val (rhs2))
3744 {
3745 error ("invalid operands in binary operation");
3746 return true;
3747 }
3748
3749 /* First handle operations that involve different types. */
3750 switch (rhs_code)
3751 {
3752 case COMPLEX_EXPR:
3753 {
3754 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3755 || !(INTEGRAL_TYPE_P (rhs1_type)
3756 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3757 || !(INTEGRAL_TYPE_P (rhs2_type)
3758 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3759 {
3760 error ("type mismatch in complex expression");
3761 debug_generic_expr (lhs_type);
3762 debug_generic_expr (rhs1_type);
3763 debug_generic_expr (rhs2_type);
3764 return true;
3765 }
3766
3767 return false;
3768 }
3769
3770 case LSHIFT_EXPR:
3771 case RSHIFT_EXPR:
3772 case LROTATE_EXPR:
3773 case RROTATE_EXPR:
3774 {
3775 /* Shifts and rotates are ok on integral types, fixed point
3776 types and integer vector types. */
3777 if ((!INTEGRAL_TYPE_P (rhs1_type)
3778 && !FIXED_POINT_TYPE_P (rhs1_type)
3779 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3780 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3781 || (!INTEGRAL_TYPE_P (rhs2_type)
3782 /* Vector shifts of vectors are also ok. */
3783 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3784 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3785 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3786 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3787 || !useless_type_conversion_p (lhs_type, rhs1_type))
3788 {
3789 error ("type mismatch in shift expression");
3790 debug_generic_expr (lhs_type);
3791 debug_generic_expr (rhs1_type);
3792 debug_generic_expr (rhs2_type);
3793 return true;
3794 }
3795
3796 return false;
3797 }
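
    /* Illustrative examples for the shift/rotate checks above (a
       sketch, not compiled).  Assuming int x, y and unsigned char n:

	 x_1 = y_2 << n_3;            <-- OK: rhs2 may be any integral type
	 v4si_1 = v4si_2 << v4si_3;   <-- OK: vector shifted by a vector

       but lhs and rhs1 must have the same type, so widening the result
       of a shift requires an explicit conversion statement.  */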
3798
3799 case WIDEN_LSHIFT_EXPR:
3800 {
3801 if (!INTEGRAL_TYPE_P (lhs_type)
3802 || !INTEGRAL_TYPE_P (rhs1_type)
3803 || TREE_CODE (rhs2) != INTEGER_CST
3804 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3805 {
3806 	    error ("type mismatch in widening shift expression");
3807 debug_generic_expr (lhs_type);
3808 debug_generic_expr (rhs1_type);
3809 debug_generic_expr (rhs2_type);
3810 return true;
3811 }
3812
3813 return false;
3814 }
3815
3816 case VEC_WIDEN_LSHIFT_HI_EXPR:
3817 case VEC_WIDEN_LSHIFT_LO_EXPR:
3818 {
3819 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3820 || TREE_CODE (lhs_type) != VECTOR_TYPE
3821 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3822 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3823 || TREE_CODE (rhs2) != INTEGER_CST
3824 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3825 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3826 {
3827 error ("type mismatch in widening vector shift expression");
3828 debug_generic_expr (lhs_type);
3829 debug_generic_expr (rhs1_type);
3830 debug_generic_expr (rhs2_type);
3831 return true;
3832 }
3833
3834 return false;
3835 }
3836
3837 case PLUS_EXPR:
3838 case MINUS_EXPR:
3839 {
3840 tree lhs_etype = lhs_type;
3841 tree rhs1_etype = rhs1_type;
3842 tree rhs2_etype = rhs2_type;
3843 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3844 {
3845 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3846 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3847 {
3848 	      error ("invalid non-vector operands to vector valued plus/minus");
3849 return true;
3850 }
3851 lhs_etype = TREE_TYPE (lhs_type);
3852 rhs1_etype = TREE_TYPE (rhs1_type);
3853 rhs2_etype = TREE_TYPE (rhs2_type);
3854 }
3855 if (POINTER_TYPE_P (lhs_etype)
3856 || POINTER_TYPE_P (rhs1_etype)
3857 || POINTER_TYPE_P (rhs2_etype))
3858 {
3859 error ("invalid (pointer) operands to plus/minus");
3860 return true;
3861 }
3862
3863 /* Continue with generic binary expression handling. */
3864 break;
3865 }
3866
3867 case POINTER_PLUS_EXPR:
3868 {
3869 if (!POINTER_TYPE_P (rhs1_type)
3870 || !useless_type_conversion_p (lhs_type, rhs1_type)
3871 || !ptrofftype_p (rhs2_type))
3872 {
3873 error ("type mismatch in pointer plus expression");
3874 debug_generic_stmt (lhs_type);
3875 debug_generic_stmt (rhs1_type);
3876 debug_generic_stmt (rhs2_type);
3877 return true;
3878 }
3879
3880 return false;
3881 }
3882
3883 case POINTER_DIFF_EXPR:
3884 {
3885 if (!POINTER_TYPE_P (rhs1_type)
3886 || !POINTER_TYPE_P (rhs2_type)
3887 /* Because we special-case pointers to void we allow difference
3888 of arbitrary pointers with the same mode. */
3889 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3890 || TREE_CODE (lhs_type) != INTEGER_TYPE
3891 || TYPE_UNSIGNED (lhs_type)
3892 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3893 {
3894 error ("type mismatch in pointer diff expression");
3895 debug_generic_stmt (lhs_type);
3896 debug_generic_stmt (rhs1_type);
3897 debug_generic_stmt (rhs2_type);
3898 return true;
3899 }
3900
3901 return false;
3902 }
3903
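    /* Illustrative note for the two pointer cases above (a sketch, not
       compiled).  Source-level pointer arithmetic such as

	 q = p + i;    becomes   q_1 = p_2 + off_3;  <-- POINTER_PLUS_EXPR
	 d = p - q;    becomes   d_1 = p_2 - q_3;    <-- POINTER_DIFF_EXPR

       where off_3 must have a valid pointer offset type (ptrofftype_p,
       i.e. sizetype-like) and d_1 must be a signed integer with the
       precision of the pointers.  */
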
3904 case TRUTH_ANDIF_EXPR:
3905 case TRUTH_ORIF_EXPR:
3906 case TRUTH_AND_EXPR:
3907 case TRUTH_OR_EXPR:
3908 case TRUTH_XOR_EXPR:
3909
3910 gcc_unreachable ();
3911
3912 case LT_EXPR:
3913 case LE_EXPR:
3914 case GT_EXPR:
3915 case GE_EXPR:
3916 case EQ_EXPR:
3917 case NE_EXPR:
3918 case UNORDERED_EXPR:
3919 case ORDERED_EXPR:
3920 case UNLT_EXPR:
3921 case UNLE_EXPR:
3922 case UNGT_EXPR:
3923 case UNGE_EXPR:
3924 case UNEQ_EXPR:
3925 case LTGT_EXPR:
3926 /* Comparisons are also binary, but the result type is not
3927 connected to the operand types. */
3928 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3929
3930 case WIDEN_MULT_EXPR:
3931 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3932 return true;
3933 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3934 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3935
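    /* Note on WIDEN_MULT_EXPR above (illustrative): multiplying two
       ints into a long, printed as x_1 = a_2 w* b_3 in the dumps,
       requires equal operand precisions and an LHS at least twice as
       wide.  Unlike the neighbouring cases this one returns true
       without calling error (), so a failure is reported only by the
       statement dump and the generic "verify_gimple failed" message.  */
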
3936 case WIDEN_SUM_EXPR:
3937 {
3938 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
3939 || TREE_CODE (lhs_type) != VECTOR_TYPE)
3940 && ((!INTEGRAL_TYPE_P (rhs1_type)
3941 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
3942 || (!INTEGRAL_TYPE_P (lhs_type)
3943 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
3944 || !useless_type_conversion_p (lhs_type, rhs2_type)
3945 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
3946 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3947 {
3948 error ("type mismatch in widening sum reduction");
3949 debug_generic_expr (lhs_type);
3950 debug_generic_expr (rhs1_type);
3951 debug_generic_expr (rhs2_type);
3952 return true;
3953 }
3954 return false;
3955 }
3956
3957 case VEC_WIDEN_MULT_HI_EXPR:
3958 case VEC_WIDEN_MULT_LO_EXPR:
3959 case VEC_WIDEN_MULT_EVEN_EXPR:
3960 case VEC_WIDEN_MULT_ODD_EXPR:
3961 {
3962 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3963 || TREE_CODE (lhs_type) != VECTOR_TYPE
3964 || !types_compatible_p (rhs1_type, rhs2_type)
3965 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3966 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
3967 {
3968 error ("type mismatch in vector widening multiplication");
3969 debug_generic_expr (lhs_type);
3970 debug_generic_expr (rhs1_type);
3971 debug_generic_expr (rhs2_type);
3972 return true;
3973 }
3974 return false;
3975 }
3976
3977 case VEC_PACK_TRUNC_EXPR:
3978 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
3979 vector boolean types. */
3980 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
3981 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
3982 && types_compatible_p (rhs1_type, rhs2_type)
3983 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3984 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
3985 return false;
3986
3987 /* Fallthru. */
3988 case VEC_PACK_SAT_EXPR:
3989 case VEC_PACK_FIX_TRUNC_EXPR:
3990 {
3991 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3992 || TREE_CODE (lhs_type) != VECTOR_TYPE
3993 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
3994 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3995 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
3996 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3997 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
3998 || !types_compatible_p (rhs1_type, rhs2_type)
3999 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4000 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4001 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4002 TYPE_VECTOR_SUBPARTS (lhs_type)))
4003 {
4004 error ("type mismatch in vector pack expression");
4005 debug_generic_expr (lhs_type);
4006 debug_generic_expr (rhs1_type);
4007 debug_generic_expr (rhs2_type);
4008 return true;
4009 }
4010
4011 return false;
4012 }
4013
4014 case VEC_PACK_FLOAT_EXPR:
4015 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4016 || TREE_CODE (lhs_type) != VECTOR_TYPE
4017 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4018 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4019 || !types_compatible_p (rhs1_type, rhs2_type)
4020 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4021 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4022 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4023 TYPE_VECTOR_SUBPARTS (lhs_type)))
4024 {
4025 error ("type mismatch in vector pack expression");
4026 debug_generic_expr (lhs_type);
4027 debug_generic_expr (rhs1_type);
4028 debug_generic_expr (rhs2_type);
4029 return true;
4030 }
4031
4032 return false;
4033
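    /* Illustrative shape of the vector pack checks above (a sketch,
       not compiled): packing takes two N-element vectors of 2S-bit
       elements and produces one 2N-element vector of S-bit elements,
       e.g.

	 v8hi_1 = VEC_PACK_TRUNC_EXPR <v4si_2, v4si_3>;

       VEC_PACK_FIX_TRUNC_EXPR additionally converts float elements to
       integer ones, and VEC_PACK_FLOAT_EXPR integer elements to float
       ones.  */
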
4034 case MULT_EXPR:
4035 case MULT_HIGHPART_EXPR:
4036 case TRUNC_DIV_EXPR:
4037 case CEIL_DIV_EXPR:
4038 case FLOOR_DIV_EXPR:
4039 case ROUND_DIV_EXPR:
4040 case TRUNC_MOD_EXPR:
4041 case CEIL_MOD_EXPR:
4042 case FLOOR_MOD_EXPR:
4043 case ROUND_MOD_EXPR:
4044 case RDIV_EXPR:
4045 case EXACT_DIV_EXPR:
4046 case MIN_EXPR:
4047 case MAX_EXPR:
4048 case BIT_IOR_EXPR:
4049 case BIT_XOR_EXPR:
4050 case BIT_AND_EXPR:
4051 /* Continue with generic binary expression handling. */
4052 break;
4053
4054 case VEC_SERIES_EXPR:
4055 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4056 {
4057 error ("type mismatch in series expression");
4058 debug_generic_expr (rhs1_type);
4059 debug_generic_expr (rhs2_type);
4060 return true;
4061 }
4062 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4063 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4064 {
4065 error ("vector type expected in series expression");
4066 debug_generic_expr (lhs_type);
4067 return true;
4068 }
4069 return false;
4070
4071 default:
4072 gcc_unreachable ();
4073 }
4074
4075 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4076 || !useless_type_conversion_p (lhs_type, rhs2_type))
4077 {
4078 error ("type mismatch in binary expression");
4079 debug_generic_stmt (lhs_type);
4080 debug_generic_stmt (rhs1_type);
4081 debug_generic_stmt (rhs2_type);
4082 return true;
4083 }
4084
4085 return false;
4086 }
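
/* A worked example for the generic tail above (illustrative only): for
   codes like PLUS_EXPR or BIT_AND_EXPR that break out of the switch,
   all three types must match, so with int operands

     x_1 = y_2 + z_3;

   verifies, while mixing an int operand with a long result requires an
   explicit conversion statement first.  */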
4087
4088 /* Verify a gimple assignment statement STMT with a ternary rhs.
4089 Returns true if anything is wrong. */
4090
4091 static bool
4092 verify_gimple_assign_ternary (gassign *stmt)
4093 {
4094 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4095 tree lhs = gimple_assign_lhs (stmt);
4096 tree lhs_type = TREE_TYPE (lhs);
4097 tree rhs1 = gimple_assign_rhs1 (stmt);
4098 tree rhs1_type = TREE_TYPE (rhs1);
4099 tree rhs2 = gimple_assign_rhs2 (stmt);
4100 tree rhs2_type = TREE_TYPE (rhs2);
4101 tree rhs3 = gimple_assign_rhs3 (stmt);
4102 tree rhs3_type = TREE_TYPE (rhs3);
4103
4104 if (!is_gimple_reg (lhs))
4105 {
4106 error ("non-register as LHS of ternary operation");
4107 return true;
4108 }
4109
4110 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4111 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4112 || !is_gimple_val (rhs2)
4113 || !is_gimple_val (rhs3))
4114 {
4115 error ("invalid operands in ternary operation");
4116 return true;
4117 }
4118
4119 /* First handle operations that involve different types. */
4120 switch (rhs_code)
4121 {
4122 case WIDEN_MULT_PLUS_EXPR:
4123 case WIDEN_MULT_MINUS_EXPR:
4124 if ((!INTEGRAL_TYPE_P (rhs1_type)
4125 && !FIXED_POINT_TYPE_P (rhs1_type))
4126 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4127 || !useless_type_conversion_p (lhs_type, rhs3_type)
4128 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4129 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4130 {
4131 error ("type mismatch in widening multiply-accumulate expression");
4132 debug_generic_expr (lhs_type);
4133 debug_generic_expr (rhs1_type);
4134 debug_generic_expr (rhs2_type);
4135 debug_generic_expr (rhs3_type);
4136 return true;
4137 }
4138 break;
4139
4140 case VEC_COND_EXPR:
4141 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4142 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4143 TYPE_VECTOR_SUBPARTS (lhs_type)))
4144 {
4145 	  error ("the first argument of a VEC_COND_EXPR must be a "
4146 		 "boolean vector type with the same number of elements "
4147 		 "as the result");
4148 debug_generic_expr (lhs_type);
4149 debug_generic_expr (rhs1_type);
4150 return true;
4151 }
4152 /* Fallthrough. */
4153 case COND_EXPR:
4154 if (!is_gimple_val (rhs1)
4155 && verify_gimple_comparison (TREE_TYPE (rhs1),
4156 TREE_OPERAND (rhs1, 0),
4157 TREE_OPERAND (rhs1, 1),
4158 TREE_CODE (rhs1)))
4159 return true;
4160 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4161 || !useless_type_conversion_p (lhs_type, rhs3_type))
4162 {
4163 error ("type mismatch in conditional expression");
4164 debug_generic_expr (lhs_type);
4165 debug_generic_expr (rhs2_type);
4166 debug_generic_expr (rhs3_type);
4167 return true;
4168 }
4169 break;
4170
4171 case VEC_PERM_EXPR:
4172 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4173 || !useless_type_conversion_p (lhs_type, rhs2_type))
4174 {
4175 error ("type mismatch in vector permute expression");
4176 debug_generic_expr (lhs_type);
4177 debug_generic_expr (rhs1_type);
4178 debug_generic_expr (rhs2_type);
4179 debug_generic_expr (rhs3_type);
4180 return true;
4181 }
4182
4183 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4184 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4185 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4186 {
4187 error ("vector types expected in vector permute expression");
4188 debug_generic_expr (lhs_type);
4189 debug_generic_expr (rhs1_type);
4190 debug_generic_expr (rhs2_type);
4191 debug_generic_expr (rhs3_type);
4192 return true;
4193 }
4194
4195 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4196 TYPE_VECTOR_SUBPARTS (rhs2_type))
4197 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4198 TYPE_VECTOR_SUBPARTS (rhs3_type))
4199 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4200 TYPE_VECTOR_SUBPARTS (lhs_type)))
4201 {
4202 	  error ("vectors with different element counts found "
4203 		 "in vector permute expression");
4204 debug_generic_expr (lhs_type);
4205 debug_generic_expr (rhs1_type);
4206 debug_generic_expr (rhs2_type);
4207 debug_generic_expr (rhs3_type);
4208 return true;
4209 }
4210
4211 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4212 || (TREE_CODE (rhs3) != VECTOR_CST
4213 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4214 (TREE_TYPE (rhs3_type)))
4215 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4216 (TREE_TYPE (rhs1_type))))))
4217 {
4218 error ("invalid mask type in vector permute expression");
4219 debug_generic_expr (lhs_type);
4220 debug_generic_expr (rhs1_type);
4221 debug_generic_expr (rhs2_type);
4222 debug_generic_expr (rhs3_type);
4223 return true;
4224 }
4225
4226 return false;
4227
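    /* Illustrative VEC_PERM_EXPR accepted by the checks above (a
       sketch, not compiled):

	 v4si_1 = VEC_PERM_EXPR <a_2, b_3, { 0, 4, 1, 5 }>;

       All four vector types have the same number of elements, the mask
       has integer elements, and a non-constant mask must have elements
       of the same width as the data elements.  */
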
4228 case SAD_EXPR:
4229 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4230 || !useless_type_conversion_p (lhs_type, rhs3_type)
4231 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4232 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4233 {
4234 error ("type mismatch in sad expression");
4235 debug_generic_expr (lhs_type);
4236 debug_generic_expr (rhs1_type);
4237 debug_generic_expr (rhs2_type);
4238 debug_generic_expr (rhs3_type);
4239 return true;
4240 }
4241
4242 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4243 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4244 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4245 {
4246 error ("vector types expected in sad expression");
4247 debug_generic_expr (lhs_type);
4248 debug_generic_expr (rhs1_type);
4249 debug_generic_expr (rhs2_type);
4250 debug_generic_expr (rhs3_type);
4251 return true;
4252 }
4253
4254 return false;
4255
4256 case BIT_INSERT_EXPR:
4257 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4258 {
4259 error ("type mismatch in BIT_INSERT_EXPR");
4260 debug_generic_expr (lhs_type);
4261 debug_generic_expr (rhs1_type);
4262 return true;
4263 }
4264 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4265 && INTEGRAL_TYPE_P (rhs2_type))
4266 || (VECTOR_TYPE_P (rhs1_type)
4267 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4268 {
4269 	  error ("invalid type combination in BIT_INSERT_EXPR");
4270 debug_generic_expr (rhs1_type);
4271 debug_generic_expr (rhs2_type);
4272 return true;
4273 }
4274 if (! tree_fits_uhwi_p (rhs3)
4275 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4276 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4277 {
4278 error ("invalid position or size in BIT_INSERT_EXPR");
4279 return true;
4280 }
4281 if (INTEGRAL_TYPE_P (rhs1_type)
4282 && !type_has_mode_precision_p (rhs1_type))
4283 {
4284 error ("BIT_INSERT_EXPR into non-mode-precision operand");
4285 return true;
4286 }
4287 if (INTEGRAL_TYPE_P (rhs1_type))
4288 {
4289 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4290 if (bitpos >= TYPE_PRECISION (rhs1_type)
4291 || (bitpos + TYPE_PRECISION (rhs2_type)
4292 > TYPE_PRECISION (rhs1_type)))
4293 {
4294 error ("insertion out of range in BIT_INSERT_EXPR");
4295 return true;
4296 }
4297 }
4298 else if (VECTOR_TYPE_P (rhs1_type))
4299 {
4300 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4301 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4302 if (bitpos % bitsize != 0)
4303 {
4304 error ("vector insertion not at element boundary");
4305 return true;
4306 }
4307 }
4308 return false;
4309
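    /* Illustrative BIT_INSERT_EXPR forms the checks above accept (a
       sketch, not compiled):

	 x_1 = BIT_INSERT_EXPR <x_2, b_3, 8>;    <-- bits of an integer
	 v_1 = BIT_INSERT_EXPR <v_2, s_3, 64>;   <-- element 2 of a v4si

       The position rhs3 is a constant of type bitsizetype; for vectors
       it must fall on an element boundary, i.e. be a multiple of the
       inserted element's size.  */
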
4310 case DOT_PROD_EXPR:
4311 {
4312 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4313 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4314 && ((!INTEGRAL_TYPE_P (rhs1_type)
4315 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4316 || (!INTEGRAL_TYPE_P (lhs_type)
4317 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4318 || !types_compatible_p (rhs1_type, rhs2_type)
4319 || !useless_type_conversion_p (lhs_type, rhs3_type)
4320 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4321 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4322 {
4323 error ("type mismatch in dot product reduction");
4324 debug_generic_expr (lhs_type);
4325 debug_generic_expr (rhs1_type);
4326 debug_generic_expr (rhs2_type);
4327 return true;
4328 }
4329 return false;
4330 }
4331
4332 case REALIGN_LOAD_EXPR:
4333 /* FIXME. */
4334 return false;
4335
4336 default:
4337 gcc_unreachable ();
4338 }
4339 return false;
4340 }
4341
4342 /* Verify a gimple assignment statement STMT with a single rhs.
4343 Returns true if anything is wrong. */
4344
4345 static bool
4346 verify_gimple_assign_single (gassign *stmt)
4347 {
4348 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4349 tree lhs = gimple_assign_lhs (stmt);
4350 tree lhs_type = TREE_TYPE (lhs);
4351 tree rhs1 = gimple_assign_rhs1 (stmt);
4352 tree rhs1_type = TREE_TYPE (rhs1);
4353 bool res = false;
4354
4355 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4356 {
4357 error ("non-trivial conversion at assignment");
4358 debug_generic_expr (lhs_type);
4359 debug_generic_expr (rhs1_type);
4360 return true;
4361 }
4362
4363 if (gimple_clobber_p (stmt)
4364 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4365 {
4366 error ("non-decl/MEM_REF LHS in clobber statement");
4367 debug_generic_expr (lhs);
4368 return true;
4369 }
4370
4371 if (handled_component_p (lhs)
4372 || TREE_CODE (lhs) == MEM_REF
4373 || TREE_CODE (lhs) == TARGET_MEM_REF)
4374 res |= verify_types_in_gimple_reference (lhs, true);
4375
4376 /* Special codes we cannot handle via their class. */
4377 switch (rhs_code)
4378 {
4379 case ADDR_EXPR:
4380 {
4381 tree op = TREE_OPERAND (rhs1, 0);
4382 if (!is_gimple_addressable (op))
4383 {
4384 error ("invalid operand in unary expression");
4385 return true;
4386 }
4387
4388 /* Technically there is no longer a need for matching types, but
4389 gimple hygiene asks for this check. In LTO we can end up
4390 combining incompatible units and thus end up with addresses
4391 of globals that change their type to a common one. */
4392 if (!in_lto_p
4393 && !types_compatible_p (TREE_TYPE (op),
4394 TREE_TYPE (TREE_TYPE (rhs1)))
4395 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4396 TREE_TYPE (op)))
4397 {
4398 error ("type mismatch in address expression");
4399 debug_generic_stmt (TREE_TYPE (rhs1));
4400 debug_generic_stmt (TREE_TYPE (op));
4401 return true;
4402 }
4403
4404 return (verify_address (rhs1, true)
4405 || verify_types_in_gimple_reference (op, true));
4406 }
4407
4408 /* tcc_reference */
4409 case INDIRECT_REF:
4410 error ("INDIRECT_REF in gimple IL");
4411 return true;
4412
4413 case COMPONENT_REF:
4414 case BIT_FIELD_REF:
4415 case ARRAY_REF:
4416 case ARRAY_RANGE_REF:
4417 case VIEW_CONVERT_EXPR:
4418 case REALPART_EXPR:
4419 case IMAGPART_EXPR:
4420 case TARGET_MEM_REF:
4421 case MEM_REF:
4422 if (!is_gimple_reg (lhs)
4423 && is_gimple_reg_type (TREE_TYPE (lhs)))
4424 {
4425 error ("invalid rhs for gimple memory store");
4426 debug_generic_stmt (lhs);
4427 debug_generic_stmt (rhs1);
4428 return true;
4429 }
4430 return res || verify_types_in_gimple_reference (rhs1, false);
4431
4432 /* tcc_constant */
4433 case SSA_NAME:
4434 case INTEGER_CST:
4435 case REAL_CST:
4436 case FIXED_CST:
4437 case COMPLEX_CST:
4438 case VECTOR_CST:
4439 case STRING_CST:
4440 return res;
4441
4442 /* tcc_declaration */
4443 case CONST_DECL:
4444 return res;
4445 case VAR_DECL:
4446 case PARM_DECL:
4447 if (!is_gimple_reg (lhs)
4448 && !is_gimple_reg (rhs1)
4449 && is_gimple_reg_type (TREE_TYPE (lhs)))
4450 {
4451 error ("invalid rhs for gimple memory store");
4452 debug_generic_stmt (lhs);
4453 debug_generic_stmt (rhs1);
4454 return true;
4455 }
4456 return res;
4457
4458 case CONSTRUCTOR:
4459 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4460 {
4461 unsigned int i;
4462 tree elt_i, elt_v, elt_t = NULL_TREE;
4463
4464 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4465 return res;
4466 	  /* For vector CONSTRUCTORs we require that either it is an empty
4467 	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4468 	     (then the element count must be correct to cover the whole
4469 	     outer vector and the index must be NULL on all elements), or it
4470 	     is a CONSTRUCTOR of scalar elements, where as an exception we
4471 	     allow a smaller number of elements (assuming zero filling) and
4472 	     consecutive indexes as compared to NULL indexes (such
4473 	     CONSTRUCTORs can appear in the IL from FEs).  */
4474 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4475 {
4476 if (elt_t == NULL_TREE)
4477 {
4478 elt_t = TREE_TYPE (elt_v);
4479 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4480 {
4481 tree elt_t = TREE_TYPE (elt_v);
4482 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4483 TREE_TYPE (elt_t)))
4484 {
4485 error ("incorrect type of vector CONSTRUCTOR"
4486 " elements");
4487 debug_generic_stmt (rhs1);
4488 return true;
4489 }
4490 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4491 * TYPE_VECTOR_SUBPARTS (elt_t),
4492 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4493 {
4494 error ("incorrect number of vector CONSTRUCTOR"
4495 " elements");
4496 debug_generic_stmt (rhs1);
4497 return true;
4498 }
4499 }
4500 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4501 elt_t))
4502 {
4503 error ("incorrect type of vector CONSTRUCTOR elements");
4504 debug_generic_stmt (rhs1);
4505 return true;
4506 }
4507 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4508 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4509 {
4510 error ("incorrect number of vector CONSTRUCTOR elements");
4511 debug_generic_stmt (rhs1);
4512 return true;
4513 }
4514 }
4515 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4516 {
4517 error ("incorrect type of vector CONSTRUCTOR elements");
4518 debug_generic_stmt (rhs1);
4519 return true;
4520 }
4521 if (elt_i != NULL_TREE
4522 && (TREE_CODE (elt_t) == VECTOR_TYPE
4523 || TREE_CODE (elt_i) != INTEGER_CST
4524 || compare_tree_int (elt_i, i) != 0))
4525 {
4526 error ("vector CONSTRUCTOR with non-NULL element index");
4527 debug_generic_stmt (rhs1);
4528 return true;
4529 }
4530 if (!is_gimple_val (elt_v))
4531 {
4532 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4533 debug_generic_stmt (rhs1);
4534 return true;
4535 }
4536 }
4537 }
4538 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4539 {
4540 error ("non-vector CONSTRUCTOR with elements");
4541 debug_generic_stmt (rhs1);
4542 return true;
4543 }
4544 return res;
4545
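    /* Illustrative vector CONSTRUCTORs accepted by the code above (a
       sketch, not compiled):

	 v4si_1 = {};                        <-- empty, implies zero filling
	 v4si_2 = { a_3, b_4, c_5, d_6 };    <-- scalar elements, NULL indexes
	 v4si_3 = { lo_4, hi_5 };            <-- two v2si halves

       Scalar-element CONSTRUCTORs may have fewer elements than the
       vector; vector-element ones must cover the whole result
       exactly.  */
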
4546 case ASSERT_EXPR:
4547 /* FIXME. */
4548 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4549 if (rhs1 == boolean_false_node)
4550 {
4551 error ("ASSERT_EXPR with an always-false condition");
4552 debug_generic_stmt (rhs1);
4553 return true;
4554 }
4555 break;
4556
4557 case OBJ_TYPE_REF:
4558 case WITH_SIZE_EXPR:
4559 /* FIXME. */
4560 return res;
4561
4562 default:;
4563 }
4564
4565 return res;
4566 }
4567
4568 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4569 is a problem, otherwise false. */
4570
4571 static bool
4572 verify_gimple_assign (gassign *stmt)
4573 {
4574 switch (gimple_assign_rhs_class (stmt))
4575 {
4576 case GIMPLE_SINGLE_RHS:
4577 return verify_gimple_assign_single (stmt);
4578
4579 case GIMPLE_UNARY_RHS:
4580 return verify_gimple_assign_unary (stmt);
4581
4582 case GIMPLE_BINARY_RHS:
4583 return verify_gimple_assign_binary (stmt);
4584
4585 case GIMPLE_TERNARY_RHS:
4586 return verify_gimple_assign_ternary (stmt);
4587
4588 default:
4589 gcc_unreachable ();
4590 }
4591 }
4592
4593 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4594 is a problem, otherwise false. */
4595
4596 static bool
4597 verify_gimple_return (greturn *stmt)
4598 {
4599 tree op = gimple_return_retval (stmt);
4600 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4601
4602 /* We cannot test for present return values as we do not fix up missing
4603 return values from the original source. */
4604 if (op == NULL)
4605 return false;
4606
4607 if (!is_gimple_val (op)
4608 && TREE_CODE (op) != RESULT_DECL)
4609 {
4610 error ("invalid operand in return statement");
4611 debug_generic_stmt (op);
4612 return true;
4613 }
4614
4615 if ((TREE_CODE (op) == RESULT_DECL
4616 && DECL_BY_REFERENCE (op))
4617 || (TREE_CODE (op) == SSA_NAME
4618 && SSA_NAME_VAR (op)
4619 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4620 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4621 op = TREE_TYPE (op);
4622
4623 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4624 {
4625 error ("invalid conversion in return statement");
4626 debug_generic_stmt (restype);
4627 debug_generic_stmt (TREE_TYPE (op));
4628 return true;
4629 }
4630
4631 return false;
4632 }
4633
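/* Note on the DECL_BY_REFERENCE unwrapping above: when a function
   returns an aggregate by invisible reference, the RESULT_DECL holds a
   pointer, so the verifier compares the pointed-to type (TREE_TYPE of
   the pointer type) against the declared return type instead.  */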
4634
4635 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4636 is a problem, otherwise false. */
4637
4638 static bool
4639 verify_gimple_goto (ggoto *stmt)
4640 {
4641 tree dest = gimple_goto_dest (stmt);
4642
4643 /* ??? We have two canonical forms of direct goto destinations, a
4644 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4645 if (TREE_CODE (dest) != LABEL_DECL
4646 && (!is_gimple_val (dest)
4647 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4648 {
4649 error ("goto destination is neither a label nor a pointer");
4650 return true;
4651 }
4652
4653 return false;
4654 }
4655
4656 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4657 is a problem, otherwise false. */
4658
4659 static bool
4660 verify_gimple_switch (gswitch *stmt)
4661 {
4662 unsigned int i, n;
4663 tree elt, prev_upper_bound = NULL_TREE;
4664 tree index_type, elt_type = NULL_TREE;
4665
4666 if (!is_gimple_val (gimple_switch_index (stmt)))
4667 {
4668 error ("invalid operand to switch statement");
4669 debug_generic_stmt (gimple_switch_index (stmt));
4670 return true;
4671 }
4672
4673 index_type = TREE_TYPE (gimple_switch_index (stmt));
4674 if (! INTEGRAL_TYPE_P (index_type))
4675 {
4676       error ("non-integral type in switch statement");
4677 debug_generic_expr (index_type);
4678 return true;
4679 }
4680
4681 elt = gimple_switch_label (stmt, 0);
4682 if (CASE_LOW (elt) != NULL_TREE
4683 || CASE_HIGH (elt) != NULL_TREE
4684 || CASE_CHAIN (elt) != NULL_TREE)
4685 {
4686 error ("invalid default case label in switch statement");
4687 debug_generic_expr (elt);
4688 return true;
4689 }
4690
4691 n = gimple_switch_num_labels (stmt);
4692 for (i = 1; i < n; i++)
4693 {
4694 elt = gimple_switch_label (stmt, i);
4695
4696 if (CASE_CHAIN (elt))
4697 {
4698 error ("invalid CASE_CHAIN");
4699 debug_generic_expr (elt);
4700 return true;
4701 }
4702 if (! CASE_LOW (elt))
4703 {
4704 error ("invalid case label in switch statement");
4705 debug_generic_expr (elt);
4706 return true;
4707 }
4708 if (CASE_HIGH (elt)
4709 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4710 {
4711 error ("invalid case range in switch statement");
4712 debug_generic_expr (elt);
4713 return true;
4714 }
4715
4716 if (elt_type)
4717 {
4718 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4719 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4720 {
4721 error ("type mismatch for case label in switch statement");
4722 debug_generic_expr (elt);
4723 return true;
4724 }
4725 }
4726 else
4727 {
4728 elt_type = TREE_TYPE (CASE_LOW (elt));
4729 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4730 {
4731 error ("type precision mismatch in switch statement");
4732 return true;
4733 }
4734 }
4735
4736 if (prev_upper_bound)
4737 {
4738 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4739 {
4740 error ("case labels not sorted in switch statement");
4741 return true;
4742 }
4743 }
4744
4745 prev_upper_bound = CASE_HIGH (elt);
4746 if (! prev_upper_bound)
4747 prev_upper_bound = CASE_LOW (elt);
4748 }
4749
4750 return false;
4751 }
4752
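/* A well-formed switch by the rules above looks like (illustrative):

     switch (i_1) <default: <L3>, case 1: <L1>, case 4 ... 6: <L2>>

   i.e. the default label comes first with no CASE_LOW/CASE_HIGH, every
   other label has CASE_LOW set, ranges satisfy CASE_LOW < CASE_HIGH,
   all labels share one type, and the labels are sorted and disjoint.  */
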
4753 /* Verify a gimple debug statement STMT.
4754 Returns true if anything is wrong. */
4755
4756 static bool
4757 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4758 {
4759 /* There isn't much that could be wrong in a gimple debug stmt. A
4760 gimple debug bind stmt, for example, maps a tree, that's usually
4761 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4762 component or member of an aggregate type, to another tree, that
4763 can be an arbitrary expression. These stmts expand into debug
4764 insns, and are converted to debug notes by var-tracking.c. */
4765 return false;
4766 }
4767
4768 /* Verify a gimple label statement STMT.
4769 Returns true if anything is wrong. */
4770
4771 static bool
4772 verify_gimple_label (glabel *stmt)
4773 {
4774 tree decl = gimple_label_label (stmt);
4775 int uid;
4776 bool err = false;
4777
4778 if (TREE_CODE (decl) != LABEL_DECL)
4779 return true;
4780 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4781 && DECL_CONTEXT (decl) != current_function_decl)
4782 {
4783 error ("label%'s context is not the current function decl");
4784 err |= true;
4785 }
4786
4787 uid = LABEL_DECL_UID (decl);
4788 if (cfun->cfg
4789 && (uid == -1
4790 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4791 {
4792 error ("incorrect entry in label_to_block_map");
4793 err |= true;
4794 }
4795
4796 uid = EH_LANDING_PAD_NR (decl);
4797 if (uid)
4798 {
4799 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4800 if (decl != lp->post_landing_pad)
4801 {
4802 error ("incorrect setting of landing pad number");
4803 err |= true;
4804 }
4805 }
4806
4807 return err;
4808 }
4809
4810 /* Verify a gimple cond statement STMT.
4811 Returns true if anything is wrong. */
4812
4813 static bool
4814 verify_gimple_cond (gcond *stmt)
4815 {
4816 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4817 {
4818 error ("invalid comparison code in gimple cond");
4819 return true;
4820 }
4821 if (!(!gimple_cond_true_label (stmt)
4822 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4823 || !(!gimple_cond_false_label (stmt)
4824 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4825 {
4826 error ("invalid labels in gimple cond");
4827 return true;
4828 }
4829
4830 return verify_gimple_comparison (boolean_type_node,
4831 gimple_cond_lhs (stmt),
4832 gimple_cond_rhs (stmt),
4833 gimple_cond_code (stmt));
4834 }
4835
4836 /* Verify the GIMPLE statement STMT. Returns true if there is an
4837 error, otherwise false. */
4838
4839 static bool
4840 verify_gimple_stmt (gimple *stmt)
4841 {
4842 switch (gimple_code (stmt))
4843 {
4844 case GIMPLE_ASSIGN:
4845 return verify_gimple_assign (as_a <gassign *> (stmt));
4846
4847 case GIMPLE_LABEL:
4848 return verify_gimple_label (as_a <glabel *> (stmt));
4849
4850 case GIMPLE_CALL:
4851 return verify_gimple_call (as_a <gcall *> (stmt));
4852
4853 case GIMPLE_COND:
4854 return verify_gimple_cond (as_a <gcond *> (stmt));
4855
4856 case GIMPLE_GOTO:
4857 return verify_gimple_goto (as_a <ggoto *> (stmt));
4858
4859 case GIMPLE_SWITCH:
4860 return verify_gimple_switch (as_a <gswitch *> (stmt));
4861
4862 case GIMPLE_RETURN:
4863 return verify_gimple_return (as_a <greturn *> (stmt));
4864
4865 case GIMPLE_ASM:
4866 return false;
4867
4868 case GIMPLE_TRANSACTION:
4869 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4870
4871 /* Tuples that do not have tree operands. */
4872 case GIMPLE_NOP:
4873 case GIMPLE_PREDICT:
4874 case GIMPLE_RESX:
4875 case GIMPLE_EH_DISPATCH:
4876 case GIMPLE_EH_MUST_NOT_THROW:
4877 return false;
4878
4879 CASE_GIMPLE_OMP:
4880 /* OpenMP directives are validated by the FE and never operated
4881 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4882 non-gimple expressions when the main index variable has had
4883 its address taken. This does not affect the loop itself
4884          because the header of a GIMPLE_OMP_FOR is merely used to determine
4885          how to set up the parallel iteration.  */
4886 return false;
4887
4888 case GIMPLE_DEBUG:
4889 return verify_gimple_debug (stmt);
4890
4891 default:
4892 gcc_unreachable ();
4893 }
4894 }
4895
4896 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4897 and false otherwise. */
4898
4899 static bool
4900 verify_gimple_phi (gphi *phi)
4901 {
4902 bool err = false;
4903 unsigned i;
4904 tree phi_result = gimple_phi_result (phi);
4905 bool virtual_p;
4906
4907 if (!phi_result)
4908 {
4909 error ("invalid PHI result");
4910 return true;
4911 }
4912
4913 virtual_p = virtual_operand_p (phi_result);
4914 if (TREE_CODE (phi_result) != SSA_NAME
4915 || (virtual_p
4916 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4917 {
4918 error ("invalid PHI result");
4919 err = true;
4920 }
4921
4922 for (i = 0; i < gimple_phi_num_args (phi); i++)
4923 {
4924 tree t = gimple_phi_arg_def (phi, i);
4925
4926 if (!t)
4927 {
4928 error ("missing PHI def");
4929 err |= true;
4930 continue;
4931 }
4932 /* Addressable variables do have SSA_NAMEs but they
4933 are not considered gimple values. */
4934 else if ((TREE_CODE (t) == SSA_NAME
4935 && virtual_p != virtual_operand_p (t))
4936 || (virtual_p
4937 && (TREE_CODE (t) != SSA_NAME
4938 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4939 || (!virtual_p
4940 && !is_gimple_val (t)))
4941 {
4942 error ("invalid PHI argument");
4943 debug_generic_expr (t);
4944 err |= true;
4945 }
4946 #ifdef ENABLE_TYPES_CHECKING
4947 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4948 {
4949 error ("incompatible types in PHI argument %u", i);
4950 debug_generic_stmt (TREE_TYPE (phi_result));
4951 debug_generic_stmt (TREE_TYPE (t));
4952 err |= true;
4953 }
4954 #endif
4955 }
4956
4957 return err;
4958 }
4959
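/* Illustrative PHIs accepted by the checks above (a sketch):

     x_3 = PHI <x_1(2), x_2(3)>             <-- ordinary SSA name arguments
     .MEM_4 = PHI <.MEM_1(2), .MEM_2(3)>    <-- virtual operand PHI

   A PHI is either entirely virtual (result and arguments based on the
   single virtual operand) or entirely non-virtual with GIMPLE values
   as arguments; mixing the two is rejected.  */
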
4960 /* Verify the GIMPLE statements inside the sequence STMTS. */
4961
4962 static bool
4963 verify_gimple_in_seq_2 (gimple_seq stmts)
4964 {
4965 gimple_stmt_iterator ittr;
4966 bool err = false;
4967
4968 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4969 {
4970 gimple *stmt = gsi_stmt (ittr);
4971
4972 switch (gimple_code (stmt))
4973 {
4974 case GIMPLE_BIND:
4975 err |= verify_gimple_in_seq_2 (
4976 gimple_bind_body (as_a <gbind *> (stmt)));
4977 break;
4978
4979 case GIMPLE_TRY:
4980 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4981 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4982 break;
4983
4984 case GIMPLE_EH_FILTER:
4985 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4986 break;
4987
4988 case GIMPLE_EH_ELSE:
4989 {
4990 geh_else *eh_else = as_a <geh_else *> (stmt);
4991 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4992 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4993 }
4994 break;
4995
4996 case GIMPLE_CATCH:
4997 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4998 as_a <gcatch *> (stmt)));
4999 break;
5000
5001 case GIMPLE_TRANSACTION:
5002 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5003 break;
5004
5005 default:
5006 {
5007 bool err2 = verify_gimple_stmt (stmt);
5008 if (err2)
5009 debug_gimple_stmt (stmt);
5010 err |= err2;
5011 }
5012 }
5013 }
5014
5015 return err;
5016 }
5017
5018 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5019 is a problem, otherwise false. */
5020
5021 static bool
5022 verify_gimple_transaction (gtransaction *stmt)
5023 {
5024 tree lab;
5025
5026 lab = gimple_transaction_label_norm (stmt);
5027 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5028 return true;
5029 lab = gimple_transaction_label_uninst (stmt);
5030 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5031 return true;
5032 lab = gimple_transaction_label_over (stmt);
5033 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5034 return true;
5035
5036 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5037 }
5038
5039
5040 /* Verify the GIMPLE statements inside the statement list STMTS. */
5041
5042 DEBUG_FUNCTION void
5043 verify_gimple_in_seq (gimple_seq stmts)
5044 {
5045 timevar_push (TV_TREE_STMT_VERIFY);
5046 if (verify_gimple_in_seq_2 (stmts))
5047 internal_error ("verify_gimple failed");
5048 timevar_pop (TV_TREE_STMT_VERIFY);
5049 }
5050
5051 /* Return true when T can be shared.  */
5052
5053 static bool
5054 tree_node_can_be_shared (tree t)
5055 {
5056 if (IS_TYPE_OR_DECL_P (t)
5057 || TREE_CODE (t) == SSA_NAME
5058 || TREE_CODE (t) == IDENTIFIER_NODE
5059 || TREE_CODE (t) == CASE_LABEL_EXPR
5060 || is_gimple_min_invariant (t))
5061 return true;
5062
5063 if (t == error_mark_node)
5064 return true;
5065
5066 return false;
5067 }
5068
5069 /* Called via walk_tree. Verify tree sharing. */
5070
5071 static tree
5072 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5073 {
5074 hash_set<void *> *visited = (hash_set<void *> *) data;
5075
5076 if (tree_node_can_be_shared (*tp))
5077 {
5078 *walk_subtrees = false;
5079 return NULL;
5080 }
5081
5082 if (visited->add (*tp))
5083 return *tp;
5084
5085 return NULL;
5086 }
5087
5088 /* Called via walk_gimple_stmt. Verify tree sharing. */
5089
5090 static tree
5091 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5092 {
5093 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5094 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5095 }
5096
5097 static bool eh_error_found;
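
/* Traversal callback for the EH throw-statement table: report any
   statement recorded in the table that was not seen during the CFG
   walk, i.e. a dead statement that still has an EH table entry.  */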
5098 bool
5099 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5100 hash_set<gimple *> *visited)
5101 {
5102 if (!visited->contains (stmt))
5103 {
5104 error ("dead STMT in EH table");
5105 debug_gimple_stmt (stmt);
5106 eh_error_found = true;
5107 }
5108 return true;
5109 }
5110
5111 /* Verify that the block of location LOC is in BLOCKS.  */
5112
5113 static bool
5114 verify_location (hash_set<tree> *blocks, location_t loc)
5115 {
5116 tree block = LOCATION_BLOCK (loc);
5117 if (block != NULL_TREE
5118 && !blocks->contains (block))
5119 {
5120 error ("location references block not in block tree");
5121 return true;
5122 }
5123 if (block != NULL_TREE)
5124 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5125 return false;
5126 }
5127
5128 /* Called via walk_tree. Verify that expressions have no blocks. */
5129
5130 static tree
5131 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5132 {
5133 if (!EXPR_P (*tp))
5134 {
5135 *walk_subtrees = false;
5136 return NULL;
5137 }
5138
5139 location_t loc = EXPR_LOCATION (*tp);
5140 if (LOCATION_BLOCK (loc) != NULL)
5141 return *tp;
5142
5143 return NULL;
5144 }
5145
5146 /* Called via walk_tree. Verify locations of expressions. */
5147
5148 static tree
5149 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5150 {
5151 hash_set<tree> *blocks = (hash_set<tree> *) data;
5152 tree t = *tp;
5153
5154 /* ??? This doesn't really belong here but there's no good place to
5155 stick this remainder of old verify_expr. */
5156 /* ??? This barfs on debug stmts which contain binds to vars with
5157 different function context. */
5158 #if 0
5159 if (VAR_P (t)
5160 || TREE_CODE (t) == PARM_DECL
5161 || TREE_CODE (t) == RESULT_DECL)
5162 {
5163 tree context = decl_function_context (t);
5164 if (context != cfun->decl
5165 && !SCOPE_FILE_SCOPE_P (context)
5166 && !TREE_STATIC (t)
5167 && !DECL_EXTERNAL (t))
5168 {
5169 error ("local declaration from a different function");
5170 return t;
5171 }
5172 }
5173 #endif
5174
5175 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5176 {
5177 tree x = DECL_DEBUG_EXPR (t);
5178 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5179 if (addr)
5180 return addr;
5181 }
5182 if ((VAR_P (t)
5183 || TREE_CODE (t) == PARM_DECL
5184 || TREE_CODE (t) == RESULT_DECL)
5185 && DECL_HAS_VALUE_EXPR_P (t))
5186 {
5187 tree x = DECL_VALUE_EXPR (t);
5188 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5189 if (addr)
5190 return addr;
5191 }
5192
5193 if (!EXPR_P (t))
5194 {
5195 *walk_subtrees = false;
5196 return NULL;
5197 }
5198
5199 location_t loc = EXPR_LOCATION (t);
5200 if (verify_location (blocks, loc))
5201 return t;
5202
5203 return NULL;
5204 }
5205
5206 /* Called via walk_gimple_op. Verify locations of expressions. */
5207
5208 static tree
5209 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5210 {
5211 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5212 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5213 }
5214
5215 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5216
5217 static void
5218 collect_subblocks (hash_set<tree> *blocks, tree block)
5219 {
5220 tree t;
5221 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5222 {
5223 blocks->add (t);
5224 collect_subblocks (blocks, t);
5225 }
5226 }
5227
5228 /* Verify the GIMPLE statements in the CFG of FN. */
5229
5230 DEBUG_FUNCTION void
5231 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5232 {
5233 basic_block bb;
5234 bool err = false;
5235
5236 timevar_push (TV_TREE_STMT_VERIFY);
5237 hash_set<void *> visited;
5238 hash_set<gimple *> visited_throwing_stmts;
5239
5240 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5241 hash_set<tree> blocks;
5242 if (DECL_INITIAL (fn->decl))
5243 {
5244 blocks.add (DECL_INITIAL (fn->decl));
5245 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5246 }
5247
5248 FOR_EACH_BB_FN (bb, fn)
5249 {
5250 gimple_stmt_iterator gsi;
5251 edge_iterator ei;
5252 edge e;
5253
5254 for (gphi_iterator gpi = gsi_start_phis (bb);
5255 !gsi_end_p (gpi);
5256 gsi_next (&gpi))
5257 {
5258 gphi *phi = gpi.phi ();
5259 bool err2 = false;
5260 unsigned i;
5261
5262 if (gimple_bb (phi) != bb)
5263 {
5264 error ("gimple_bb (phi) is set to a wrong basic block");
5265 err2 = true;
5266 }
5267
5268 err2 |= verify_gimple_phi (phi);
5269
5270 /* Only PHI arguments have locations. */
5271 if (gimple_location (phi) != UNKNOWN_LOCATION)
5272 {
5273 error ("PHI node with location");
5274 err2 = true;
5275 }
5276
5277 for (i = 0; i < gimple_phi_num_args (phi); i++)
5278 {
5279 tree arg = gimple_phi_arg_def (phi, i);
5280 tree addr = walk_tree (&arg, verify_node_sharing_1,
5281 &visited, NULL);
5282 if (addr)
5283 {
5284 error ("incorrect sharing of tree nodes");
5285 debug_generic_expr (addr);
5286 err2 |= true;
5287 }
5288 location_t loc = gimple_phi_arg_location (phi, i);
5289 if (virtual_operand_p (gimple_phi_result (phi))
5290 && loc != UNKNOWN_LOCATION)
5291 {
5292 error ("virtual PHI with argument locations");
5293 err2 = true;
5294 }
5295 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5296 if (addr)
5297 {
5298 debug_generic_expr (addr);
5299 err2 = true;
5300 }
5301 err2 |= verify_location (&blocks, loc);
5302 }
5303
5304 if (err2)
5305 debug_gimple_stmt (phi);
5306 err |= err2;
5307 }
5308
5309 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5310 {
5311 gimple *stmt = gsi_stmt (gsi);
5312 bool err2 = false;
5313 struct walk_stmt_info wi;
5314 tree addr;
5315 int lp_nr;
5316
5317 if (gimple_bb (stmt) != bb)
5318 {
5319 error ("gimple_bb (stmt) is set to a wrong basic block");
5320 err2 = true;
5321 }
5322
5323 err2 |= verify_gimple_stmt (stmt);
5324 err2 |= verify_location (&blocks, gimple_location (stmt));
5325
5326 memset (&wi, 0, sizeof (wi));
5327 wi.info = (void *) &visited;
5328 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5329 if (addr)
5330 {
5331 error ("incorrect sharing of tree nodes");
5332 debug_generic_expr (addr);
5333 err2 |= true;
5334 }
5335
5336 memset (&wi, 0, sizeof (wi));
5337 wi.info = (void *) &blocks;
5338 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5339 if (addr)
5340 {
5341 debug_generic_expr (addr);
5342 err2 |= true;
5343 }
5344
5345 	  /* If the statement is marked as part of an EH region, then it is
5346 	     expected that the statement could throw.  Verify that when
5347 	     optimizations simplify a statement such that we can prove it
5348 	     cannot throw, the other data structures are updated to match.  */
5350 lp_nr = lookup_stmt_eh_lp (stmt);
5351 if (lp_nr != 0)
5352 visited_throwing_stmts.add (stmt);
5353 if (lp_nr > 0)
5354 {
5355 if (!stmt_could_throw_p (cfun, stmt))
5356 {
5357 if (verify_nothrow)
5358 {
5359 error ("statement marked for throw, but doesn%'t");
5360 err2 |= true;
5361 }
5362 }
5363 else if (!gsi_one_before_end_p (gsi))
5364 {
5365 error ("statement marked for throw in middle of block");
5366 err2 |= true;
5367 }
5368 }
5369
5370 if (err2)
5371 debug_gimple_stmt (stmt);
5372 err |= err2;
5373 }
5374
5375 FOR_EACH_EDGE (e, ei, bb->succs)
5376 if (e->goto_locus != UNKNOWN_LOCATION)
5377 err |= verify_location (&blocks, e->goto_locus);
5378 }
5379
5380 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5381 eh_error_found = false;
5382 if (eh_table)
5383 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5384 (&visited_throwing_stmts);
5385
5386 if (err || eh_error_found)
5387 internal_error ("verify_gimple failed");
5388
5389 verify_histograms ();
5390 timevar_pop (TV_TREE_STMT_VERIFY);
5391 }
5392
5393
5394 /* Verifies that the flow information is OK. */
5395
5396 static int
5397 gimple_verify_flow_info (void)
5398 {
5399 int err = 0;
5400 basic_block bb;
5401 gimple_stmt_iterator gsi;
5402 gimple *stmt;
5403 edge e;
5404 edge_iterator ei;
5405
5406 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5407 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5408 {
5409 error ("ENTRY_BLOCK has IL associated with it");
5410 err = 1;
5411 }
5412
5413 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5414 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5415 {
5416 error ("EXIT_BLOCK has IL associated with it");
5417 err = 1;
5418 }
5419
5420 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5421 if (e->flags & EDGE_FALLTHRU)
5422 {
5423 error ("fallthru to exit from bb %d", e->src->index);
5424 err = 1;
5425 }
5426
5427 FOR_EACH_BB_FN (bb, cfun)
5428 {
5429 bool found_ctrl_stmt = false;
5430
5431 stmt = NULL;
5432
5433       /* Skip labels at the start of the basic block.  */
5434 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5435 {
5436 tree label;
5437 gimple *prev_stmt = stmt;
5438
5439 stmt = gsi_stmt (gsi);
5440
5441 if (gimple_code (stmt) != GIMPLE_LABEL)
5442 break;
5443
5444 label = gimple_label_label (as_a <glabel *> (stmt));
5445 if (prev_stmt && DECL_NONLOCAL (label))
5446 {
5447 error ("nonlocal label ");
5448 print_generic_expr (stderr, label);
5449 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5450 bb->index);
5451 err = 1;
5452 }
5453
5454 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5455 {
5456 error ("EH landing pad label ");
5457 print_generic_expr (stderr, label);
5458 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5459 bb->index);
5460 err = 1;
5461 }
5462
5463 if (label_to_block (cfun, label) != bb)
5464 {
5465 error ("label ");
5466 print_generic_expr (stderr, label);
5467 fprintf (stderr, " to block does not match in bb %d",
5468 bb->index);
5469 err = 1;
5470 }
5471
5472 if (decl_function_context (label) != current_function_decl)
5473 {
5474 error ("label ");
5475 print_generic_expr (stderr, label);
5476 fprintf (stderr, " has incorrect context in bb %d",
5477 bb->index);
5478 err = 1;
5479 }
5480 }
5481
5482       /* Verify that the body of basic block BB is free of control flow.  */
5483 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5484 {
5485 gimple *stmt = gsi_stmt (gsi);
5486
5487 if (found_ctrl_stmt)
5488 {
5489 error ("control flow in the middle of basic block %d",
5490 bb->index);
5491 err = 1;
5492 }
5493
5494 if (stmt_ends_bb_p (stmt))
5495 found_ctrl_stmt = true;
5496
5497 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5498 {
5499 error ("label ");
5500 print_generic_expr (stderr, gimple_label_label (label_stmt));
5501 fprintf (stderr, " in the middle of basic block %d", bb->index);
5502 err = 1;
5503 }
5504 }
5505
5506 gsi = gsi_last_nondebug_bb (bb);
5507 if (gsi_end_p (gsi))
5508 continue;
5509
5510 stmt = gsi_stmt (gsi);
5511
5512 if (gimple_code (stmt) == GIMPLE_LABEL)
5513 continue;
5514
5515 err |= verify_eh_edges (stmt);
5516
5517 if (is_ctrl_stmt (stmt))
5518 {
5519 FOR_EACH_EDGE (e, ei, bb->succs)
5520 if (e->flags & EDGE_FALLTHRU)
5521 {
5522 error ("fallthru edge after a control statement in bb %d",
5523 bb->index);
5524 err = 1;
5525 }
5526 }
5527
5528 if (gimple_code (stmt) != GIMPLE_COND)
5529 {
5530 	  /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5531 	     after anything other than a GIMPLE_COND statement.  */
5532 FOR_EACH_EDGE (e, ei, bb->succs)
5533 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5534 {
5535 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5536 bb->index);
5537 err = 1;
5538 }
5539 }
5540
5541 switch (gimple_code (stmt))
5542 {
5543 case GIMPLE_COND:
5544 {
5545 edge true_edge;
5546 edge false_edge;
5547
5548 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5549
5550 if (!true_edge
5551 || !false_edge
5552 || !(true_edge->flags & EDGE_TRUE_VALUE)
5553 || !(false_edge->flags & EDGE_FALSE_VALUE)
5554 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5555 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5556 || EDGE_COUNT (bb->succs) >= 3)
5557 {
5558 error ("wrong outgoing edge flags at end of bb %d",
5559 bb->index);
5560 err = 1;
5561 }
5562 }
5563 break;
5564
5565 case GIMPLE_GOTO:
5566 if (simple_goto_p (stmt))
5567 {
5568 error ("explicit goto at end of bb %d", bb->index);
5569 err = 1;
5570 }
5571 else
5572 {
5573 /* FIXME. We should double check that the labels in the
5574 destination blocks have their address taken. */
5575 FOR_EACH_EDGE (e, ei, bb->succs)
5576 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5577 | EDGE_FALSE_VALUE))
5578 || !(e->flags & EDGE_ABNORMAL))
5579 {
5580 error ("wrong outgoing edge flags at end of bb %d",
5581 bb->index);
5582 err = 1;
5583 }
5584 }
5585 break;
5586
5587 case GIMPLE_CALL:
5588 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5589 break;
5590 /* fallthru */
5591 case GIMPLE_RETURN:
5592 if (!single_succ_p (bb)
5593 || (single_succ_edge (bb)->flags
5594 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5595 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5596 {
5597 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5598 err = 1;
5599 }
5600 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5601 {
5602 error ("return edge does not point to exit in bb %d",
5603 bb->index);
5604 err = 1;
5605 }
5606 break;
5607
5608 case GIMPLE_SWITCH:
5609 {
5610 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5611 tree prev;
5612 edge e;
5613 size_t i, n;
5614
5615 n = gimple_switch_num_labels (switch_stmt);
5616
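	    /* The bb->aux field serves as scratch space below:
	       (void *)1 marks a block that is the target of some case
	       label, (void *)2 one that is also reached by an outgoing
	       edge; it is cleared again via the successor edges at the
	       end.  */
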
5617 /* Mark all the destination basic blocks. */
5618 for (i = 0; i < n; ++i)
5619 {
5620 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5621 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5622 label_bb->aux = (void *)1;
5623 }
5624
5625 /* Verify that the case labels are sorted. */
5626 prev = gimple_switch_label (switch_stmt, 0);
5627 for (i = 1; i < n; ++i)
5628 {
5629 tree c = gimple_switch_label (switch_stmt, i);
5630 if (!CASE_LOW (c))
5631 {
5632 error ("found default case not at the start of "
5633 "case vector");
5634 err = 1;
5635 continue;
5636 }
5637 if (CASE_LOW (prev)
5638 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5639 {
5640 error ("case labels not sorted: ");
5641 print_generic_expr (stderr, prev);
5642 		  fprintf (stderr, " is greater than ");
5643 		  print_generic_expr (stderr, c);
5644 		  fprintf (stderr, " but comes before it.\n");
5645 err = 1;
5646 }
5647 prev = c;
5648 }
5649 /* VRP will remove the default case if it can prove it will
5650 never be executed. So do not verify there always exists
5651 a default case here. */
5652
5653 FOR_EACH_EDGE (e, ei, bb->succs)
5654 {
5655 if (!e->dest->aux)
5656 {
5657 error ("extra outgoing edge %d->%d",
5658 bb->index, e->dest->index);
5659 err = 1;
5660 }
5661
5662 e->dest->aux = (void *)2;
5663 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5664 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5665 {
5666 error ("wrong outgoing edge flags at end of bb %d",
5667 bb->index);
5668 err = 1;
5669 }
5670 }
5671
5672 /* Check that we have all of them. */
5673 for (i = 0; i < n; ++i)
5674 {
5675 basic_block label_bb = gimple_switch_label_bb (cfun,
5676 switch_stmt, i);
5677
5678 if (label_bb->aux != (void *)2)
5679 {
5680 error ("missing edge %i->%i", bb->index, label_bb->index);
5681 err = 1;
5682 }
5683 }
5684
5685 FOR_EACH_EDGE (e, ei, bb->succs)
5686 e->dest->aux = (void *)0;
5687 }
5688 break;
5689
5690 case GIMPLE_EH_DISPATCH:
5691 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5692 break;
5693
5694 default:
5695 break;
5696 }
5697 }
5698
5699 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5700 verify_dominators (CDI_DOMINATORS);
5701
5702 return err;
5703 }
5704
5705
5706 /* Update PHI nodes after creating a forwarder block joined
5707    by edge FALLTHRU.  */
5708
5709 static void
5710 gimple_make_forwarder_block (edge fallthru)
5711 {
5712 edge e;
5713 edge_iterator ei;
5714 basic_block dummy, bb;
5715 tree var;
5716 gphi_iterator gsi;
5717
5718 dummy = fallthru->src;
5719 bb = fallthru->dest;
5720
5721 if (single_pred_p (bb))
5722 return;
5723
5724 /* If we redirected a branch we must create new PHI nodes at the
5725 start of BB. */
5726 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5727 {
5728 gphi *phi, *new_phi;
5729
5730 phi = gsi.phi ();
5731 var = gimple_phi_result (phi);
5732 new_phi = create_phi_node (var, bb);
5733 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5734 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5735 UNKNOWN_LOCATION);
5736 }
5737
5738 /* Add the arguments we have stored on edges. */
5739 FOR_EACH_EDGE (e, ei, bb->preds)
5740 {
5741 if (e == fallthru)
5742 continue;
5743
5744 flush_pending_stmts (e);
5745 }
5746 }
5747
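/* Illustrative effect of gimple_make_forwarder_block (a sketch): the
   original PHIs stay in the forwarder DUMMY under renamed results, and
   BB receives fresh PHIs fed from the fallthru edge, e.g.

     before:  dummy: x_1 = PHI <a(1), b(2)>
     after:   dummy: x_2 = PHI <a(1), b(2)>
	      bb:    x_1 = PHI <x_2(dummy), ...>

   where the remaining predecessor arguments of the new PHI are filled
   in from the edge data by flush_pending_stmts.  */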
5748
5749 /* Return a non-special label at the head of basic block BB.
5750    Create one if it doesn't exist.  */
5751
5752 tree
5753 gimple_block_label (basic_block bb)
5754 {
5755 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5756 bool first = true;
5757 tree label;
5758 glabel *stmt;
5759
5760 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5761 {
5762 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5763 if (!stmt)
5764 break;
5765 label = gimple_label_label (stmt);
5766 if (!DECL_NONLOCAL (label))
5767 {
5768 if (!first)
5769 gsi_move_before (&i, &s);
5770 return label;
5771 }
5772 }
5773
5774 label = create_artificial_label (UNKNOWN_LOCATION);
5775 stmt = gimple_build_label (label);
5776 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5777 return label;
5778 }
5779
5780
5781 /* Attempt to perform edge redirection by replacing a possibly complex
5782 jump instruction by a goto or by removing the jump completely.
5783 This can apply only if all edges now point to the same block. The
5784 parameters and return values are equivalent to
5785 redirect_edge_and_branch. */
5786
5787 static edge
5788 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5789 {
5790 basic_block src = e->src;
5791 gimple_stmt_iterator i;
5792 gimple *stmt;
5793
5794 /* We can replace or remove a complex jump only when we have exactly
5795 two edges. */
5796 if (EDGE_COUNT (src->succs) != 2
5797 /* Verify that all targets will be TARGET. Specifically, the
5798 edge that is not E must also go to TARGET. */
5799 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5800 return NULL;
5801
5802 i = gsi_last_bb (src);
5803 if (gsi_end_p (i))
5804 return NULL;
5805
5806 stmt = gsi_stmt (i);
5807
5808 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5809 {
5810 gsi_remove (&i, true);
5811 e = ssa_redirect_edge (e, target);
5812 e->flags = EDGE_FALLTHRU;
5813 return e;
5814 }
5815
5816 return NULL;
5817 }
5818
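/* Illustrative case for the helper above (a sketch): redirecting one
   arm of

     if (x_1 != 0) goto <bb 3>; else goto <bb 4>;

   to <bb 4> would leave both outgoing edges pointing at the same
   destination, so the GIMPLE_COND (or an analogous GIMPLE_SWITCH) is
   removed and the surviving edge becomes a plain EDGE_FALLTHRU.  */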
5819
5820 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5821 edge representing the redirected branch. */
5822
5823 static edge
5824 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5825 {
5826 basic_block bb = e->src;
5827 gimple_stmt_iterator gsi;
5828 edge ret;
5829 gimple *stmt;
5830
5831 if (e->flags & EDGE_ABNORMAL)
5832 return NULL;
5833
5834 if (e->dest == dest)
5835 return NULL;
5836
5837 if (e->flags & EDGE_EH)
5838 return redirect_eh_edge (e, dest);
5839
5840 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5841 {
5842 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5843 if (ret)
5844 return ret;
5845 }
5846
5847 gsi = gsi_last_nondebug_bb (bb);
5848 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5849
5850 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5851 {
5852 case GIMPLE_COND:
5853 /* For GIMPLE_COND, we only need to redirect the edge. */
5854 break;
5855
5856 case GIMPLE_GOTO:
5857 /* No non-abnormal edges should lead from a non-simple goto, and
5858 simple ones should be represented implicitly. */
5859 gcc_unreachable ();
5860
5861 case GIMPLE_SWITCH:
5862 {
5863 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5864 tree label = gimple_block_label (dest);
5865 tree cases = get_cases_for_edge (e, switch_stmt);
5866
5867 /* If we have a list of cases associated with E, then use it
5868 as it's a lot faster than walking the entire case vector. */
5869 if (cases)
5870 {
5871 edge e2 = find_edge (e->src, dest);
5872 tree last, first;
5873
5874 first = cases;
5875 while (cases)
5876 {
5877 last = cases;
5878 CASE_LABEL (cases) = label;
5879 cases = CASE_CHAIN (cases);
5880 }
5881
5882 /* If there was already an edge in the CFG, then we need
5883 to move all the cases associated with E to E2. */
5884 if (e2)
5885 {
5886 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5887
5888 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5889 CASE_CHAIN (cases2) = first;
5890 }
5891 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5892 }
5893 else
5894 {
5895 size_t i, n = gimple_switch_num_labels (switch_stmt);
5896
5897 for (i = 0; i < n; i++)
5898 {
5899 tree elt = gimple_switch_label (switch_stmt, i);
5900 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5901 CASE_LABEL (elt) = label;
5902 }
5903 }
5904 }
5905 break;
5906
5907 case GIMPLE_ASM:
5908 {
5909 gasm *asm_stmt = as_a <gasm *> (stmt);
5910 int i, n = gimple_asm_nlabels (asm_stmt);
5911 tree label = NULL;
5912
5913 for (i = 0; i < n; ++i)
5914 {
5915 tree cons = gimple_asm_label_op (asm_stmt, i);
5916 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
5917 {
5918 if (!label)
5919 label = gimple_block_label (dest);
5920 TREE_VALUE (cons) = label;
5921 }
5922 }
5923
5924 /* If we didn't find any label matching the former edge in the
5925 asm labels, we must be redirecting the fallthrough
5926 edge. */
5927 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5928 }
5929 break;
5930
5931 case GIMPLE_RETURN:
5932 gsi_remove (&gsi, true);
5933 e->flags |= EDGE_FALLTHRU;
5934 break;
5935
5936 case GIMPLE_OMP_RETURN:
5937 case GIMPLE_OMP_CONTINUE:
5938 case GIMPLE_OMP_SECTIONS_SWITCH:
5939 case GIMPLE_OMP_FOR:
5940 /* The edges from OMP constructs can be simply redirected. */
5941 break;
5942
5943 case GIMPLE_EH_DISPATCH:
5944 if (!(e->flags & EDGE_FALLTHRU))
5945 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5946 break;
5947
5948 case GIMPLE_TRANSACTION:
5949 if (e->flags & EDGE_TM_ABORT)
5950 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5951 gimple_block_label (dest));
5952 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5953 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5954 gimple_block_label (dest));
5955 else
5956 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5957 gimple_block_label (dest));
5958 break;
5959
5960 default:
5961 /* Otherwise it must be a fallthru edge, and we don't need to
5962 do anything besides redirecting it. */
5963 gcc_assert (e->flags & EDGE_FALLTHRU);
5964 break;
5965 }
5966
5967 /* Now update the edges in the CFG; ssa_redirect_edge also removes
5968 the PHI arguments E carried into its old destination and saves
5969 them on the edge (see flush_pending_stmts). */
5970 e = ssa_redirect_edge (e, dest);
5971
5972 return e;
5973 }
5974
5975 /* Returns true if it is possible to remove edge E by redirecting
5976 it to the destination of the other edge from E->src. */
5977
5978 static bool
5979 gimple_can_remove_branch_p (const_edge e)
5980 {
5981 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5982 return false;
5983
5984 return true;
5985 }
5986
5987 /* Simple wrapper, as we can always redirect fallthru edges. */
5988
5989 static basic_block
5990 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5991 {
5992 e = gimple_redirect_edge_and_branch (e, dest);
5993 gcc_assert (e);
5994
5995 return NULL;
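 /* Returning NULL tells the caller that no new basic block was
 created to hold the jump. */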
5996 }
5997
5998
5999 /* Splits basic block BB after statement STMT (but at least after the
6000 labels). If STMT is NULL, BB is split just after the labels. */
6001
6002 static basic_block
6003 gimple_split_block (basic_block bb, void *stmt)
6004 {
6005 gimple_stmt_iterator gsi;
6006 gimple_stmt_iterator gsi_tgt;
6007 gimple_seq list;
6008 basic_block new_bb;
6009 edge e;
6010 edge_iterator ei;
6011
6012 new_bb = create_empty_bb (bb);
6013
6014 /* Redirect the outgoing edges. */
6015 new_bb->succs = bb->succs;
6016 bb->succs = NULL;
6017 FOR_EACH_EDGE (e, ei, new_bb->succs)
6018 e->src = new_bb;
6019
6020 /* Get a stmt iterator pointing to the first stmt to move. */
6021 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6022 gsi = gsi_after_labels (bb);
6023 else
6024 {
6025 gsi = gsi_for_stmt ((gimple *) stmt);
6026 gsi_next (&gsi);
6027 }
6028
6029 /* Move everything from GSI to the new basic block. */
6030 if (gsi_end_p (gsi))
6031 return new_bb;
6032
6033 /* Split the statement list - avoid re-creating new containers as this
6034 brings ugly quadratic memory consumption in the inliner.
6035 (We are still quadratic since we need to update stmt BB pointers,
6036 sadly.) */
6037 gsi_split_seq_before (&gsi, &list);
6038 set_bb_seq (new_bb, list);
6039 for (gsi_tgt = gsi_start (list);
6040 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6041 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6042
6043 return new_bb;
6044 }
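
/* An illustrative sketch (assumption): callers normally reach the
 splitter above through the generic wrapper from cfghooks, as
 gimple_split_block_before_cond_jump below does:

 edge e = split_block (bb, stmt);
 basic_block rest = e->dest;

 BB then ends just after STMT, and REST holds the remaining
 statements. */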
6045
6046
6047 /* Moves basic block BB after block AFTER. */
6048
6049 static bool
6050 gimple_move_block_after (basic_block bb, basic_block after)
6051 {
6052 if (bb->prev_bb == after)
6053 return true;
6054
6055 unlink_block (bb);
6056 link_block (bb, after);
6057
6058 return true;
6059 }
6060
6061
6062 /* Return TRUE if block BB has no executable statements, otherwise return
6063 FALSE. */
6064
6065 static bool
6066 gimple_empty_block_p (basic_block bb)
6067 {
6068 /* BB must have no executable statements. */
6069 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6070 if (phi_nodes (bb))
6071 return false;
6072 while (!gsi_end_p (gsi))
6073 {
6074 gimple *stmt = gsi_stmt (gsi);
6075 if (is_gimple_debug (stmt))
6076 ;
6077 else if (gimple_code (stmt) == GIMPLE_NOP
6078 || gimple_code (stmt) == GIMPLE_PREDICT)
6079 ;
6080 else
6081 return false;
6082 gsi_next (&gsi);
6083 }
6084 return true;
6085 }
6086
6087
6088 /* Split a basic block if it ends with a conditional branch and if the
6089 other part of the block is not empty. */
6090
6091 static basic_block
6092 gimple_split_block_before_cond_jump (basic_block bb)
6093 {
6094 gimple *last, *split_point;
6095 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6096 if (gsi_end_p (gsi))
6097 return NULL;
6098 last = gsi_stmt (gsi);
6099 if (gimple_code (last) != GIMPLE_COND
6100 && gimple_code (last) != GIMPLE_SWITCH)
6101 return NULL;
6102 gsi_prev (&gsi);
6103 split_point = gsi_stmt (gsi);
6104 return split_block (bb, split_point)->dest;
6105 }
6106
6107
6108 /* Return true if basic block BB can be duplicated. */
6109
6110 static bool
6111 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6112 {
6113 return true;
6114 }
6115
6116 /* Create a duplicate of the basic block BB. NOTE: This does not
6117 preserve SSA form. */
6118
6119 static basic_block
6120 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6121 {
6122 basic_block new_bb;
6123 gimple_stmt_iterator gsi_tgt;
6124
6125 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6126
6127 /* Copy the PHI nodes. We ignore PHI node arguments here because
6128 the incoming edges have not been setup yet. */
6129 for (gphi_iterator gpi = gsi_start_phis (bb);
6130 !gsi_end_p (gpi);
6131 gsi_next (&gpi))
6132 {
6133 gphi *phi, *copy;
6134 phi = gpi.phi ();
6135 copy = create_phi_node (NULL_TREE, new_bb);
6136 create_new_def_for (gimple_phi_result (phi), copy,
6137 gimple_phi_result_ptr (copy));
6138 gimple_set_uid (copy, gimple_uid (phi));
6139 }
6140
6141 gsi_tgt = gsi_start_bb (new_bb);
6142 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6143 !gsi_end_p (gsi);
6144 gsi_next (&gsi))
6145 {
6146 def_operand_p def_p;
6147 ssa_op_iter op_iter;
6148 tree lhs;
6149 gimple *stmt, *copy;
6150
6151 stmt = gsi_stmt (gsi);
6152 if (gimple_code (stmt) == GIMPLE_LABEL)
6153 continue;
6154
6155 /* Don't duplicate label debug stmts. */
6156 if (gimple_debug_bind_p (stmt)
6157 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6158 == LABEL_DECL)
6159 continue;
6160
6161 /* Create a new copy of STMT and duplicate STMT's virtual
6162 operands. */
6163 copy = gimple_copy (stmt);
6164 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6165
6166 maybe_duplicate_eh_stmt (copy, stmt);
6167 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6168
6169 /* When copying around a stmt writing into a local non-user
6170 aggregate, make sure it won't share a stack slot with other
6171 vars. */
6172 lhs = gimple_get_lhs (stmt);
6173 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6174 {
6175 tree base = get_base_address (lhs);
6176 if (base
6177 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6178 && DECL_IGNORED_P (base)
6179 && !TREE_STATIC (base)
6180 && !DECL_EXTERNAL (base)
6181 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6182 DECL_NONSHAREABLE (base) = 1;
6183 }
6184
6185 /* If requested, remap dependence info of cliques brought in
6186 via inlining. */
6187 if (id)
6188 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6189 {
6190 tree op = gimple_op (copy, i);
6191 if (!op)
6192 continue;
6193 if (TREE_CODE (op) == ADDR_EXPR
6194 || TREE_CODE (op) == WITH_SIZE_EXPR)
6195 op = TREE_OPERAND (op, 0);
6196 while (handled_component_p (op))
6197 op = TREE_OPERAND (op, 0);
6198 if ((TREE_CODE (op) == MEM_REF
6199 || TREE_CODE (op) == TARGET_MEM_REF)
6200 && MR_DEPENDENCE_CLIQUE (op) > 1
6201 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6202 {
6203 if (!id->dependence_map)
6204 id->dependence_map = new hash_map<dependence_hash,
6205 unsigned short>;
6206 bool existed;
6207 unsigned short &newc = id->dependence_map->get_or_insert
6208 (MR_DEPENDENCE_CLIQUE (op), &existed);
6209 if (!existed)
6210 {
6211 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6212 newc = ++cfun->last_clique;
6213 }
6214 MR_DEPENDENCE_CLIQUE (op) = newc;
6215 }
6216 }
6217
6218 /* Create new names for all the definitions created by COPY and
6219 add replacement mappings for each new name. */
6220 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6221 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6222 }
6223
6224 return new_bb;
6225 }
6226
6227 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6228
6229 static void
6230 add_phi_args_after_copy_edge (edge e_copy)
6231 {
6232 basic_block bb, bb_copy = e_copy->src, dest;
6233 edge e;
6234 edge_iterator ei;
6235 gphi *phi, *phi_copy;
6236 tree def;
6237 gphi_iterator psi, psi_copy;
6238
6239 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6240 return;
6241
6242 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6243
6244 if (e_copy->dest->flags & BB_DUPLICATED)
6245 dest = get_bb_original (e_copy->dest);
6246 else
6247 dest = e_copy->dest;
6248
6249 e = find_edge (bb, dest);
6250 if (!e)
6251 {
6252 /* During loop unrolling the target of the latch edge is copied.
6253 In this case we are not looking for the edge to DEST, but for
6254 the edge to the duplicated block whose original was DEST. */
6255 FOR_EACH_EDGE (e, ei, bb->succs)
6256 {
6257 if ((e->dest->flags & BB_DUPLICATED)
6258 && get_bb_original (e->dest) == dest)
6259 break;
6260 }
6261
6262 gcc_assert (e != NULL);
6263 }
6264
6265 for (psi = gsi_start_phis (e->dest),
6266 psi_copy = gsi_start_phis (e_copy->dest);
6267 !gsi_end_p (psi);
6268 gsi_next (&psi), gsi_next (&psi_copy))
6269 {
6270 phi = psi.phi ();
6271 phi_copy = psi_copy.phi ();
6272 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6273 add_phi_arg (phi_copy, def, e_copy,
6274 gimple_phi_arg_location_from_edge (phi, e));
6275 }
6276 }
6277
6278
6279 /* Basic block BB_COPY was created by code duplication. Add phi node
6280 arguments for edges going out of BB_COPY. The blocks that were
6281 duplicated have BB_DUPLICATED set. */
6282
6283 void
6284 add_phi_args_after_copy_bb (basic_block bb_copy)
6285 {
6286 edge e_copy;
6287 edge_iterator ei;
6288
6289 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6290 {
6291 add_phi_args_after_copy_edge (e_copy);
6292 }
6293 }
6294
6295 /* Blocks in REGION_COPY array of length N_REGION were created by
6296 duplication of basic blocks. Add phi node arguments for edges
6297 going from these blocks. If E_COPY is not NULL, also add
6298 phi node arguments for its destination. */
6299
6300 void
6301 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6302 edge e_copy)
6303 {
6304 unsigned i;
6305
6306 for (i = 0; i < n_region; i++)
6307 region_copy[i]->flags |= BB_DUPLICATED;
6308
6309 for (i = 0; i < n_region; i++)
6310 add_phi_args_after_copy_bb (region_copy[i]);
6311 if (e_copy)
6312 add_phi_args_after_copy_edge (e_copy);
6313
6314 for (i = 0; i < n_region; i++)
6315 region_copy[i]->flags &= ~BB_DUPLICATED;
6316 }
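
/* An illustrative sketch (assumption) of the protocol the helpers above
 take part in, mirroring gimple_duplicate_sese_region below:

 initialize_original_copy_tables ();
 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy,
 loop, split_edge_bb_loc (entry), true);
 add_phi_args_after_copy (region_copy, n_region, NULL);
 free_original_copy_tables ();

 The copy tables map each copy back to its original, which is what
 add_phi_args_after_copy_edge relies on (see get_bb_original above). */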
6317
6318 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6319 important exit edge EXIT. By important we mean that no SSA name defined
6320 inside region is live over the other exit edges of the region. All entry
6321 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6322 to the duplicate of the region. Dominance and loop information is
6323 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6324 UPDATE_DOMINANCE is false then we assume that the caller will update the
6325 dominance information after calling this function. The new basic
6326 blocks are stored to REGION_COPY in the same order as they had in REGION,
6327 provided that REGION_COPY is not NULL.
6328 The function returns false if it is unable to copy the region,
6329 true otherwise. */
6330
6331 bool
6332 gimple_duplicate_sese_region (edge entry, edge exit,
6333 basic_block *region, unsigned n_region,
6334 basic_block *region_copy,
6335 bool update_dominance)
6336 {
6337 unsigned i;
6338 bool free_region_copy = false, copying_header = false;
6339 struct loop *loop = entry->dest->loop_father;
6340 edge exit_copy;
6341 vec<basic_block> doms = vNULL;
6342 edge redirected;
6343 profile_count total_count = profile_count::uninitialized ();
6344 profile_count entry_count = profile_count::uninitialized ();
6345
6346 if (!can_copy_bbs_p (region, n_region))
6347 return false;
6348
6349 /* Some sanity checking. Note that we do not check for all possible
6350 misuses of the function, i.e. if you ask to copy something weird,
6351 it will work, but the state of structures probably will not be
6352 correct. */
6353 for (i = 0; i < n_region; i++)
6354 {
6355 /* We do not handle subloops, i.e. all the blocks must belong to the
6356 same loop. */
6357 if (region[i]->loop_father != loop)
6358 return false;
6359
6360 if (region[i] != entry->dest
6361 && region[i] == loop->header)
6362 return false;
6363 }
6364
6365 /* In case the function is used for loop header copying (which is the primary
6366 use), ensure that EXIT and its copy will be new latch and entry edges. */
6367 if (loop->header == entry->dest)
6368 {
6369 copying_header = true;
6370
6371 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6372 return false;
6373
6374 for (i = 0; i < n_region; i++)
6375 if (region[i] != exit->src
6376 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6377 return false;
6378 }
6379
6380 initialize_original_copy_tables ();
6381
6382 if (copying_header)
6383 set_loop_copy (loop, loop_outer (loop));
6384 else
6385 set_loop_copy (loop, loop);
6386
6387 if (!region_copy)
6388 {
6389 region_copy = XNEWVEC (basic_block, n_region);
6390 free_region_copy = true;
6391 }
6392
6393 /* Record blocks outside the region that are dominated by something
6394 inside. */
6395 if (update_dominance)
6396 {
6397 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6399 }
6400
6401 if (entry->dest->count.initialized_p ())
6402 {
6403 total_count = entry->dest->count;
6404 entry_count = entry->count ();
6405 /* Fix up corner cases, to avoid division by zero or creation of negative
6406 frequencies. */
6407 if (entry_count > total_count)
6408 entry_count = total_count;
6409 }
6410
6411 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6412 split_edge_bb_loc (entry), update_dominance);
6413 if (total_count.initialized_p () && entry_count.initialized_p ())
6414 {
6415 scale_bbs_frequencies_profile_count (region, n_region,
6416 total_count - entry_count,
6417 total_count);
6418 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6419 total_count);
6420 }
6421
6422 if (copying_header)
6423 {
6424 loop->header = exit->dest;
6425 loop->latch = exit->src;
6426 }
6427
6428 /* Redirect the entry and add the phi node arguments. */
6429 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6430 gcc_assert (redirected != NULL);
6431 flush_pending_stmts (entry);
6432
6433 /* Concerning updating of dominators: We must recount dominators
6434 for entry block and its copy. Anything that is outside of the
6435 region, but was dominated by something inside needs recounting as
6436 well. */
6437 if (update_dominance)
6438 {
6439 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6440 doms.safe_push (get_bb_original (entry->dest));
6441 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6442 doms.release ();
6443 }
6444
6445 /* Add the other PHI node arguments. */
6446 add_phi_args_after_copy (region_copy, n_region, NULL);
6447
6448 if (free_region_copy)
6449 free (region_copy);
6450
6451 free_original_copy_tables ();
6452 return true;
6453 }
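
/* An illustrative sketch of the primary use mentioned above, loop
 header copying (the details here are assumptions, cf. the ch pass):

 edge entry = loop_preheader_edge (loop);
 edge exit = ...; (some edge leaving the copied header blocks)
 if (gimple_duplicate_sese_region (entry, exit, bbs, n_bbs,
 copied_bbs, true))
 update_ssa (TODO_update_ssa);

 As documented above, the SSA web is not updated here, so the caller
 is responsible for the final update_ssa call. */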
6454
6455 /* Checks if BB is part of the region defined by N_REGION BBS. */
6456 static bool
6457 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6458 {
6459 unsigned int n;
6460
6461 for (n = 0; n < n_region; n++)
6462 {
6463 if (bb == bbs[n])
6464 return true;
6465 }
6466 return false;
6467 }
6468
6469 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6470 are stored to REGION_COPY in the same order in which they appear
6471 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6472 the region, EXIT an exit from it. The condition guarding EXIT
6473 is moved to ENTRY. Returns true if duplication succeeds, false
6474 otherwise.
6475
6476 For example,
6477
6478 some_code;
6479 if (cond)
6480 A;
6481 else
6482 B;
6483
6484 is transformed to
6485
6486 if (cond)
6487 {
6488 some_code;
6489 A;
6490 }
6491 else
6492 {
6493 some_code;
6494 B;
6495 }
6496 */
6497
6498 bool
6499 gimple_duplicate_sese_tail (edge entry, edge exit,
6500 basic_block *region, unsigned n_region,
6501 basic_block *region_copy)
6502 {
6503 unsigned i;
6504 bool free_region_copy = false;
6505 struct loop *loop = exit->dest->loop_father;
6506 struct loop *orig_loop = entry->dest->loop_father;
6507 basic_block switch_bb, entry_bb, nentry_bb;
6508 vec<basic_block> doms;
6509 profile_count total_count = profile_count::uninitialized (),
6510 exit_count = profile_count::uninitialized ();
6511 edge exits[2], nexits[2], e;
6512 gimple_stmt_iterator gsi;
6513 gimple *cond_stmt;
6514 edge sorig, snew;
6515 basic_block exit_bb;
6516 gphi_iterator psi;
6517 gphi *phi;
6518 tree def;
6519 struct loop *target, *aloop, *cloop;
6520
6521 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6522 exits[0] = exit;
6523 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6524
6525 if (!can_copy_bbs_p (region, n_region))
6526 return false;
6527
6528 initialize_original_copy_tables ();
6529 set_loop_copy (orig_loop, loop);
6530
6531 target = loop;
6532 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6533 {
6534 if (bb_part_of_region_p (aloop->header, region, n_region))
6535 {
6536 cloop = duplicate_loop (aloop, target);
6537 duplicate_subloops (aloop, cloop);
6538 }
6539 }
6540
6541 if (!region_copy)
6542 {
6543 region_copy = XNEWVEC (basic_block, n_region);
6544 free_region_copy = true;
6545 }
6546
6547 gcc_assert (!need_ssa_update_p (cfun));
6548
6549 /* Record blocks outside the region that are dominated by something
6550 inside. */
6551 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6552
6553 total_count = exit->src->count;
6554 exit_count = exit->count ();
6555 /* Fix up corner cases, to avoid division by zero or creation of negative
6556 frequencies. */
6557 if (exit_count > total_count)
6558 exit_count = total_count;
6559
6560 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6561 split_edge_bb_loc (exit), true);
6562 if (total_count.initialized_p () && exit_count.initialized_p ())
6563 {
6564 scale_bbs_frequencies_profile_count (region, n_region,
6565 total_count - exit_count,
6566 total_count);
6567 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6568 total_count);
6569 }
6570
6571 /* Create the switch block, and put the exit condition into it. */
6572 entry_bb = entry->dest;
6573 nentry_bb = get_bb_copy (entry_bb);
6574 if (!last_stmt (entry->src)
6575 || !stmt_ends_bb_p (last_stmt (entry->src)))
6576 switch_bb = entry->src;
6577 else
6578 switch_bb = split_edge (entry);
6579 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6580
6581 gsi = gsi_last_bb (switch_bb);
6582 cond_stmt = last_stmt (exit->src);
6583 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6584 cond_stmt = gimple_copy (cond_stmt);
6585
6586 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6587
6588 sorig = single_succ_edge (switch_bb);
6589 sorig->flags = exits[1]->flags;
6590 sorig->probability = exits[1]->probability;
6591 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6592 snew->probability = exits[0]->probability;
6593
6594
6595 /* Register the new edge from SWITCH_BB in loop exit lists. */
6596 rescan_loop_exit (snew, true, false);
6597
6598 /* Add the PHI node arguments. */
6599 add_phi_args_after_copy (region_copy, n_region, snew);
6600
6601 /* Get rid of now superfluous conditions and associated edges (and phi node
6602 arguments). */
6603 exit_bb = exit->dest;
6604
6605 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6606 PENDING_STMT (e) = NULL;
6607
6608 /* The latch of ORIG_LOOP was copied, and so was the backedge
6609 to the original header. We redirect this backedge to EXIT_BB. */
6610 for (i = 0; i < n_region; i++)
6611 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6612 {
6613 gcc_assert (single_succ_edge (region_copy[i]));
6614 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6615 PENDING_STMT (e) = NULL;
6616 for (psi = gsi_start_phis (exit_bb);
6617 !gsi_end_p (psi);
6618 gsi_next (&psi))
6619 {
6620 phi = psi.phi ();
6621 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6622 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6623 }
6624 }
6625 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6626 PENDING_STMT (e) = NULL;
6627
6628 /* Anything that is outside of the region, but was dominated by something
6629 inside, needs its dominance info updated. */
6630 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6631 doms.release ();
6632 /* Update the SSA web. */
6633 update_ssa (TODO_update_ssa);
6634
6635 if (free_region_copy)
6636 free (region_copy);
6637
6638 free_original_copy_tables ();
6639 return true;
6640 }
6641
6642 /* Add all the blocks dominated by ENTRY to the vector BBS_P. Stop
6643 adding blocks when the dominator traversal reaches EXIT. This
6644 function silently assumes that ENTRY strictly dominates EXIT. */
6645
6646 void
6647 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6648 vec<basic_block> *bbs_p)
6649 {
6650 basic_block son;
6651
6652 for (son = first_dom_son (CDI_DOMINATORS, entry);
6653 son;
6654 son = next_dom_son (CDI_DOMINATORS, son))
6655 {
6656 bbs_p->safe_push (son);
6657 if (son != exit)
6658 gather_blocks_in_sese_region (son, exit, bbs_p);
6659 }
6660 }
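
/* Usage sketch, mirroring move_sese_region_to_fn below: only dominated
 sons are pushed, so the caller must seed the vector with ENTRY
 itself:

 vec<basic_block> bbs = vNULL;
 bbs.safe_push (entry_bb);
 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs); */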
6661
6662 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6663 The duplicates are recorded in VARS_MAP. */
6664
6665 static void
6666 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6667 tree to_context)
6668 {
6669 tree t = *tp, new_t;
6670 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6671
6672 if (DECL_CONTEXT (t) == to_context)
6673 return;
6674
6675 bool existed;
6676 tree &loc = vars_map->get_or_insert (t, &existed);
6677
6678 if (!existed)
6679 {
6680 if (SSA_VAR_P (t))
6681 {
6682 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6683 add_local_decl (f, new_t);
6684 }
6685 else
6686 {
6687 gcc_assert (TREE_CODE (t) == CONST_DECL);
6688 new_t = copy_node (t);
6689 }
6690 DECL_CONTEXT (new_t) = to_context;
6691
6692 loc = new_t;
6693 }
6694 else
6695 new_t = loc;
6696
6697 *tp = new_t;
6698 }
6699
6700
6701 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6702 VARS_MAP maps old ssa names and var_decls to the new ones. */
6703
6704 static tree
6705 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6706 tree to_context)
6707 {
6708 tree new_name;
6709
6710 gcc_assert (!virtual_operand_p (name));
6711
6712 tree *loc = vars_map->get (name);
6713
6714 if (!loc)
6715 {
6716 tree decl = SSA_NAME_VAR (name);
6717 if (decl)
6718 {
6719 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6720 replace_by_duplicate_decl (&decl, vars_map, to_context);
6721 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6722 decl, SSA_NAME_DEF_STMT (name));
6723 }
6724 else
6725 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6726 name, SSA_NAME_DEF_STMT (name));
6727
6728 /* Now that we've used the def stmt to define new_name, make sure it
6729 doesn't define name anymore. */
6730 SSA_NAME_DEF_STMT (name) = NULL;
6731
6732 vars_map->put (name, new_name);
6733 }
6734 else
6735 new_name = *loc;
6736
6737 return new_name;
6738 }
6739
6740 struct move_stmt_d
6741 {
6742 tree orig_block;
6743 tree new_block;
6744 tree from_context;
6745 tree to_context;
6746 hash_map<tree, tree> *vars_map;
6747 htab_t new_label_map;
6748 hash_map<void *, void *> *eh_map;
6749 bool remap_decls_p;
6750 };
6751
6752 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6753 contained in *TP if it was previously ORIG_BLOCK, and change the
6754 DECL_CONTEXT of every local variable referenced in *TP. */
6755
6756 static tree
6757 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6758 {
6759 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6760 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6761 tree t = *tp;
6762
6763 if (EXPR_P (t))
6764 {
6765 tree block = TREE_BLOCK (t);
6766 if (block == NULL_TREE)
6767 ;
6768 else if (block == p->orig_block
6769 || p->orig_block == NULL_TREE)
6770 {
6771 /* tree_node_can_be_shared says we can share invariant
6772 addresses but unshare_expr copies them anyways. Make sure
6773 to unshare before adjusting the block in place - we do not
6774 always see a copy here. */
6775 if (TREE_CODE (t) == ADDR_EXPR
6776 && is_gimple_min_invariant (t))
6777 *tp = t = unshare_expr (t);
6778 TREE_SET_BLOCK (t, p->new_block);
6779 }
6780 else if (flag_checking)
6781 {
6782 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6783 block = BLOCK_SUPERCONTEXT (block);
6784 gcc_assert (block == p->orig_block);
6785 }
6786 }
6787 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6788 {
6789 if (TREE_CODE (t) == SSA_NAME)
6790 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6791 else if (TREE_CODE (t) == PARM_DECL
6792 && gimple_in_ssa_p (cfun))
6793 *tp = *(p->vars_map->get (t));
6794 else if (TREE_CODE (t) == LABEL_DECL)
6795 {
6796 if (p->new_label_map)
6797 {
6798 struct tree_map in, *out;
6799 in.base.from = t;
6800 out = (struct tree_map *)
6801 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6802 if (out)
6803 *tp = t = out->to;
6804 }
6805
6806 /* For FORCED_LABELs we can end up with references from other
6807 functions if some SESE regions are outlined. It is UB to
6808 jump in between them, but they could be used just for printing
6809 addresses etc. In that case, DECL_CONTEXT on the label should
6810 be the function containing the glabel stmt with that LABEL_DECL,
6811 rather than whatever function a reference to the label was seen
6812 last time. */
6813 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
6814 DECL_CONTEXT (t) = p->to_context;
6815 }
6816 else if (p->remap_decls_p)
6817 {
6818 /* Replace T with its duplicate. T should no longer appear in the
6819 parent function, so this looks wasteful; however, it may appear
6820 in referenced_vars, and more importantly, as virtual operands of
6821 statements, and in alias lists of other variables. It would be
6822 quite difficult to expunge it from all those places. ??? It might
6823 suffice to do this for addressable variables. */
6824 if ((VAR_P (t) && !is_global_var (t))
6825 || TREE_CODE (t) == CONST_DECL)
6826 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6827 }
6828 *walk_subtrees = 0;
6829 }
6830 else if (TYPE_P (t))
6831 *walk_subtrees = 0;
6832
6833 return NULL_TREE;
6834 }
6835
6836 /* Helper for move_stmt_r. Given an EH region number for the source
6837 function, map that to the duplicate EH region number in the dest. */
6838
6839 static int
6840 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6841 {
6842 eh_region old_r, new_r;
6843
6844 old_r = get_eh_region_from_number (old_nr);
6845 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6846
6847 return new_r->index;
6848 }
6849
6850 /* Similar, but operate on INTEGER_CSTs. */
6851
6852 static tree
6853 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6854 {
6855 int old_nr, new_nr;
6856
6857 old_nr = tree_to_shwi (old_t_nr);
6858 new_nr = move_stmt_eh_region_nr (old_nr, p);
6859
6860 return build_int_cst (integer_type_node, new_nr);
6861 }
6862
6863 /* Like move_stmt_op, but for gimple statements.
6864
6865 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6866 contained in the current statement in *GSI_P and change the
6867 DECL_CONTEXT of every local variable referenced in the current
6868 statement. */
6869
6870 static tree
6871 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6872 struct walk_stmt_info *wi)
6873 {
6874 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6875 gimple *stmt = gsi_stmt (*gsi_p);
6876 tree block = gimple_block (stmt);
6877
6878 if (block == p->orig_block
6879 || (p->orig_block == NULL_TREE
6880 && block != NULL_TREE))
6881 gimple_set_block (stmt, p->new_block);
6882
6883 switch (gimple_code (stmt))
6884 {
6885 case GIMPLE_CALL:
6886 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6887 {
6888 tree r, fndecl = gimple_call_fndecl (stmt);
6889 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6890 switch (DECL_FUNCTION_CODE (fndecl))
6891 {
6892 case BUILT_IN_EH_COPY_VALUES:
6893 r = gimple_call_arg (stmt, 1);
6894 r = move_stmt_eh_region_tree_nr (r, p);
6895 gimple_call_set_arg (stmt, 1, r);
6896 /* FALLTHRU */
6897
6898 case BUILT_IN_EH_POINTER:
6899 case BUILT_IN_EH_FILTER:
6900 r = gimple_call_arg (stmt, 0);
6901 r = move_stmt_eh_region_tree_nr (r, p);
6902 gimple_call_set_arg (stmt, 0, r);
6903 break;
6904
6905 default:
6906 break;
6907 }
6908 }
6909 break;
6910
6911 case GIMPLE_RESX:
6912 {
6913 gresx *resx_stmt = as_a <gresx *> (stmt);
6914 int r = gimple_resx_region (resx_stmt);
6915 r = move_stmt_eh_region_nr (r, p);
6916 gimple_resx_set_region (resx_stmt, r);
6917 }
6918 break;
6919
6920 case GIMPLE_EH_DISPATCH:
6921 {
6922 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6923 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6924 r = move_stmt_eh_region_nr (r, p);
6925 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6926 }
6927 break;
6928
6929 case GIMPLE_OMP_RETURN:
6930 case GIMPLE_OMP_CONTINUE:
6931 break;
6932
6933 case GIMPLE_LABEL:
6934 {
6935 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
6936 so that such labels can be referenced from other regions.
6937 Make sure to update it when seeing a GIMPLE_LABEL though,
6938 since that is the owner of the label. */
6939 walk_gimple_op (stmt, move_stmt_op, wi);
6940 *handled_ops_p = true;
6941 tree label = gimple_label_label (as_a <glabel *> (stmt));
6942 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
6943 DECL_CONTEXT (label) = p->to_context;
6944 }
6945 break;
6946
6947 default:
6948 if (is_gimple_omp (stmt))
6949 {
6950 /* Do not remap variables inside OMP directives. Variables
6951 referenced in clauses and directive header belong to the
6952 parent function and should not be moved into the child
6953 function. */
6954 bool save_remap_decls_p = p->remap_decls_p;
6955 p->remap_decls_p = false;
6956 *handled_ops_p = true;
6957
6958 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6959 move_stmt_op, wi);
6960
6961 p->remap_decls_p = save_remap_decls_p;
6962 }
6963 break;
6964 }
6965
6966 return NULL_TREE;
6967 }
6968
6969 /* Move basic block BB from function CFUN to function DEST_FN. The
6970 block is moved out of the original linked list and placed after
6971 block AFTER in the new list. Also, the block is removed from the
6972 original array of blocks and placed in DEST_FN's array of blocks.
6973 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6974 updated to reflect the moved edges.
6975
6976 The local variables are remapped to new instances, VARS_MAP is used
6977 to record the mapping. */
6978
6979 static void
6980 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6981 basic_block after, bool update_edge_count_p,
6982 struct move_stmt_d *d)
6983 {
6984 struct control_flow_graph *cfg;
6985 edge_iterator ei;
6986 edge e;
6987 gimple_stmt_iterator si;
6988 unsigned old_len, new_len;
6989
6990 /* Remove BB from dominance structures. */
6991 delete_from_dominance_info (CDI_DOMINATORS, bb);
6992
6993 /* Move BB from its current loop to the copy in the new function. */
6994 if (current_loops)
6995 {
6996 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6997 if (new_loop)
6998 bb->loop_father = new_loop;
6999 }
7000
7001 /* Link BB to the new linked list. */
7002 move_block_after (bb, after);
7003
7004 /* Update the edge count in the corresponding flowgraphs. */
7005 if (update_edge_count_p)
7006 FOR_EACH_EDGE (e, ei, bb->succs)
7007 {
7008 cfun->cfg->x_n_edges--;
7009 dest_cfun->cfg->x_n_edges++;
7010 }
7011
7012 /* Remove BB from the original basic block array. */
7013 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7014 cfun->cfg->x_n_basic_blocks--;
7015
7016 /* Grow DEST_CFUN's basic block array if needed. */
7017 cfg = dest_cfun->cfg;
7018 cfg->x_n_basic_blocks++;
7019 if (bb->index >= cfg->x_last_basic_block)
7020 cfg->x_last_basic_block = bb->index + 1;
7021
7022 old_len = vec_safe_length (cfg->x_basic_block_info);
7023 if ((unsigned) cfg->x_last_basic_block >= old_len)
7024 {
7025 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7026 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7027 }
7028
7029 (*cfg->x_basic_block_info)[bb->index] = bb;
7030
7031 /* Remap the variables in phi nodes. */
7032 for (gphi_iterator psi = gsi_start_phis (bb);
7033 !gsi_end_p (psi); )
7034 {
7035 gphi *phi = psi.phi ();
7036 use_operand_p use;
7037 tree op = PHI_RESULT (phi);
7038 ssa_op_iter oi;
7039 unsigned i;
7040
7041 if (virtual_operand_p (op))
7042 {
7043 /* Remove the phi nodes for virtual operands (alias analysis will be
7044 run for the new function, anyway). */
7045 remove_phi_node (&psi, true);
7046 continue;
7047 }
7048
7049 SET_PHI_RESULT (phi,
7050 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7051 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7052 {
7053 op = USE_FROM_PTR (use);
7054 if (TREE_CODE (op) == SSA_NAME)
7055 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7056 }
7057
7058 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7059 {
7060 location_t locus = gimple_phi_arg_location (phi, i);
7061 tree block = LOCATION_BLOCK (locus);
7062
7063 if (locus == UNKNOWN_LOCATION)
7064 continue;
7065 if (d->orig_block == NULL_TREE || block == d->orig_block)
7066 {
7067 locus = set_block (locus, d->new_block);
7068 gimple_phi_arg_set_location (phi, i, locus);
7069 }
7070 }
7071
7072 gsi_next (&psi);
7073 }
7074
7075 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7076 {
7077 gimple *stmt = gsi_stmt (si);
7078 struct walk_stmt_info wi;
7079
7080 memset (&wi, 0, sizeof (wi));
7081 wi.info = d;
7082 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7083
7084 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7085 {
7086 tree label = gimple_label_label (label_stmt);
7087 int uid = LABEL_DECL_UID (label);
7088
7089 gcc_assert (uid > -1);
7090
7091 old_len = vec_safe_length (cfg->x_label_to_block_map);
7092 if (old_len <= (unsigned) uid)
7093 {
7094 new_len = 3 * uid / 2 + 1;
7095 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7096 }
7097
7098 (*cfg->x_label_to_block_map)[uid] = bb;
7099 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7100
7101 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7102
7103 if (uid >= dest_cfun->cfg->last_label_uid)
7104 dest_cfun->cfg->last_label_uid = uid + 1;
7105 }
7106
7107 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7108 remove_stmt_from_eh_lp_fn (cfun, stmt);
7109
7110 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7111 gimple_remove_stmt_histograms (cfun, stmt);
7112
7113 /* We cannot leave any operands allocated from the operand caches of
7114 the current function. */
7115 free_stmt_operands (cfun, stmt);
7116 push_cfun (dest_cfun);
7117 update_stmt (stmt);
7118 pop_cfun ();
7119 }
7120
7121 FOR_EACH_EDGE (e, ei, bb->succs)
7122 if (e->goto_locus != UNKNOWN_LOCATION)
7123 {
7124 tree block = LOCATION_BLOCK (e->goto_locus);
7125 if (d->orig_block == NULL_TREE
7126 || block == d->orig_block)
7127 e->goto_locus = set_block (e->goto_locus, d->new_block);
7128 }
7129 }
7130
7131 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7132 the outermost EH region. Use REGION as the incoming base EH region.
7133 If there is no single outermost region, return NULL and set *ALL to
7134 true. */
7135
7136 static eh_region
7137 find_outermost_region_in_block (struct function *src_cfun,
7138 basic_block bb, eh_region region,
7139 bool *all)
7140 {
7141 gimple_stmt_iterator si;
7142
7143 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7144 {
7145 gimple *stmt = gsi_stmt (si);
7146 eh_region stmt_region;
7147 int lp_nr;
7148
7149 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7150 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7151 if (stmt_region)
7152 {
7153 if (region == NULL)
7154 region = stmt_region;
7155 else if (stmt_region != region)
7156 {
7157 region = eh_region_outermost (src_cfun, stmt_region, region);
7158 if (region == NULL)
7159 {
7160 *all = true;
7161 return NULL;
7162 }
7163 }
7164 }
7165 }
7166
7167 return region;
7168 }
7169
7170 static tree
7171 new_label_mapper (tree decl, void *data)
7172 {
7173 htab_t hash = (htab_t) data;
7174 struct tree_map *m;
7175 void **slot;
7176
7177 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7178
7179 m = XNEW (struct tree_map);
7180 m->hash = DECL_UID (decl);
7181 m->base.from = decl;
7182 m->to = create_artificial_label (UNKNOWN_LOCATION);
7183 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7184 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7185 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7186
7187 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7188 gcc_assert (*slot == NULL);
7189
7190 *slot = m;
7191
7192 return m->to;
7193 }
7194
7195 /* Tree walker to replace the decls used inside value expressions by
7196 duplicates. */
7197
7198 static tree
7199 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7200 {
7201 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7202
7203 switch (TREE_CODE (*tp))
7204 {
7205 case VAR_DECL:
7206 case PARM_DECL:
7207 case RESULT_DECL:
7208 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7209 break;
7210 default:
7211 break;
7212 }
7213
7214 if (IS_TYPE_OR_DECL_P (*tp))
7215 *walk_subtrees = false;
7216
7217 return NULL;
7218 }
7219
7220 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
7221 subblocks. */
7222
7223 static void
7224 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7225 tree to_context)
7226 {
7227 tree *tp, t;
7228
7229 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7230 {
7231 t = *tp;
7232 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7233 continue;
7234 replace_by_duplicate_decl (&t, vars_map, to_context);
7235 if (t != *tp)
7236 {
7237 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7238 {
7239 tree x = DECL_VALUE_EXPR (*tp);
7240 struct replace_decls_d rd = { vars_map, to_context };
7241 x = unshare_expr (x);
7242 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7243 SET_DECL_VALUE_EXPR (t, x);
7244 DECL_HAS_VALUE_EXPR_P (t) = 1;
7245 }
7246 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7247 *tp = t;
7248 }
7249 }
7250
7251 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7252 replace_block_vars_by_duplicates (block, vars_map, to_context);
7253 }
7254
7255 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7256 from FN1 to FN2. */
7257
7258 static void
7259 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7260 struct loop *loop)
7261 {
7262 /* Discard it from the old loop array. */
7263 (*get_loops (fn1))[loop->num] = NULL;
7264
7265 /* Place it in the new loop array, assigning it a new number. */
7266 loop->num = number_of_loops (fn2);
7267 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7268
7269 /* Recurse to children. */
7270 for (loop = loop->inner; loop; loop = loop->next)
7271 fixup_loop_arrays_after_move (fn1, fn2, loop);
7272 }
7273
7274 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7275 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7276
7277 DEBUG_FUNCTION void
7278 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7279 {
7280 basic_block bb;
7281 edge_iterator ei;
7282 edge e;
7283 bitmap bbs = BITMAP_ALLOC (NULL);
7284 int i;
7285
7286 gcc_assert (entry != NULL);
7287 gcc_assert (entry != exit);
7288 gcc_assert (bbs_p != NULL);
7289
7290 gcc_assert (bbs_p->length () > 0);
7291
7292 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7293 bitmap_set_bit (bbs, bb->index);
7294
7295 gcc_assert (bitmap_bit_p (bbs, entry->index));
7296 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7297
7298 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7299 {
7300 if (bb == entry)
7301 {
7302 gcc_assert (single_pred_p (entry));
7303 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7304 }
7305 else
7306 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7307 {
7308 e = ei_edge (ei);
7309 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7310 }
7311
7312 if (bb == exit)
7313 {
7314 gcc_assert (single_succ_p (exit));
7315 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7316 }
7317 else
7318 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7319 {
7320 e = ei_edge (ei);
7321 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7322 }
7323 }
7324
7325 BITMAP_FREE (bbs);
7326 }
7327
7328 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7329
7330 bool
7331 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7332 {
7333 bitmap release_names = (bitmap)data;
7334
7335 if (TREE_CODE (from) != SSA_NAME)
7336 return true;
7337
7338 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7339 return true;
7340 }
7341
7342 /* Return the IFN_LOOP_DIST_ALIAS internal call if present in BB. */
7343
7344 static gimple *
7345 find_loop_dist_alias (basic_block bb)
7346 {
7347 gimple *g = last_stmt (bb);
7348 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7349 return NULL;
7350
7351 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7352 gsi_prev (&gsi);
7353 if (gsi_end_p (gsi))
7354 return NULL;
7355
7356 g = gsi_stmt (gsi);
7357 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7358 return g;
7359 return NULL;
7360 }
7361
7362 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7363 to VALUE and update any immediate uses of its LHS. */
7364
7365 void
7366 fold_loop_internal_call (gimple *g, tree value)
7367 {
7368 tree lhs = gimple_call_lhs (g);
7369 use_operand_p use_p;
7370 imm_use_iterator iter;
7371 gimple *use_stmt;
7372 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7373
7374 update_call_from_tree (&gsi, value);
7375 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7376 {
7377 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7378 SET_USE (use_p, value);
7379 update_stmt (use_stmt);
7380 }
7381 }
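
/* An illustrative sketch (assumption): this is roughly how the
 vectorizer resolves an IFN_LOOP_VECTORIZED versioning condition once
 it has decided to vectorize the loop:

 fold_loop_internal_call (loop_vectorized_call, boolean_true_node);

 The scalar fallback branch then becomes unreachable and is removed
 by the subsequent CFG cleanup. */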
7382
7383 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7384 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7385 single basic block in the original CFG and the new basic block is
7386 returned. DEST_CFUN must not have a CFG yet.
7387
7388 Note that the region need not be a pure SESE region. Blocks inside
7389 the region may contain calls to abort/exit. The only restriction
7390 is that ENTRY_BB should be the only entry point and it must
7391 dominate EXIT_BB.
7392
7393 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7394 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7395 to the new function.
7396
7397 All local variables referenced in the region are assumed to be in
7398 the corresponding BLOCK_VARS and unexpanded variable lists
7399 associated with DEST_CFUN.
7400
7401 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7402 reimplement move_sese_region_to_fn by duplicating the region rather than
7403 moving it. */
7404
7405 basic_block
7406 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7407 basic_block exit_bb, tree orig_block)
7408 {
7409 vec<basic_block> bbs, dom_bbs;
7410 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7411 basic_block after, bb, *entry_pred, *exit_succ, abb;
7412 struct function *saved_cfun = cfun;
7413 int *entry_flag, *exit_flag;
7414 profile_probability *entry_prob, *exit_prob;
7415 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7416 edge e;
7417 edge_iterator ei;
7418 htab_t new_label_map;
7419 hash_map<void *, void *> *eh_map;
7420 struct loop *loop = entry_bb->loop_father;
7421 struct loop *loop0 = get_loop (saved_cfun, 0);
7422 struct move_stmt_d d;
7423
7424 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7425 region. */
7426 gcc_assert (entry_bb != exit_bb
7427 && (!exit_bb
7428 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7429
7430 /* Collect all the blocks in the region. Manually add ENTRY_BB
7431 because it won't be added by dfs_enumerate_from. */
7432 bbs.create (0);
7433 bbs.safe_push (entry_bb);
7434 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7435
7436 if (flag_checking)
7437 verify_sese (entry_bb, exit_bb, &bbs);
7438
7439 /* The blocks that used to be dominated by something in BBS will now be
7440 dominated by the new block. */
7441 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7442 bbs.address (),
7443 bbs.length ());
7444
7445 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7446 the predecessor edges to ENTRY_BB and the successor edges to
7447 EXIT_BB so that we can re-attach them to the new basic block that
7448 will replace the region. */
7449 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7450 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7451 entry_flag = XNEWVEC (int, num_entry_edges);
7452 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7453 i = 0;
7454 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7455 {
7456 entry_prob[i] = e->probability;
7457 entry_flag[i] = e->flags;
7458 entry_pred[i++] = e->src;
7459 remove_edge (e);
7460 }
7461
7462 if (exit_bb)
7463 {
7464 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7465 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7466 exit_flag = XNEWVEC (int, num_exit_edges);
7467 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7468 i = 0;
7469 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7470 {
7471 exit_prob[i] = e->probability;
7472 exit_flag[i] = e->flags;
7473 exit_succ[i++] = e->dest;
7474 remove_edge (e);
7475 }
7476 }
7477 else
7478 {
7479 num_exit_edges = 0;
7480 exit_succ = NULL;
7481 exit_flag = NULL;
7482 exit_prob = NULL;
7483 }
7484
7485 /* Switch context to the child function to initialize DEST_FN's CFG. */
7486 gcc_assert (dest_cfun->cfg == NULL);
7487 push_cfun (dest_cfun);
7488
7489 init_empty_tree_cfg ();
7490
7491 /* Initialize EH information for the new function. */
7492 eh_map = NULL;
7493 new_label_map = NULL;
7494 if (saved_cfun->eh)
7495 {
7496 eh_region region = NULL;
7497 bool all = false;
7498
7499 FOR_EACH_VEC_ELT (bbs, i, bb)
7500 {
7501 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7502 if (all)
7503 break;
7504 }
7505
7506 init_eh_for_function ();
7507 if (region != NULL || all)
7508 {
7509 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7510 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7511 new_label_mapper, new_label_map);
7512 }
7513 }
7514
7515 /* Initialize an empty loop tree. */
7516 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7517 init_loops_structure (dest_cfun, loops, 1);
7518 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7519 set_loops_for_fn (dest_cfun, loops);
7520
7521 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7522
7523 /* Move the outlined loop tree part. */
7524 num_nodes = bbs.length ();
7525 FOR_EACH_VEC_ELT (bbs, i, bb)
7526 {
7527 if (bb->loop_father->header == bb)
7528 {
7529 struct loop *this_loop = bb->loop_father;
7530 struct loop *outer = loop_outer (this_loop);
7531 if (outer == loop
7532 /* If the SESE region contains some bbs ending with
7533 a noreturn call, those are considered to belong
7534 to the outermost loop in saved_cfun, rather than
7535 the entry_bb's loop_father. */
7536 || outer == loop0)
7537 {
7538 if (outer != loop)
7539 num_nodes -= this_loop->num_nodes;
7540 flow_loop_tree_node_remove (bb->loop_father);
7541 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7542 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7543 }
7544 }
7545 else if (bb->loop_father == loop0 && loop0 != loop)
7546 num_nodes--;
7547
7548 /* Remove loop exits from the outlined region. */
7549 if (loops_for_fn (saved_cfun)->exits)
7550 FOR_EACH_EDGE (e, ei, bb->succs)
7551 {
7552 struct loops *l = loops_for_fn (saved_cfun);
7553 loop_exit **slot
7554 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7555 NO_INSERT);
7556 if (slot)
7557 l->exits->clear_slot (slot);
7558 }
7559 }
7560
7561 /* Adjust the number of blocks in the tree root of the outlined part. */
7562 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7563
7564 /* Setup a mapping to be used by move_block_to_fn. */
7565 loop->aux = current_loops->tree_root;
7566 loop0->aux = current_loops->tree_root;
7567
7568 /* Fix up orig_loop_num. If the block referenced in it has been moved
7569 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7570 struct loop *dloop;
7571 signed char *moved_orig_loop_num = NULL;
7572 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7573 if (dloop->orig_loop_num)
7574 {
7575 if (moved_orig_loop_num == NULL)
7576 moved_orig_loop_num
7577 = XCNEWVEC (signed char, vec_safe_length (larray));
7578 if ((*larray)[dloop->orig_loop_num] != NULL
7579 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7580 {
7581 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7582 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7583 moved_orig_loop_num[dloop->orig_loop_num]++;
7584 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7585 }
7586 else
7587 {
7588 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7589 dloop->orig_loop_num = 0;
7590 }
7591 }
7592 pop_cfun ();
7593
7594 if (moved_orig_loop_num)
7595 {
7596 FOR_EACH_VEC_ELT (bbs, i, bb)
7597 {
7598 gimple *g = find_loop_dist_alias (bb);
7599 if (g == NULL)
7600 continue;
7601
7602 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7603 gcc_assert (orig_loop_num
7604 && (unsigned) orig_loop_num < vec_safe_length (larray));
7605 if (moved_orig_loop_num[orig_loop_num] == 2)
7606 {
7607 /* If we have moved both loops with this orig_loop_num into
7608 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7609 too, update the first argument. */
7610 gcc_assert ((*larray)[orig_loop_num] != NULL
7611 && (get_loop (saved_cfun, orig_loop_num)
7612 == NULL));
7613 tree t = build_int_cst (integer_type_node,
7614 (*larray)[orig_loop_num]->num);
7615 gimple_call_set_arg (g, 0, t);
7616 update_stmt (g);
7617 /* Make sure the following loop will not update it. */
7618 moved_orig_loop_num[orig_loop_num] = 0;
7619 }
7620 else
7621 /* Otherwise at least one of the loops stayed in saved_cfun.
7622 Remove the LOOP_DIST_ALIAS call. */
7623 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7624 }
7625 FOR_EACH_BB_FN (bb, saved_cfun)
7626 {
7627 gimple *g = find_loop_dist_alias (bb);
7628 if (g == NULL)
7629 continue;
7630 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7631 gcc_assert (orig_loop_num
7632 && (unsigned) orig_loop_num < vec_safe_length (larray));
7633 if (moved_orig_loop_num[orig_loop_num])
7634 /* LOOP_DIST_ALIAS call remained in saved_cfun; if at least one
7635 of the corresponding loops was moved, remove it. */
7636 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7637 }
7638 XDELETEVEC (moved_orig_loop_num);
7639 }
7640 ggc_free (larray);
7641
7642 /* Move blocks from BBS into DEST_CFUN. */
7643 gcc_assert (bbs.length () >= 2);
7644 after = dest_cfun->cfg->x_entry_block_ptr;
7645 hash_map<tree, tree> vars_map;
7646
7647 memset (&d, 0, sizeof (d));
7648 d.orig_block = orig_block;
7649 d.new_block = DECL_INITIAL (dest_cfun->decl);
7650 d.from_context = cfun->decl;
7651 d.to_context = dest_cfun->decl;
7652 d.vars_map = &vars_map;
7653 d.new_label_map = new_label_map;
7654 d.eh_map = eh_map;
7655 d.remap_decls_p = true;
7656
7657 if (gimple_in_ssa_p (cfun))
7658 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7659 {
7660 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7661 set_ssa_default_def (dest_cfun, arg, narg);
7662 vars_map.put (arg, narg);
7663 }
7664
7665 FOR_EACH_VEC_ELT (bbs, i, bb)
7666 {
7667 /* No need to update edge counts on the last block. They have
7668 already been updated earlier when we detached the region from
7669 the original CFG. */
7670 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7671 after = bb;
7672 }
7673
7674 loop->aux = NULL;
7675 loop0->aux = NULL;
7676 /* Loop sizes are no longer correct, fix them up. */
7677 loop->num_nodes -= num_nodes;
7678 for (struct loop *outer = loop_outer (loop);
7679 outer; outer = loop_outer (outer))
7680 outer->num_nodes -= num_nodes;
7681 loop0->num_nodes -= bbs.length () - num_nodes;
7682
7683 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7684 {
7685 struct loop *aloop;
7686 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7687 if (aloop != NULL)
7688 {
7689 if (aloop->simduid)
7690 {
7691 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7692 d.to_context);
7693 dest_cfun->has_simduid_loops = true;
7694 }
7695 if (aloop->force_vectorize)
7696 dest_cfun->has_force_vectorize_loops = true;
7697 }
7698 }
7699
7700 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7701 if (orig_block)
7702 {
7703 tree block;
7704 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7705 == NULL_TREE);
7706 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7707 = BLOCK_SUBBLOCKS (orig_block);
7708 for (block = BLOCK_SUBBLOCKS (orig_block);
7709 block; block = BLOCK_CHAIN (block))
7710 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7711 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7712 }
7713
7714 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7715 &vars_map, dest_cfun->decl);
7716
7717 if (new_label_map)
7718 htab_delete (new_label_map);
7719 if (eh_map)
7720 delete eh_map;
7721
7722 if (gimple_in_ssa_p (cfun))
7723 {
7724 /* We need to release ssa-names in a defined order, so first find them,
7725 and then iterate in ascending version order. */
7726 bitmap release_names = BITMAP_ALLOC (NULL);
7727 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7728 bitmap_iterator bi;
7729 unsigned i;
7730 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7731 release_ssa_name (ssa_name (i));
7732 BITMAP_FREE (release_names);
7733 }
7734
7735 /* Rewire the entry and exit blocks. The entry block of the region
7736 turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in the
7737 child function. Similarly, the exit block of the region turns
7738 into the predecessor of DEST_FN's EXIT_BLOCK_PTR. We need to
7739 switch CFUN between DEST_CFUN and SAVED_CFUN so that the various
7740 CFG manipulation functions get to the right CFG.
7741
7742 FIXME, this is silly. The CFG ought to become a parameter to
7743 these helpers. */
7744 push_cfun (dest_cfun);
7745 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7746 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7747 if (exit_bb)
7748 {
7749 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7750 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7751 }
7752 else
7753 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7754 pop_cfun ();
7755
7756 /* Back in the original function, the SESE region has disappeared,
7757 create a new basic block in its place. */
7758 bb = create_empty_bb (entry_pred[0]);
7759 if (current_loops)
7760 add_bb_to_loop (bb, loop);
7761 for (i = 0; i < num_entry_edges; i++)
7762 {
7763 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7764 e->probability = entry_prob[i];
7765 }
7766
7767 for (i = 0; i < num_exit_edges; i++)
7768 {
7769 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7770 e->probability = exit_prob[i];
7771 }
7772
7773 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7774 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7775 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7776 dom_bbs.release ();
7777
7778 if (exit_bb)
7779 {
7780 free (exit_prob);
7781 free (exit_flag);
7782 free (exit_succ);
7783 }
7784 free (entry_prob);
7785 free (entry_flag);
7786 free (entry_pred);
7787 bbs.release ();
7788
7789 return bb;
7790 }
7791
7792 /* Dump default def DEF to file FILE using FLAGS and indentation
7793 SPC. */
7794
7795 static void
7796 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7797 {
7798 for (int i = 0; i < spc; ++i)
7799 fprintf (file, " ");
7800 dump_ssaname_info_to_file (file, def, spc);
7801
7802 print_generic_expr (file, TREE_TYPE (def), flags);
7803 fprintf (file, " ");
7804 print_generic_expr (file, def, flags);
7805 fprintf (file, " = ");
7806 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7807 fprintf (file, ";\n");
7808 }
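
/* For example (an added illustration; exact SSA version numbers vary):
   for the default definition of an int parameter i, the function above
   prints a line of the shape
     int i_1(D) = i;  */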
7809
7810 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7811
7812 static void
7813 print_no_sanitize_attr_value (FILE *file, tree value)
7814 {
7815 unsigned int flags = tree_to_uhwi (value);
7816 bool first = true;
7817 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7818 {
7819 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7820 {
7821 if (!first)
7822 fprintf (file, " | ");
7823 fprintf (file, "%s", sanitizer_opts[i].name);
7824 first = false;
7825 }
7826 }
7827 }
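
/* For example (an added illustration): a VALUE combining the address and
   thread sanitizer flag bits would be printed as "address | thread".
   The names come from the sanitizer_opts table, so the exact spelling
   depends on which bits are encoded in VALUE. */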
7828
7829 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7830 dumpfile.h). */
7831
7832 void
7833 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7834 {
7835 tree arg, var, old_current_fndecl = current_function_decl;
7836 struct function *dsf;
7837 bool ignore_topmost_bind = false, any_var = false;
7838 basic_block bb;
7839 tree chain;
7840 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7841 && decl_is_tm_clone (fndecl));
7842 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7843
7844 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7845 {
7846 fprintf (file, "__attribute__((");
7847
7848 bool first = true;
7849 tree chain;
7850 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7851 first = false, chain = TREE_CHAIN (chain))
7852 {
7853 if (!first)
7854 fprintf (file, ", ");
7855
7856 tree name = get_attribute_name (chain);
7857 print_generic_expr (file, name, dump_flags);
7858 if (TREE_VALUE (chain) != NULL_TREE)
7859 {
7860 fprintf (file, " (");
7861
7862 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7863 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
7864 else
7865 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7866 fprintf (file, ")");
7867 }
7868 }
7869
7870 fprintf (file, "))\n");
7871 }
7872
7873 current_function_decl = fndecl;
7874 if (flags & TDF_GIMPLE)
7875 {
7876 static bool hotness_bb_param_printed = false;
7877 if (profile_info != NULL
7878 && !hotness_bb_param_printed)
7879 {
7880 hotness_bb_param_printed = true;
7881 fprintf (file,
7882 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
7883 " */\n", get_hot_bb_threshold ());
7884 }
7885
7886 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
7887 dump_flags | TDF_SLIM);
7888 fprintf (file, " __GIMPLE (%s",
7889 (fun->curr_properties & PROP_ssa) ? "ssa"
7890 : (fun->curr_properties & PROP_cfg) ? "cfg"
7891 : "");
7892
7893 if (cfun->cfg)
7894 {
7895 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7896 if (bb->count.initialized_p ())
7897 fprintf (file, ",%s(%d)",
7898 profile_quality_as_string (bb->count.quality ()),
7899 bb->count.value ());
7900 fprintf (file, ")\n%s (", function_name (fun));
7901 }
7902 }
7903 else
7904 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7905
7906 arg = DECL_ARGUMENTS (fndecl);
7907 while (arg)
7908 {
7909 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7910 fprintf (file, " ");
7911 print_generic_expr (file, arg, dump_flags);
7912 if (DECL_CHAIN (arg))
7913 fprintf (file, ", ");
7914 arg = DECL_CHAIN (arg);
7915 }
7916 fprintf (file, ")\n");
7917
7918 dsf = DECL_STRUCT_FUNCTION (fndecl);
7919 if (dsf && (flags & TDF_EH))
7920 dump_eh_tree (file, dsf);
7921
7922 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7923 {
7924 dump_node (fndecl, TDF_SLIM | flags, file);
7925 current_function_decl = old_current_fndecl;
7926 return;
7927 }
7928
7929 /* When GIMPLE is lowered, the variables are no longer available in
7930 BIND_EXPRs, so display them separately. */
7931 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7932 {
7933 unsigned ix;
7934 ignore_topmost_bind = true;
7935
7936 fprintf (file, "{\n");
7937 if (gimple_in_ssa_p (fun)
7938 && (flags & TDF_ALIAS))
7939 {
7940 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7941 arg = DECL_CHAIN (arg))
7942 {
7943 tree def = ssa_default_def (fun, arg);
7944 if (def)
7945 dump_default_def (file, def, 2, flags);
7946 }
7947
7948 tree res = DECL_RESULT (fun->decl);
7949 if (res != NULL_TREE
7950 && DECL_BY_REFERENCE (res))
7951 {
7952 tree def = ssa_default_def (fun, res);
7953 if (def)
7954 dump_default_def (file, def, 2, flags);
7955 }
7956
7957 tree static_chain = fun->static_chain_decl;
7958 if (static_chain != NULL_TREE)
7959 {
7960 tree def = ssa_default_def (fun, static_chain);
7961 if (def)
7962 dump_default_def (file, def, 2, flags);
7963 }
7964 }
7965
7966 if (!vec_safe_is_empty (fun->local_decls))
7967 FOR_EACH_LOCAL_DECL (fun, ix, var)
7968 {
7969 print_generic_decl (file, var, flags);
7970 fprintf (file, "\n");
7971
7972 any_var = true;
7973 }
7974
7975 tree name;
7976
7977 if (gimple_in_ssa_p (cfun))
7978 FOR_EACH_SSA_NAME (ix, name, cfun)
7979 {
7980 if (!SSA_NAME_VAR (name))
7981 {
7982 fprintf (file, " ");
7983 print_generic_expr (file, TREE_TYPE (name), flags);
7984 fprintf (file, " ");
7985 print_generic_expr (file, name, flags);
7986 fprintf (file, ";\n");
7987
7988 any_var = true;
7989 }
7990 }
7991 }
7992
7993 if (fun && fun->decl == fndecl
7994 && fun->cfg
7995 && basic_block_info_for_fn (fun))
7996 {
7997 /* If the CFG has been built, emit a CFG-based dump. */
7998 if (!ignore_topmost_bind)
7999 fprintf (file, "{\n");
8000
8001 if (any_var && n_basic_blocks_for_fn (fun))
8002 fprintf (file, "\n");
8003
8004 FOR_EACH_BB_FN (bb, fun)
8005 dump_bb (file, bb, 2, flags);
8006
8007 fprintf (file, "}\n");
8008 }
8009 else if (fun->curr_properties & PROP_gimple_any)
8010 {
8011 /* The function is now in GIMPLE form but the CFG has not been
8012 built yet. Emit the single sequence of GIMPLE statements
8013 that make up its body. */
8014 gimple_seq body = gimple_body (fndecl);
8015
8016 if (gimple_seq_first_stmt (body)
8017 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8018 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8019 print_gimple_seq (file, body, 0, flags);
8020 else
8021 {
8022 if (!ignore_topmost_bind)
8023 fprintf (file, "{\n");
8024
8025 if (any_var)
8026 fprintf (file, "\n");
8027
8028 print_gimple_seq (file, body, 2, flags);
8029 fprintf (file, "}\n");
8030 }
8031 }
8032 else
8033 {
8034 int indent;
8035
8036 /* Make a tree-based dump. */
8037 chain = DECL_SAVED_TREE (fndecl);
8038 if (chain && TREE_CODE (chain) == BIND_EXPR)
8039 {
8040 if (ignore_topmost_bind)
8041 {
8042 chain = BIND_EXPR_BODY (chain);
8043 indent = 2;
8044 }
8045 else
8046 indent = 0;
8047 }
8048 else
8049 {
8050 if (!ignore_topmost_bind)
8051 {
8052 fprintf (file, "{\n");
8053 /* No topmost bind, pretend it's ignored for later. */
8054 ignore_topmost_bind = true;
8055 }
8056 indent = 2;
8057 }
8058
8059 if (any_var)
8060 fprintf (file, "\n");
8061
8062 print_generic_stmt_indented (file, chain, flags, indent);
8063 if (ignore_topmost_bind)
8064 fprintf (file, "}\n");
8065 }
8066
8067 if (flags & TDF_ENUMERATE_LOCALS)
8068 dump_enumerated_decls (file, flags);
8069 fprintf (file, "\n\n");
8070
8071 current_function_decl = old_current_fndecl;
8072 }
8073
8074 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
8075
8076 DEBUG_FUNCTION void
8077 debug_function (tree fn, dump_flags_t flags)
8078 {
8079 dump_function_to_file (fn, stderr, flags);
8080 }
8081
8082
8083 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8084
8085 static void
8086 print_pred_bbs (FILE *file, basic_block bb)
8087 {
8088 edge e;
8089 edge_iterator ei;
8090
8091 FOR_EACH_EDGE (e, ei, bb->preds)
8092 fprintf (file, "bb_%d ", e->src->index);
8093 }
8094
8095
8096 /* Print on FILE the indexes for the successors of basic_block BB. */
8097
8098 static void
8099 print_succ_bbs (FILE *file, basic_block bb)
8100 {
8101 edge e;
8102 edge_iterator ei;
8103
8104 FOR_EACH_EDGE (e, ei, bb->succs)
8105 fprintf (file, "bb_%d ", e->dest->index);
8106 }
8107
8108 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
8109
8110 void
8111 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8112 {
8113 char *s_indent = (char *) alloca ((size_t) indent + 1);
8114 memset ((void *) s_indent, ' ', (size_t) indent);
8115 s_indent[indent] = '\0';
8116
8117 /* Print basic_block's header. */
8118 if (verbosity >= 2)
8119 {
8120 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8121 print_pred_bbs (file, bb);
8122 fprintf (file, "}, succs = {");
8123 print_succ_bbs (file, bb);
8124 fprintf (file, "})\n");
8125 }
8126
8127 /* Print basic_block's body. */
8128 if (verbosity >= 3)
8129 {
8130 fprintf (file, "%s {\n", s_indent);
8131 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8132 fprintf (file, "%s }\n", s_indent);
8133 }
8134 }
8135
8136 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
8137
8138 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
8139 VERBOSITY level, this outputs the contents of the loop or just its
8140 structure. */
8141
8142 static void
8143 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
8144 {
8145 char *s_indent;
8146 basic_block bb;
8147
8148 if (loop == NULL)
8149 return;
8150
8151 s_indent = (char *) alloca ((size_t) indent + 1);
8152 memset ((void *) s_indent, ' ', (size_t) indent);
8153 s_indent[indent] = '\0';
8154
8155 /* Print loop's header. */
8156 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8157 if (loop->header)
8158 fprintf (file, "header = %d", loop->header->index);
8159 else
8160 {
8161 fprintf (file, "deleted)\n");
8162 return;
8163 }
8164 if (loop->latch)
8165 fprintf (file, ", latch = %d", loop->latch->index);
8166 else
8167 fprintf (file, ", multiple latches");
8168 fprintf (file, ", niter = ");
8169 print_generic_expr (file, loop->nb_iterations);
8170
8171 if (loop->any_upper_bound)
8172 {
8173 fprintf (file, ", upper_bound = ");
8174 print_decu (loop->nb_iterations_upper_bound, file);
8175 }
8176 if (loop->any_likely_upper_bound)
8177 {
8178 fprintf (file, ", likely_upper_bound = ");
8179 print_decu (loop->nb_iterations_likely_upper_bound, file);
8180 }
8181
8182 if (loop->any_estimate)
8183 {
8184 fprintf (file, ", estimate = ");
8185 print_decu (loop->nb_iterations_estimate, file);
8186 }
8187 if (loop->unroll)
8188 fprintf (file, ", unroll = %d", loop->unroll);
8189 fprintf (file, ")\n");
8190
8191 /* Print loop's body. */
8192 if (verbosity >= 1)
8193 {
8194 fprintf (file, "%s{\n", s_indent);
8195 FOR_EACH_BB_FN (bb, cfun)
8196 if (bb->loop_father == loop)
8197 print_loops_bb (file, bb, indent, verbosity);
8198
8199 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8200 fprintf (file, "%s}\n", s_indent);
8201 }
8202 }
8203
8204 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8205 spaces. Depending on the VERBOSITY level, this outputs the contents
8206 of each loop or just its structure. */
8207
8208 static void
8209 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8210 int verbosity)
8211 {
8212 if (loop == NULL)
8213 return;
8214
8215 print_loop (file, loop, indent, verbosity);
8216 print_loop_and_siblings (file, loop->next, indent, verbosity);
8217 }
8218
8219 /* Follow a CFG edge from the entry point of the program, and on entry
8220 of a loop, pretty print the loop structure on FILE. */
8221
8222 void
8223 print_loops (FILE *file, int verbosity)
8224 {
8225 basic_block bb;
8226
8227 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8228 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8229 if (bb && bb->loop_father)
8230 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8231 }
8232
8233 /* Dump a loop. */
8234
8235 DEBUG_FUNCTION void
8236 debug (struct loop &ref)
8237 {
8238 print_loop (stderr, &ref, 0, /*verbosity*/0);
8239 }
8240
8241 DEBUG_FUNCTION void
8242 debug (struct loop *ptr)
8243 {
8244 if (ptr)
8245 debug (*ptr);
8246 else
8247 fprintf (stderr, "<nil>\n");
8248 }
8249
8250 /* Dump a loop verbosely. */
8251
8252 DEBUG_FUNCTION void
8253 debug_verbose (struct loop &ref)
8254 {
8255 print_loop (stderr, &ref, 0, /*verbosity*/3);
8256 }
8257
8258 DEBUG_FUNCTION void
8259 debug_verbose (struct loop *ptr)
8260 {
8261 if (ptr)
8262 debug_verbose (*ptr);
8263 else
8264 fprintf (stderr, "<nil>\n");
8265 }
8266
8267
8268 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8269
8270 DEBUG_FUNCTION void
8271 debug_loops (int verbosity)
8272 {
8273 print_loops (stderr, verbosity);
8274 }
8275
8276 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8277
8278 DEBUG_FUNCTION void
8279 debug_loop (struct loop *loop, int verbosity)
8280 {
8281 print_loop (stderr, loop, 0, verbosity);
8282 }
8283
8284 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8285 level. */
8286
8287 DEBUG_FUNCTION void
8288 debug_loop_num (unsigned num, int verbosity)
8289 {
8290 debug_loop (get_loop (cfun, num), verbosity);
8291 }
8292
8293 /* Return true if BB ends with a call, possibly followed by some
8294 instructions that must stay with the call. Return false
8295 otherwise. */
8296
8297 static bool
8298 gimple_block_ends_with_call_p (basic_block bb)
8299 {
8300 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8301 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8302 }
8303
8304
8305 /* Return true if BB ends with a conditional branch. Return false
8306 otherwise. */
8307
8308 static bool
8309 gimple_block_ends_with_condjump_p (const_basic_block bb)
8310 {
8311 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8312 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8313 }
8314
8315
8316 /* Return true if statement T may terminate execution of BB in ways not
8317 explicitly represented in the CFG. */
8318
8319 bool
8320 stmt_can_terminate_bb_p (gimple *t)
8321 {
8322 tree fndecl = NULL_TREE;
8323 int call_flags = 0;
8324
8325 /* An EH exception not handled internally terminates execution of the
8326 whole function. */
8327 if (stmt_can_throw_external (cfun, t))
8328 return true;
8329
8330 /* NORETURN and LONGJMP calls already have an edge to exit.
8331 CONST and PURE calls do not need one.
8332 We don't currently check for CONST and PURE here, although
8333 it would be a good idea, because those attributes are
8334 figured out from the RTL in mark_constant_function, and
8335 the counter increment code from -fprofile-arcs
8336 leads to different results from -fbranch-probabilities. */
8337 if (is_gimple_call (t))
8338 {
8339 fndecl = gimple_call_fndecl (t);
8340 call_flags = gimple_call_flags (t);
8341 }
8342
8343 if (is_gimple_call (t)
8344 && fndecl
8345 && fndecl_built_in_p (fndecl)
8346 && (call_flags & ECF_NOTHROW)
8347 && !(call_flags & ECF_RETURNS_TWICE)
8348 /* fork() doesn't really return twice, but wrapping it in
8349 __gcov_fork(), which calls __gcov_flush() and clears the
8350 counters before forking, has the same effect as returning
8351 twice. Force a fake edge. */
8352 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8353 return false;
8354
8355 if (is_gimple_call (t))
8356 {
8357 edge_iterator ei;
8358 edge e;
8359 basic_block bb;
8360
8361 if (call_flags & (ECF_PURE | ECF_CONST)
8362 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8363 return false;
8364
8365 /* A function call may do a longjmp, terminate the program or do other
8366 things. Special-case noreturn calls: for those, the lack of non-fake
8367 edges out of T already represents the fact sufficiently. */
8368 if (!(call_flags & ECF_NORETURN))
8369 return true;
8370
8371 bb = gimple_bb (t);
8372 FOR_EACH_EDGE (e, ei, bb->succs)
8373 if ((e->flags & EDGE_FAKE) == 0)
8374 return true;
8375 }
8376
8377 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8378 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8379 return true;
8380
8381 return false;
8382 }
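
/* Illustrative behavior under the rules above (assumed typical cases,
   added for exposition): a call to an arbitrary extern function yields
   true, since it may longjmp or exit; a nothrow, non-returns-twice
   builtin yields false; and a volatile asm yields true, as it may
   transfer control in ways the CFG does not show. */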
8383
8384
8385 /* Add fake edges to the function exit for any non-constant and
8386 non-noreturn calls (or noreturn calls with EH/abnormal edges),
8387 and volatile inline assembly, in the bitmap of blocks specified by
8388 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of
8389 blocks that were split.
8390
8391 The goal is to expose cases in which entering a basic block does
8392 not imply that all subsequent instructions must be executed. */
8393
8394 static int
8395 gimple_flow_call_edges_add (sbitmap blocks)
8396 {
8397 int i;
8398 int blocks_split = 0;
8399 int last_bb = last_basic_block_for_fn (cfun);
8400 bool check_last_block = false;
8401
8402 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8403 return 0;
8404
8405 if (! blocks)
8406 check_last_block = true;
8407 else
8408 check_last_block = bitmap_bit_p (blocks,
8409 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8410
8411 /* In the last basic block, before epilogue generation, there will be
8412 a fallthru edge to EXIT. Special care is required if the last insn
8413 of the last basic block is a call because make_edge folds duplicate
8414 edges, which would result in the fallthru edge also being marked
8415 fake, which would result in the fallthru edge being removed by
8416 remove_fake_edges, which would result in an invalid CFG.
8417
8418 Moreover, we can't elide the outgoing fake edge, since the block
8419 profiler needs to take this into account in order to solve the minimal
8420 spanning tree in the case that the call doesn't return.
8421
8422 Handle this by adding a dummy instruction in a new last basic block. */
8423 if (check_last_block)
8424 {
8425 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8426 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8427 gimple *t = NULL;
8428
8429 if (!gsi_end_p (gsi))
8430 t = gsi_stmt (gsi);
8431
8432 if (t && stmt_can_terminate_bb_p (t))
8433 {
8434 edge e;
8435
8436 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8437 if (e)
8438 {
8439 gsi_insert_on_edge (e, gimple_build_nop ());
8440 gsi_commit_edge_inserts ();
8441 }
8442 }
8443 }
8444
8445 /* Now add fake edges to the function exit for any non-constant
8446 calls since there is no way that we can determine if they will
8447 return or not... */
8448 for (i = 0; i < last_bb; i++)
8449 {
8450 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8451 gimple_stmt_iterator gsi;
8452 gimple *stmt, *last_stmt;
8453
8454 if (!bb)
8455 continue;
8456
8457 if (blocks && !bitmap_bit_p (blocks, i))
8458 continue;
8459
8460 gsi = gsi_last_nondebug_bb (bb);
8461 if (!gsi_end_p (gsi))
8462 {
8463 last_stmt = gsi_stmt (gsi);
8464 do
8465 {
8466 stmt = gsi_stmt (gsi);
8467 if (stmt_can_terminate_bb_p (stmt))
8468 {
8469 edge e;
8470
8471 /* The handling above of the final block before the
8472 epilogue should be enough to verify that there is
8473 no edge to the exit block in CFG already.
8474 Calling make_edge in such case would cause us to
8475 mark that edge as fake and remove it later. */
8476 if (flag_checking && stmt == last_stmt)
8477 {
8478 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8479 gcc_assert (e == NULL);
8480 }
8481
8482 /* Note that the following may create a new basic block
8483 and renumber the existing basic blocks. */
8484 if (stmt != last_stmt)
8485 {
8486 e = split_block (bb, stmt);
8487 if (e)
8488 blocks_split++;
8489 }
8490 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8491 e->probability = profile_probability::guessed_never ();
8492 }
8493 gsi_prev (&gsi);
8494 }
8495 while (!gsi_end_p (gsi));
8496 }
8497 }
8498
8499 if (blocks_split)
8500 checking_verify_flow_info ();
8501
8502 return blocks_split;
8503 }
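
/* An added illustration (not in the original sources): given a block
   containing "foo (); x = 1;" where foo () may call exit (), entering
   the block does not imply that "x = 1" executes. The fake edge to
   EXIT created above makes that explicit, which the block profiler
   needs when solving the minimal spanning tree. */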
8504
8505 /* Removes edge E and all the blocks dominated by it, and updates dominance
8506 information. The IL in E->src needs to be updated separately.
8507 If dominance info is not available, only the edge E is removed. */
8508
8509 void
8510 remove_edge_and_dominated_blocks (edge e)
8511 {
8512 vec<basic_block> bbs_to_remove = vNULL;
8513 vec<basic_block> bbs_to_fix_dom = vNULL;
8514 edge f;
8515 edge_iterator ei;
8516 bool none_removed = false;
8517 unsigned i;
8518 basic_block bb, dbb;
8519 bitmap_iterator bi;
8520
8521 /* If we are removing a path inside a non-root loop, that may change
8522 loop ownership of blocks or remove loops; mark loops for fixup. */
8523 if (current_loops
8524 && loop_outer (e->src->loop_father) != NULL
8525 && e->src->loop_father == e->dest->loop_father)
8526 loops_state_set (LOOPS_NEED_FIXUP);
8527
8528 if (!dom_info_available_p (CDI_DOMINATORS))
8529 {
8530 remove_edge (e);
8531 return;
8532 }
8533
8534 /* No updating is needed for edges to exit. */
8535 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8536 {
8537 if (cfgcleanup_altered_bbs)
8538 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8539 remove_edge (e);
8540 return;
8541 }
8542
8543 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8544 that is not dominated by E->dest, then this set is empty. Otherwise,
8545 all the basic blocks dominated by E->dest are removed.
8546
8547 Also, to DF_IDOM we store the immediate dominators of the blocks in
8548 the dominance frontier of E (i.e., of the successors of the
8549 removed blocks, if there are any, and of E->dest otherwise). */
8550 FOR_EACH_EDGE (f, ei, e->dest->preds)
8551 {
8552 if (f == e)
8553 continue;
8554
8555 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8556 {
8557 none_removed = true;
8558 break;
8559 }
8560 }
8561
8562 auto_bitmap df, df_idom;
8563 if (none_removed)
8564 bitmap_set_bit (df_idom,
8565 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8566 else
8567 {
8568 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8569 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8570 {
8571 FOR_EACH_EDGE (f, ei, bb->succs)
8572 {
8573 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8574 bitmap_set_bit (df, f->dest->index);
8575 }
8576 }
8577 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8578 bitmap_clear_bit (df, bb->index);
8579
8580 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8581 {
8582 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8583 bitmap_set_bit (df_idom,
8584 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8585 }
8586 }
8587
8588 if (cfgcleanup_altered_bbs)
8589 {
8590 /* Record the set of the altered basic blocks. */
8591 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8592 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8593 }
8594
8595 /* Remove E and the cancelled blocks. */
8596 if (none_removed)
8597 remove_edge (e);
8598 else
8599 {
8600 /* Walk backwards so as to get a chance to substitute all
8601 released DEFs into debug stmts. See
8602 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8603 details. */
8604 for (i = bbs_to_remove.length (); i-- > 0; )
8605 delete_basic_block (bbs_to_remove[i]);
8606 }
8607
8608 /* Update the dominance information. The immediate dominator may change only
8609 for blocks whose immediate dominator belongs to DF_IDOM:
8610
8611 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8612 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8613 Z dominates X after the removal. Before removal, there exists a path P
8614 from Y to X that avoids Z. Let F be the last edge on P that is
8615 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8616 dominates W, and because of P, Z does not dominate W), and W belongs to
8617 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
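/* A concrete illustration (added example, not in the original sources):
   for blocks A -> {B, C}, B -> D, C -> D, with E being the edge C -> D,
   removing E leaves D with the single predecessor B. Here idom(D) = A
   before the removal, so A is recorded in DF_IDOM above, the dominator
   children of A (B, C and D) are queued below, and
   iterate_fix_dominators then recomputes idom(D) = B. */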
8618 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8619 {
8620 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8621 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8622 dbb;
8623 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8624 bbs_to_fix_dom.safe_push (dbb);
8625 }
8626
8627 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8628
8629 bbs_to_remove.release ();
8630 bbs_to_fix_dom.release ();
8631 }
8632
8633 /* Purge dead EH edges from basic block BB. */
8634
8635 bool
8636 gimple_purge_dead_eh_edges (basic_block bb)
8637 {
8638 bool changed = false;
8639 edge e;
8640 edge_iterator ei;
8641 gimple *stmt = last_stmt (bb);
8642
8643 if (stmt && stmt_can_throw_internal (cfun, stmt))
8644 return false;
8645
8646 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8647 {
8648 if (e->flags & EDGE_EH)
8649 {
8650 remove_edge_and_dominated_blocks (e);
8651 changed = true;
8652 }
8653 else
8654 ei_next (&ei);
8655 }
8656
8657 return changed;
8658 }
8659
8660 /* Purge dead EH edges from basic block listed in BLOCKS. */
8661
8662 bool
8663 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8664 {
8665 bool changed = false;
8666 unsigned i;
8667 bitmap_iterator bi;
8668
8669 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8670 {
8671 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8672
8673 /* Earlier gimple_purge_dead_eh_edges could have removed
8674 this basic block already. */
8675 gcc_assert (bb || changed);
8676 if (bb != NULL)
8677 changed |= gimple_purge_dead_eh_edges (bb);
8678 }
8679
8680 return changed;
8681 }
8682
8683 /* Purge dead abnormal call edges from basic block BB. */
8684
8685 bool
8686 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8687 {
8688 bool changed = false;
8689 edge e;
8690 edge_iterator ei;
8691 gimple *stmt = last_stmt (bb);
8692
8693 if (!cfun->has_nonlocal_label
8694 && !cfun->calls_setjmp)
8695 return false;
8696
8697 if (stmt && stmt_can_make_abnormal_goto (stmt))
8698 return false;
8699
8700 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8701 {
8702 if (e->flags & EDGE_ABNORMAL)
8703 {
8704 if (e->flags & EDGE_FALLTHRU)
8705 e->flags &= ~EDGE_ABNORMAL;
8706 else
8707 remove_edge_and_dominated_blocks (e);
8708 changed = true;
8709 }
8710 else
8711 ei_next (&ei);
8712 }
8713
8714 return changed;
8715 }
8716
8717 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8718
8719 bool
8720 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8721 {
8722 bool changed = false;
8723 unsigned i;
8724 bitmap_iterator bi;
8725
8726 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8727 {
8728 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8729
8730 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8731 this basic block already. */
8732 gcc_assert (bb || changed);
8733 if (bb != NULL)
8734 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8735 }
8736
8737 return changed;
8738 }
8739
8740 /* This function is called whenever a new edge is created or
8741 redirected. */
8742
8743 static void
8744 gimple_execute_on_growing_pred (edge e)
8745 {
8746 basic_block bb = e->dest;
8747
8748 if (!gimple_seq_empty_p (phi_nodes (bb)))
8749 reserve_phi_args_for_new_edge (bb);
8750 }
8751
8752 /* This function is called immediately before edge E is removed from
8753 the edge vector E->dest->preds. */
8754
8755 static void
8756 gimple_execute_on_shrinking_pred (edge e)
8757 {
8758 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8759 remove_phi_args (e);
8760 }
8761
8762 /*---------------------------------------------------------------------------
8763 Helper functions for Loop versioning
8764 ---------------------------------------------------------------------------*/
8765
8766 /* Adjust phi nodes for the 'first' basic block. 'second' is a copy
8767 of 'first'; both are dominated by the 'new_head' basic block. When
8768 'new_head' was created by splitting 'second's incoming edge, it
8769 received phi arguments on that edge from split_edge(). Later, an
8770 additional edge 'e' was created to connect 'new_head' and 'first'.
8771 This routine now adds, on edge 'e', the phi args that the
8772 new_head-to-second edge received as part of the edge splitting. */
8773
8774 static void
8775 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8776 basic_block new_head, edge e)
8777 {
8778 gphi *phi1, *phi2;
8779 gphi_iterator psi1, psi2;
8780 tree def;
8781 edge e2 = find_edge (new_head, second);
8782
8783 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8784 edge, we should always have an edge from NEW_HEAD to SECOND. */
8785 gcc_assert (e2 != NULL);
8786
8787 /* Browse all 'second' basic block phi nodes and add phi args to
8788 edge 'e' for 'first' head. PHI args are always in correct order. */
8789
8790 for (psi2 = gsi_start_phis (second),
8791 psi1 = gsi_start_phis (first);
8792 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8793 gsi_next (&psi2), gsi_next (&psi1))
8794 {
8795 phi1 = psi1.phi ();
8796 phi2 = psi2.phi ();
8797 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8798 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8799 }
8800 }
8801
8802
8803 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8804 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8805 the destination of the ELSE part. */
8806
8807 static void
8808 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8809 basic_block second_head ATTRIBUTE_UNUSED,
8810 basic_block cond_bb, void *cond_e)
8811 {
8812 gimple_stmt_iterator gsi;
8813 gimple *new_cond_expr;
8814 tree cond_expr = (tree) cond_e;
8815 edge e0;
8816
8817 /* Build the new conditional expression. */
8818 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8819 NULL_TREE, NULL_TREE);
8820
8821 /* Add new cond in cond_bb. */
8822 gsi = gsi_last_bb (cond_bb);
8823 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8824
8825 /* Adjust edges appropriately to connect new head with first head
8826 as well as second head. */
8827 e0 = single_succ_edge (cond_bb);
8828 e0->flags &= ~EDGE_FALLTHRU;
8829 e0->flags |= EDGE_FALSE_VALUE;
8830 }
8831
8832
8833 /* Do book-keeping of basic block BB for the profile consistency checker.
8834 Store the counts in RECORD. */
8835 static void
8836 gimple_account_profile_record (basic_block bb,
8837 struct profile_record *record)
8838 {
8839 gimple_stmt_iterator i;
8840 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8841 {
8842 record->size
8843 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8844 if (bb->count.initialized_p ())
8845 record->time
8846 += estimate_num_insns (gsi_stmt (i),
8847 &eni_time_weights) * bb->count.to_gcov_type ();
8848 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8849 record->time
8850 += estimate_num_insns (gsi_stmt (i),
8851 &eni_time_weights) * bb->count.to_frequency (cfun);
8852 }
8853 }
8854
8855 struct cfg_hooks gimple_cfg_hooks = {
8856 "gimple",
8857 gimple_verify_flow_info,
8858 gimple_dump_bb, /* dump_bb */
8859 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8860 create_bb, /* create_basic_block */
8861 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8862 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8863 gimple_can_remove_branch_p, /* can_remove_branch_p */
8864 remove_bb, /* delete_basic_block */
8865 gimple_split_block, /* split_block */
8866 gimple_move_block_after, /* move_block_after */
8867 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8868 gimple_merge_blocks, /* merge_blocks */
8869 gimple_predict_edge, /* predict_edge */
8870 gimple_predicted_by_p, /* predicted_by_p */
8871 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8872 gimple_duplicate_bb, /* duplicate_block */
8873 gimple_split_edge, /* split_edge */
8874 gimple_make_forwarder_block, /* make_forwarder_block */
8875 NULL, /* tidy_fallthru_edge */
8876 NULL, /* force_nonfallthru */
8877 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8878 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8879 gimple_flow_call_edges_add, /* flow_call_edges_add */
8880 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8881 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8882 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8883 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8884 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8885 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8886 flush_pending_stmts, /* flush_pending_stmts */
8887 gimple_empty_block_p, /* block_empty_p */
8888 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8889 gimple_account_profile_record,
8890 };
8891
8892
8893 /* Split all critical edges. */
8894
8895 unsigned int
8896 split_critical_edges (void)
8897 {
8898 basic_block bb;
8899 edge e;
8900 edge_iterator ei;
8901
8902 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8903 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8904 mappings around the calls to split_edge. */
8905 start_recording_case_labels ();
8906 FOR_ALL_BB_FN (bb, cfun)
8907 {
8908 FOR_EACH_EDGE (e, ei, bb->succs)
8909 {
8910 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8911 split_edge (e);
8912 /* PRE inserts statements on edges and expects that,
8913 since split_critical_edges was done beforehand, committing edge
8914 insertions will not split more edges. In addition to critical
8915 edges we must split edges whose source has multiple successors
8916 and ends with a control flow statement, such as RESX.
8917 Go ahead and split them too. This matches the logic in
8918 gimple_find_edge_insert_loc. */
8919 else if ((!single_pred_p (e->dest)
8920 || !gimple_seq_empty_p (phi_nodes (e->dest))
8921 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8922 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8923 && !(e->flags & EDGE_ABNORMAL))
8924 {
8925 gimple_stmt_iterator gsi;
8926
8927 gsi = gsi_last_bb (e->src);
8928 if (!gsi_end_p (gsi)
8929 && stmt_ends_bb_p (gsi_stmt (gsi))
8930 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8931 && !gimple_call_builtin_p (gsi_stmt (gsi),
8932 BUILT_IN_RETURN)))
8933 split_edge (e);
8934 }
8935 }
8936 }
8937 end_recording_case_labels ();
8938 return 0;
8939 }
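
/* For reference (added summary): an edge is critical when its source has
   more than one successor and its destination has more than one
   predecessor. Splitting such an edge inserts a new empty block on it,
   giving a unique location at which edge insertions can later be
   committed without splitting further edges. */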
8940
8941 namespace {
8942
8943 const pass_data pass_data_split_crit_edges =
8944 {
8945 GIMPLE_PASS, /* type */
8946 "crited", /* name */
8947 OPTGROUP_NONE, /* optinfo_flags */
8948 TV_TREE_SPLIT_EDGES, /* tv_id */
8949 PROP_cfg, /* properties_required */
8950 PROP_no_crit_edges, /* properties_provided */
8951 0, /* properties_destroyed */
8952 0, /* todo_flags_start */
8953 0, /* todo_flags_finish */
8954 };
8955
8956 class pass_split_crit_edges : public gimple_opt_pass
8957 {
8958 public:
8959 pass_split_crit_edges (gcc::context *ctxt)
8960 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8961 {}
8962
8963 /* opt_pass methods: */
8964 virtual unsigned int execute (function *) { return split_critical_edges (); }
8965
8966 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8967 }; // class pass_split_crit_edges
8968
8969 } // anon namespace
8970
8971 gimple_opt_pass *
8972 make_pass_split_crit_edges (gcc::context *ctxt)
8973 {
8974 return new pass_split_crit_edges (ctxt);
8975 }
8976
8977
8978 /* Insert COND expression, which is a GIMPLE_COND, after STMT
8979 in basic block BB with appropriate basic block split
8980 and creation of a new conditionally executed basic block.
8981 Update profile so the new bb is visited with probability PROB.
8982 Return created basic block. */
8983 basic_block
8984 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
8985 profile_probability prob)
8986 {
8987 edge fall = split_block (bb, stmt);
8988 gimple_stmt_iterator iter = gsi_last_bb (bb);
8989 basic_block new_bb;
8990
8991 /* Insert cond statement. */
8992 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8993 if (gsi_end_p (iter))
8994 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8995 else
8996 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8997
8998 /* Create conditionally executed block. */
8999 new_bb = create_empty_bb (bb);
9000 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9001 e->probability = prob;
9002 new_bb->count = e->count ();
9003 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9004
9005 /* Fix edge for split bb. */
9006 fall->flags = EDGE_FALSE_VALUE;
9007 fall->probability -= e->probability;
9008
9009 /* Update dominance info. */
9010 if (dom_info_available_p (CDI_DOMINATORS))
9011 {
9012 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9013 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9014 }
9015
9016 /* Update loop info. */
9017 if (current_loops)
9018 add_bb_to_loop (new_bb, bb->loop_father);
9019
9020 return new_bb;
9021 }
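
/* A minimal usage sketch (hypothetical helper, not part of the original
   sources): split BB after STMT and create a block that is executed only
   when VAL is zero, with an assumed very-unlikely probability. */

static basic_block ATTRIBUTE_UNUSED
insert_cond_bb_example (basic_block bb, gimple *stmt, tree val)
{
  gcond *cond = gimple_build_cond (EQ_EXPR, val,
				   build_zero_cst (TREE_TYPE (val)),
				   NULL_TREE, NULL_TREE);
  return insert_cond_bb (bb, stmt, cond,
			 profile_probability::very_unlikely ());
}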
9022
9023 /* Build a ternary operation and gimplify it. Emit code before GSI.
9024 Return the gimple_val holding the result. */
9025
9026 tree
9027 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9028 tree type, tree a, tree b, tree c)
9029 {
9030 tree ret;
9031 location_t loc = gimple_location (gsi_stmt (*gsi));
9032
9033 ret = fold_build3_loc (loc, code, type, a, b, c);
9034 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9035 GSI_SAME_STMT);
9036 }
9037
9038 /* Build a binary operation and gimplify it. Emit code before GSI.
9039 Return the gimple_val holding the result. */
9040
9041 tree
9042 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9043 tree type, tree a, tree b)
9044 {
9045 tree ret;
9046
9047 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9048 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9049 GSI_SAME_STMT);
9050 }
9051
9052 /* Build a unary operation and gimplify it. Emit code before GSI.
9053 Return the gimple_val holding the result. */
9054
9055 tree
9056 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9057 tree a)
9058 {
9059 tree ret;
9060
9061 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9062 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9063 GSI_SAME_STMT);
9064 }
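
/* A small sketch (hypothetical helper, not part of the original sources)
   of how the helpers above are typically chained: each call emits its
   statements before *GSI and returns a gimple value usable as an operand
   of the next call. This one computes A * B + C. */

static tree ATTRIBUTE_UNUSED
gimplify_build_muladd_example (gimple_stmt_iterator *gsi, tree type,
			       tree a, tree b, tree c)
{
  tree prod = gimplify_build2 (gsi, MULT_EXPR, type, a, b);
  return gimplify_build2 (gsi, PLUS_EXPR, type, prod, c);
}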
9065
9066
9067 \f
9068 /* Given a basic block B which ends with a conditional and has
9069 precisely two successors, determine which of the edges is taken if
9070 the conditional is true and which is taken if the conditional is
9071 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9072
9073 void
9074 extract_true_false_edges_from_block (basic_block b,
9075 edge *true_edge,
9076 edge *false_edge)
9077 {
9078 edge e = EDGE_SUCC (b, 0);
9079
9080 if (e->flags & EDGE_TRUE_VALUE)
9081 {
9082 *true_edge = e;
9083 *false_edge = EDGE_SUCC (b, 1);
9084 }
9085 else
9086 {
9087 *false_edge = e;
9088 *true_edge = EDGE_SUCC (b, 1);
9089 }
9090 }
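
/* A minimal sketch (hypothetical helper) of the usual calling pattern:
   given a block ending in a GIMPLE_COND, return the destination block
   reached when the condition evaluates to true. */

static basic_block ATTRIBUTE_UNUSED
true_destination_example (basic_block b)
{
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (b, &true_edge, &false_edge);
  return true_edge->dest;
}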
9091
9092
9093 /* From a controlling predicate in the immediate dominator DOM of
9094 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9095 predicate evaluates to true and false and store them to
9096 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9097 they are non-NULL. Returns true if the edges can be determined,
9098 else returns false. */
9099
9100 bool
9101 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9102 edge *true_controlled_edge,
9103 edge *false_controlled_edge)
9104 {
9105 basic_block bb = phiblock;
9106 edge true_edge, false_edge, tem;
9107 edge e0 = NULL, e1 = NULL;
9108
9109 /* We have to verify that one edge into the PHI node is dominated
9110 by the true edge of the predicate block and the other edge
9111 dominated by the false edge. This ensures that the PHI argument
9112 we are going to take is completely determined by the path we
9113 take from the predicate block.
9114 We can only use BB dominance checks below if the destination of
9115 the true/false edges are dominated by their edge, thus only
9116 have a single predecessor. */
9117 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9118 tem = EDGE_PRED (bb, 0);
9119 if (tem == true_edge
9120 || (single_pred_p (true_edge->dest)
9121 && (tem->src == true_edge->dest
9122 || dominated_by_p (CDI_DOMINATORS,
9123 tem->src, true_edge->dest))))
9124 e0 = tem;
9125 else if (tem == false_edge
9126 || (single_pred_p (false_edge->dest)
9127 && (tem->src == false_edge->dest
9128 || dominated_by_p (CDI_DOMINATORS,
9129 tem->src, false_edge->dest))))
9130 e1 = tem;
9131 else
9132 return false;
9133 tem = EDGE_PRED (bb, 1);
9134 if (tem == true_edge
9135 || (single_pred_p (true_edge->dest)
9136 && (tem->src == true_edge->dest
9137 || dominated_by_p (CDI_DOMINATORS,
9138 tem->src, true_edge->dest))))
9139 e0 = tem;
9140 else if (tem == false_edge
9141 || (single_pred_p (false_edge->dest)
9142 && (tem->src == false_edge->dest
9143 || dominated_by_p (CDI_DOMINATORS,
9144 tem->src, false_edge->dest))))
9145 e1 = tem;
9146 else
9147 return false;
9148 if (!e0 || !e1)
9149 return false;
9150
9151 if (true_controlled_edge)
9152 *true_controlled_edge = e0;
9153 if (false_controlled_edge)
9154 *false_controlled_edge = e1;
9155
9156 return true;
9157 }
9158
9159 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9160 range [LOW, HIGH]. Place the associated stmts before the last stmt of BB. */
9161
9162 void
9163 generate_range_test (basic_block bb, tree index, tree low, tree high,
9164 tree *lhs, tree *rhs)
9165 {
9166 tree type = TREE_TYPE (index);
9167 tree utype = unsigned_type_for (type);
9168
9169 low = fold_convert (utype, low);
9170 high = fold_convert (utype, high);
9171
9172 gimple_seq seq = NULL;
9173 index = gimple_convert (&seq, utype, index);
9174 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9175 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9176
9177 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9178 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9179 }
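
/* Worked example (added illustration): for the range [3, 10] the test
   built above is (unsigned) INDEX - 3 <= 7. For INDEX < 3 the unsigned
   subtraction wraps to a large value, and for INDEX > 10 the difference
   exceeds 7, so a single unsigned comparison replaces the signed pair
   3 <= INDEX && INDEX <= 10. */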
9180
9181 /* Return the basic block that belongs to label numbered INDEX
9182 of a switch statement. */
9183
9184 basic_block
9185 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9186 {
9187 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9188 }
9189
9190 /* Return the default basic block of a switch statement. */
9191
9192 basic_block
9193 gimple_switch_default_bb (function *ifun, gswitch *gs)
9194 {
9195 return gimple_switch_label_bb (ifun, gs, 0);
9196 }
9197
9198 /* Return the edge that belongs to label numbered INDEX
9199 of a switch statement. */
9200
9201 edge
9202 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9203 {
9204 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9205 }
9206
9207 /* Return the default edge of a switch statement. */
9208
9209 edge
9210 gimple_switch_default_edge (function *ifun, gswitch *gs)
9211 {
9212 return gimple_switch_edge (ifun, gs, 0);
9213 }
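
/* A short sketch (hypothetical helper, not part of the original sources):
   visit the outgoing edge of every label of switch GS, including the
   default edge at index 0. */

static void ATTRIBUTE_UNUSED
visit_switch_edges_example (function *ifun, gswitch *gs)
{
  for (unsigned i = 0; i < gimple_switch_num_labels (gs); ++i)
    if (edge e = gimple_switch_edge (ifun, gs, i))
      gcc_checking_assert (e->src == gimple_bb (gs));
}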
9214
9215
9216 /* Emit return warnings. */
9217
9218 namespace {
9219
9220 const pass_data pass_data_warn_function_return =
9221 {
9222 GIMPLE_PASS, /* type */
9223 "*warn_function_return", /* name */
9224 OPTGROUP_NONE, /* optinfo_flags */
9225 TV_NONE, /* tv_id */
9226 PROP_cfg, /* properties_required */
9227 0, /* properties_provided */
9228 0, /* properties_destroyed */
9229 0, /* todo_flags_start */
9230 0, /* todo_flags_finish */
9231 };
9232
9233 class pass_warn_function_return : public gimple_opt_pass
9234 {
9235 public:
9236 pass_warn_function_return (gcc::context *ctxt)
9237 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9238 {}
9239
9240 /* opt_pass methods: */
9241 virtual unsigned int execute (function *);
9242
9243 }; // class pass_warn_function_return
9244
9245 unsigned int
9246 pass_warn_function_return::execute (function *fun)
9247 {
9248 location_t location;
9249 gimple *last;
9250 edge e;
9251 edge_iterator ei;
9252
9253 if (!targetm.warn_func_return (fun->decl))
9254 return 0;
9255
9256 /* If we have a path to EXIT, then we do return. */
9257 if (TREE_THIS_VOLATILE (fun->decl)
9258 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9259 {
9260 location = UNKNOWN_LOCATION;
9261 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9262 (e = ei_safe_edge (ei)); )
9263 {
9264 last = last_stmt (e->src);
9265 if ((gimple_code (last) == GIMPLE_RETURN
9266 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9267 && location == UNKNOWN_LOCATION
9268 && ((location = LOCATION_LOCUS (gimple_location (last)))
9269 != UNKNOWN_LOCATION)
9270 && !optimize)
9271 break;
9272 /* When optimizing, replace return stmts in noreturn functions
9273 with a __builtin_unreachable () call. */
9274 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9275 {
9276 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9277 gimple *new_stmt = gimple_build_call (fndecl, 0);
9278 gimple_set_location (new_stmt, gimple_location (last));
9279 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9280 gsi_replace (&gsi, new_stmt, true);
9281 remove_edge (e);
9282 }
9283 else
9284 ei_next (&ei);
9285 }
9286 if (location == UNKNOWN_LOCATION)
9287 location = cfun->function_end_locus;
9288 warning_at (location, 0, "%<noreturn%> function does return");
9289 }
9290
9291 /* If we see "return;" in some basic block, then we do reach the end
9292 without returning a value. */
9293 else if (warn_return_type > 0
9294 && !TREE_NO_WARNING (fun->decl)
9295 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9296 {
9297 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9298 {
9299 gimple *last = last_stmt (e->src);
9300 greturn *return_stmt = dyn_cast <greturn *> (last);
9301 if (return_stmt
9302 && gimple_return_retval (return_stmt) == NULL
9303 && !gimple_no_warning_p (last))
9304 {
9305 location = gimple_location (last);
9306 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9307 location = fun->function_end_locus;
9308 if (warning_at (location, OPT_Wreturn_type,
9309 "control reaches end of non-void function"))
9310 TREE_NO_WARNING (fun->decl) = 1;
9311 break;
9312 }
9313 }
9314 /* The C++ FE turns fallthrough from the end of a non-void function
9315 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9316 Recognize those too. */
9317 basic_block bb;
9318 if (!TREE_NO_WARNING (fun->decl))
9319 FOR_EACH_BB_FN (bb, fun)
9320 if (EDGE_COUNT (bb->succs) == 0)
9321 {
9322 gimple *last = last_stmt (bb);
9323 const enum built_in_function ubsan_missing_ret
9324 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9325 if (last
9326 && ((LOCATION_LOCUS (gimple_location (last))
9327 == BUILTINS_LOCATION
9328 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9329 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9330 {
9331 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9332 gsi_prev_nondebug (&gsi);
9333 gimple *prev = gsi_stmt (gsi);
9334 if (prev == NULL)
9335 location = UNKNOWN_LOCATION;
9336 else
9337 location = gimple_location (prev);
9338 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9339 location = fun->function_end_locus;
9340 if (warning_at (location, OPT_Wreturn_type,
9341 "control reaches end of non-void function"))
9342 TREE_NO_WARNING (fun->decl) = 1;
9343 break;
9344 }
9345 }
9346 }
9347 return 0;
9348 }
9349
9350 } // anon namespace
9351
9352 gimple_opt_pass *
9353 make_pass_warn_function_return (gcc::context *ctxt)
9354 {
9355 return new pass_warn_function_return (ctxt);
9356 }
9357
9358 /* Walk a gimplified function and warn about calls whose return value is
9359 ignored while attribute((warn_unused_result)) is set. This is done before
9360 inlining, so we don't have to worry about that. */
9361
9362 static void
9363 do_warn_unused_result (gimple_seq seq)
9364 {
9365 tree fdecl, ftype;
9366 gimple_stmt_iterator i;
9367
9368 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9369 {
9370 gimple *g = gsi_stmt (i);
9371
9372 switch (gimple_code (g))
9373 {
9374 case GIMPLE_BIND:
9375 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9376 break;
9377 case GIMPLE_TRY:
9378 do_warn_unused_result (gimple_try_eval (g));
9379 do_warn_unused_result (gimple_try_cleanup (g));
9380 break;
9381 case GIMPLE_CATCH:
9382 do_warn_unused_result (gimple_catch_handler (
9383 as_a <gcatch *> (g)));
9384 break;
9385 case GIMPLE_EH_FILTER:
9386 do_warn_unused_result (gimple_eh_filter_failure (g));
9387 break;
9388
9389 case GIMPLE_CALL:
9390 if (gimple_call_lhs (g))
9391 break;
9392 if (gimple_call_internal_p (g))
9393 break;
9394
9395 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9396 LHS. All calls whose value is ignored should be
9397 represented like this. Look for the attribute. */
9398 fdecl = gimple_call_fndecl (g);
9399 ftype = gimple_call_fntype (g);
9400
9401 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9402 {
9403 location_t loc = gimple_location (g);
9404
9405 if (fdecl)
9406 warning_at (loc, OPT_Wunused_result,
9407 "ignoring return value of %qD, "
9408 "declared with attribute warn_unused_result",
9409 fdecl);
9410 else
9411 warning_at (loc, OPT_Wunused_result,
9412 "ignoring return value of function "
9413 "declared with attribute warn_unused_result");
9414 }
9415 break;
9416
9417 default:
9418 /* Not a container, not a call, or a call whose value is used. */
9419 break;
9420 }
9421 }
9422 }
9423
9424 namespace {
9425
9426 const pass_data pass_data_warn_unused_result =
9427 {
9428 GIMPLE_PASS, /* type */
9429 "*warn_unused_result", /* name */
9430 OPTGROUP_NONE, /* optinfo_flags */
9431 TV_NONE, /* tv_id */
9432 PROP_gimple_any, /* properties_required */
9433 0, /* properties_provided */
9434 0, /* properties_destroyed */
9435 0, /* todo_flags_start */
9436 0, /* todo_flags_finish */
9437 };
9438
9439 class pass_warn_unused_result : public gimple_opt_pass
9440 {
9441 public:
9442 pass_warn_unused_result (gcc::context *ctxt)
9443 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9444 {}
9445
9446 /* opt_pass methods: */
9447 virtual bool gate (function *) { return flag_warn_unused_result; }
9448 virtual unsigned int execute (function *)
9449 {
9450 do_warn_unused_result (gimple_body (current_function_decl));
9451 return 0;
9452 }
9453
9454 }; // class pass_warn_unused_result
9455
9456 } // anon namespace
9457
9458 gimple_opt_pass *
9459 make_pass_warn_unused_result (gcc::context *ctxt)
9460 {
9461 return new pass_warn_unused_result (ctxt);
9462 }
9463
9464 /* IPA passes, compilation of earlier functions or inlining
9465 might have changed some properties, such as marking functions nothrow,
9466 pure, const or noreturn.
9467 Remove redundant edges and basic blocks, and create new ones if necessary.
9468
9469 This pass can't be executed as a standalone pass from the pass manager,
9470 because between inlining and this fixup verify_flow_info would fail. */
9471
9472 unsigned int
9473 execute_fixup_cfg (void)
9474 {
9475 basic_block bb;
9476 gimple_stmt_iterator gsi;
9477 int todo = 0;
9478 cgraph_node *node = cgraph_node::get (current_function_decl);
9479 profile_count num = node->count;
9480 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9481 bool scale = num.initialized_p () && !(num == den);
9482
9483 if (scale)
9484 {
9485 profile_count::adjust_for_ipa_scaling (&num, &den);
9486 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9487 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9488 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9489 }
9490
9491 FOR_EACH_BB_FN (bb, cfun)
9492 {
9493 if (scale)
9494 bb->count = bb->count.apply_scale (num, den);
9495 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9496 {
9497 gimple *stmt = gsi_stmt (gsi);
9498 tree decl = is_gimple_call (stmt)
9499 ? gimple_call_fndecl (stmt)
9500 : NULL;
9501 if (decl)
9502 {
9503 int flags = gimple_call_flags (stmt);
9504 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9505 {
9506 if (gimple_purge_dead_abnormal_call_edges (bb))
9507 todo |= TODO_cleanup_cfg;
9508
9509 if (gimple_in_ssa_p (cfun))
9510 {
9511 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9512 update_stmt (stmt);
9513 }
9514 }
9515
9516 if (flags & ECF_NORETURN
9517 && fixup_noreturn_call (stmt))
9518 todo |= TODO_cleanup_cfg;
9519 }
9520
9521 /* Remove stores to variables we marked write-only.
9522 Keep the access when the store has side effects, i.e. when the
9523 source is volatile. */
9524 if (gimple_store_p (stmt)
9525 && !gimple_has_side_effects (stmt))
9526 {
9527 tree lhs = get_base_address (gimple_get_lhs (stmt));
9528
9529 if (VAR_P (lhs)
9530 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9531 && varpool_node::get (lhs)->writeonly)
9532 {
9533 unlink_stmt_vdef (stmt);
9534 gsi_remove (&gsi, true);
9535 release_defs (stmt);
9536 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9537 continue;
9538 }
9539 }
9540 /* For calls we can simply remove the LHS when it is known
9541 to be write-only. */
9542 if (is_gimple_call (stmt)
9543 && gimple_get_lhs (stmt))
9544 {
9545 tree lhs = get_base_address (gimple_get_lhs (stmt));
9546
9547 if (VAR_P (lhs)
9548 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9549 && varpool_node::get (lhs)->writeonly)
9550 {
9551 gimple_call_set_lhs (stmt, NULL);
9552 update_stmt (stmt);
9553 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9554 }
9555 }
9556
9557 if (maybe_clean_eh_stmt (stmt)
9558 && gimple_purge_dead_eh_edges (bb))
9559 todo |= TODO_cleanup_cfg;
9560 gsi_next (&gsi);
9561 }
9562
9563 /* If we have a basic block with no successors that does not
9564 end with a control statement or a noreturn call, end it with
9565 a call to __builtin_unreachable. This situation can occur
9566 when inlining a noreturn call that does in fact return. */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple *stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || !gimple_call_noreturn_p (stmt))))
            {
              if (stmt && is_gimple_call (stmt))
                gimple_call_set_ctrl_altering (stmt, false);
              tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              stmt = gimple_build_call (fndecl, 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
              if (!cfun->after_inlining)
                {
                  gcall *call_stmt = dyn_cast <gcall *> (stmt);
                  node->create_edge (cgraph_node::get_create (fndecl),
                                     call_stmt, bb->count);
                }
            }
        }
    }
  if (scale)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}

#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
                            NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}

/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
     - build_gimple_cfg
     - make_blocks: calls create_basic_block (seq, bb);
     - make_edges.  */

/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.
     Each BB in our simple chain should be dominated by the one before
     it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}

/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
     EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where each of the blocks A, B, C, D below
   has an edge to every other block in the subgraph, and to itself).
   e.g.:
     ENTRY  EXIT
       |    ^
       |   /
       |  /
       | /
       V/
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_POST_DOMINATORS,
                                      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
               get_immediate_dominator (CDI_POST_DOMINATORS,
                                        subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}
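
/* A sketch of one of the TODO items listed at the end of this file
   (an addition, not part of the original source): verify dominance for
   a block that jumps to itself.  The test name "test_self_loop" is
   hypothetical; to run it, a call would also have to be added to
   tree_cfg_c_tests below.  */

static void
test_self_loop ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_self_loop");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* ENTRY -> A -> EXIT, plus a self-edge A -> A.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_a, 0);
  make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the blocks and edges; the self-edge appears once in the
     preds and once in the succs of A.  */
  ASSERT_EQ (3, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (3, n_edges_for_fn (fun));
  ASSERT_EQ (2, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());

  /* The self-edge must not perturb dominance: A is still immediately
     dominated by ENTRY and immediately post-dominated by EXIT.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_DOMINATORS, bb_a));
  free_dominance_info (CDI_DOMINATORS);

  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
             get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}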

/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself (see the test_self_loop sketch above)
   - etc  */

#endif /* CHECKING_P */