/* Control flow functions for trees.
   Copyright (C) 2001-2016 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "gimplify.h"
#include "attribs.h"
#include "selftest.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

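/* Illustrative sketch (not from the original sources): given

     switch (x) { case 1: case 2: goto L; default: ... }

   the CASE_LABEL_EXPRs for 1 and 2 reach the same block, so the map
   would associate the edge to L's block with a chain such as 2 -> 1,
   linked through the CASE_CHAIN fields.  */
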
/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
                            const locus_discrim_map *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
                             const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
                           n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        break;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
        {
        case annot_expr_ivdep_kind:
          loop->safelen = INT_MAX;
          break;
        case annot_expr_no_vector_kind:
          loop->dont_vectorize = true;
          break;
        case annot_expr_vector_kind:
          loop->force_vectorize = true;
          cfun->has_force_vectorize_loops = true;
          break;
        default:
          gcc_unreachable ();
        }

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
        replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
        {
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;

          switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
            {
            case annot_expr_ivdep_kind:
            case annot_expr_no_vector_kind:
            case annot_expr_vector_kind:
              break;
            default:
              gcc_unreachable ();
            }

          warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
        }
    }
}

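/* Illustrative example (not from the original sources): a loop such as

     #pragma GCC ivdep
     for (i = 0; i < n; i++) a[i] = b[i];

   reaches this pass with an IFN_ANNOTATE call carrying
   annot_expr_ivdep_kind just before the loop condition; the code above
   removes that call and records the hint as loop->safelen = INT_MAX.  */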

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

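/* Illustrative example (not from the original sources): with the GNU
   labels-as-values extension

     void *dests[] = { &&l1, &&l2 };
     goto *dests[i];

   the goto's destination is a pointer value rather than a LABEL_DECL,
   so computed_goto_p returns true for it, while it returns false for a
   plain `goto l1;'.  */
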
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple *stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
            {
              gsi_next (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}


/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
         Return true so we split the basic block containing them.
         Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
          && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
      /* IFN_UNIQUE should be the last insn, to make checking for it
         as cheap as possible.  */
      || (gimple_call_internal_p (stmt)
          && gimple_call_internal_unique_p (stmt)))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}

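/* Illustrative notes (not from the original sources): in a function
   containing a nonlocal label, an arbitrary call might return through
   that label, and a call to abort never returns (ECF_NORETURN); both
   kinds are marked control-altering here and will end their basic
   block.  An ordinary call such as printf, in a function without such
   features, is not control-altering.  */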

/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
        gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs));
              gimple *s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}

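/* Illustrative sketch (not from the original sources) of the rewrite
   performed above for a call that may be returned to abnormally:

     x = setjmp (env);    becomes    tmp = setjmp (env);
                                     x = tmp;

   so that the old value of x is still available on the abnormal edge
   and its SSA name's live range does not overlap the new definition.  */
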
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}

/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
        (last_basic_block_for_fn (cfun)
         + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}

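/* Worked example (not from the original sources): the growth step above
   enlarges the array by a quarter, rounded up, so starting from the
   initial capacity of 20 the sizes go 20 -> 25 -> 32 -> 40 -> ...  */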

/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
        gimple_stmt_iterator gsi
          = gsi_start_nondebug_after_labels_bb (e->dest);
        gimple *g = gsi_stmt (gsi);
        if (g
            && is_gimple_call (g)
            && gimple_call_internal_p (g)
            && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
          return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
                       basic_block for_bb, int *bb_to_omp_idx,
                       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
        inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
         abnormal edges to this dispatcher.  If there are none, return
         early.  */
      if (bb_to_omp_idx == NULL)
        {
          if (bbs->is_empty ())
            return;
        }
      else
        {
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
              break;
          if (bb == NULL)
            return;
        }

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
        {
          /* Factor computed gotos into a common computed goto site.  Also
             record the location of that site so that we can un-factor the
             gotos after we have converted back to normal form.  */
          gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

          /* Create the destination of the factored goto.  Each original
             computed goto will put its desired destination into this
             variable and jump to the label we create immediately below.  */
          tree var = create_tmp_var (ptr_type_node, "gotovar");

          /* Build a label for the new block which will contain the
             factored computed goto.  */
          tree factored_label_decl
            = create_artificial_label (UNKNOWN_LOCATION);
          gimple *factored_computed_goto_label
            = gimple_build_label (factored_label_decl);
          gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

          /* Build our new computed goto.  */
          gimple *factored_computed_goto = gimple_build_goto (var);
          gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;

              gsi = gsi_last_bb (bb);
              gimple *last = gsi_stmt (gsi);

              gcc_assert (computed_goto_p (last));

              /* Copy the original computed goto's destination into VAR.  */
              gimple *assignment
                = gimple_build_assign (var, gimple_goto_dest (last));
              gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

              edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
              e->goto_locus = gimple_location (last);
              gsi_remove (&gsi, true);
            }
        }
      else
        {
          tree arg = inner ? boolean_true_node : boolean_false_node;
          gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
                                                  1, arg);
          gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
          gsi_insert_after (&gsi, g, GSI_NEW_STMT);

          /* Create predecessor edges of the dispatcher.  */
          FOR_EACH_VEC_ELT (*bbs, idx, bb)
            {
              if (bb_to_omp_idx
                  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
                continue;
              make_edge (bb, *dispatcher, EDGE_ABNORMAL);
            }
        }
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

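/* Illustrative sketch (not from the original sources) of the factoring
   done above: two blocks ending in `goto *p;' and `goto *q;' are
   rewritten as `gotovar = p;' and `gotovar = q;' with fallthru edges to
   one shared block

     <factored>: goto *gotovar;

   so only the factored block needs abnormal edges to every potential
   target label, rather than every computed goto needing its own set.  */
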
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   returns 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
        ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
        edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
        e->goto_locus = gimple_location (last);
        fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
         make edges from this call site to all the nonlocal goto
         handlers.  */
      if (stmt_can_make_abnormal_goto (last))
        ret = 2;

      /* If this statement has reachable exception handlers, then
         create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
        {
          make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
          fallthru = false;
        }
      /* Some calls are known not to return.  */
      else
        fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
         control-altering.  */
      if (is_ctrl_altering_stmt (last))
        make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
        gtransaction *txn = as_a <gtransaction *> (last);
        tree label1 = gimple_transaction_label_norm (txn);
        tree label2 = gimple_transaction_label_uninst (txn);

        if (label1)
          make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
        if (label2)
          make_edge (bb, label_to_block (label2),
                     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

        tree label3 = gimple_transaction_label_over (txn);
        if (gimple_transaction_subcode (txn)
            & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
          make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);

        fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
             BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
        bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
        ab_edge_goto.safe_push (bb);
      else if (mer == 2)
        ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
        bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
        dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
              tree target;

              if (!label_stmt)
                break;

              target = gimple_label_label (label_stmt);

              /* Make an edge to every label block that has been marked as a
                 potential target for a computed goto or a non-local goto.  */
              if (FORCED_LABEL (target))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_goto, true);
              if (DECL_NONLOCAL (target))
                {
                  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                         &ab_edge_call, false);
                  break;
                }
            }

          if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
            gsi_next_nondebug (&gsi);
          if (!gsi_end_p (gsi))
            {
              /* Make an edge to every setjmp-like call.  */
              gimple *call_stmt = gsi_stmt (gsi);
              if (is_gimple_call (call_stmt)
                  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
                      || gimple_call_builtin_p (call_stmt,
                                                BUILT_IN_SETJMP_RECEIVER)))
                handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
                                       &ab_edge_call, false);
            }
        }

      if (bb_to_omp_idx)
        XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();
}

/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

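/* Illustrative example (not from the original sources): for a line like

     if (p) x = 1; else x = 2;

   the condition and both assignments share one source line, so the
   blocks involved are given distinct discriminators; a sample-based
   profiler can then attribute counts to each block instead of lumping
   together everything on that line.  */
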
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple *first = first_non_label_stmt (e->dest);
          gimple *last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the edge_to_cases hash table as we delete
   that table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        {
          gimple *stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (as_a <gswitch *> (stmt));
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
        gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            gcond *cond_stmt = as_a <gcond *> (stmt);
            label = gimple_cond_true_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_true_label (cond_stmt, new_label);
              }

            label = gimple_cond_false_label (cond_stmt);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_cond_set_false_label (cond_stmt, new_label);
              }
          }
          break;

        case GIMPLE_SWITCH:
          {
            gswitch *switch_stmt = as_a <gswitch *> (stmt);
            size_t i, n = gimple_switch_num_labels (switch_stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (switch_stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            gasm *asm_stmt = as_a <gasm *> (stmt);
            int i, n = gimple_asm_nlabels (asm_stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (asm_stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              ggoto *goto_stmt = as_a <ggoto *> (stmt);
              label = gimple_goto_dest (goto_stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (goto_stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            gtransaction *txn = as_a <gtransaction *> (stmt);

            label = gimple_transaction_label_norm (txn);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label_norm (txn, new_label);
              }

            label = gimple_transaction_label_uninst (txn);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label_uninst (txn, new_label);
              }

            label = gimple_transaction_label_over (txn);
            if (label)
              {
                new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label_over (txn, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

          if (!label_stmt)
            break;

          label = gimple_label_label (label_stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}
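
/* Illustrative example (not from the original sources): for

     L1: L2: x = 1;

   where some jumps target L1 and others L2, step 1 picks L1 as the
   block's leading label, step 2 rewrites references to L2 into
   references to L1, and step 3 deletes L2 if it is artificial and no
   longer referenced.  */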
1605
1606 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1607 the ones jumping to the same label.
1608 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1609
1610 void
1611 group_case_labels_stmt (gswitch *stmt)
1612 {
1613 int old_size = gimple_switch_num_labels (stmt);
1614 int i, j, new_size = old_size;
1615 basic_block default_bb = NULL;
1616
1617 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1618
1619 /* Look for possible opportunities to merge cases. */
1620 i = 1;
1621 while (i < old_size)
1622 {
1623 tree base_case, base_high;
1624 basic_block base_bb;
1625
1626 base_case = gimple_switch_label (stmt, i);
1627
1628 gcc_assert (base_case);
1629 base_bb = label_to_block (CASE_LABEL (base_case));
1630
1631 /* Discard cases that have the same destination as the
1632 default case. */
1633 if (base_bb == default_bb)
1634 {
1635 gimple_switch_set_label (stmt, i, NULL_TREE);
1636 i++;
1637 new_size--;
1638 continue;
1639 }
1640
1641 base_high = CASE_HIGH (base_case)
1642 ? CASE_HIGH (base_case)
1643 : CASE_LOW (base_case);
1644 i++;
1645
1646 /* Try to merge case labels. Break out when we reach the end
1647 of the label vector or when we cannot merge the next case
1648 label with the current one. */
1649 while (i < old_size)
1650 {
1651 tree merge_case = gimple_switch_label (stmt, i);
1652 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1653 wide_int bhp1 = wi::add (base_high, 1);
1654
1655 /* Merge the cases if they jump to the same place,
1656 and their ranges are consecutive. */
1657 if (merge_bb == base_bb
1658 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1659 {
1660 base_high = CASE_HIGH (merge_case) ?
1661 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1662 CASE_HIGH (base_case) = base_high;
1663 gimple_switch_set_label (stmt, i, NULL_TREE);
1664 new_size--;
1665 i++;
1666 }
1667 else
1668 break;
1669 }
1670 }
1671
1672 /* Compress the case labels in the label vector, and adjust the
1673 length of the vector. */
1674 for (i = 0, j = 0; i < new_size; i++)
1675 {
1676 while (! gimple_switch_label (stmt, j))
1677 j++;
1678 gimple_switch_set_label (stmt, i,
1679 gimple_switch_label (stmt, j++));
1680 }
1681
1682 gcc_assert (new_size <= old_size);
1683 gimple_switch_set_num_labels (stmt, new_size);
1684 }
1685
1686 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1687 and scan the sorted vector of cases. Combine the ones jumping to the
1688 same label. */
1689
1690 void
1691 group_case_labels (void)
1692 {
1693 basic_block bb;
1694
1695 FOR_EACH_BB_FN (bb, cfun)
1696 {
1697 gimple *stmt = last_stmt (bb);
1698 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1699 group_case_labels_stmt (as_a <gswitch *> (stmt));
1700 }
1701 }
1702
1703 /* Checks whether we can merge block B into block A. */
1704
1705 static bool
1706 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1707 {
1708 gimple *stmt;
1709
1710 if (!single_succ_p (a))
1711 return false;
1712
1713 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1714 return false;
1715
1716 if (single_succ (a) != b)
1717 return false;
1718
1719 if (!single_pred_p (b))
1720 return false;
1721
1722 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1723 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1724 return false;
1725
1726 /* If A ends by a statement causing exceptions or something similar, we
1727 cannot merge the blocks. */
1728 stmt = last_stmt (a);
1729 if (stmt && stmt_ends_bb_p (stmt))
1730 return false;
1731
1732 /* Do not allow a block with only a non-local label to be merged. */
1733 if (stmt)
1734 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1735 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1736 return false;
1737
1738 /* Examine the labels at the beginning of B. */
1739 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1740 gsi_next (&gsi))
1741 {
1742 tree lab;
1743 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1744 if (!label_stmt)
1745 break;
1746 lab = gimple_label_label (label_stmt);
1747
1748 /* Do not remove user forced labels or for -O0 any user labels. */
1749 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1750 return false;
1751 }
1752
1753 /* Protect simple loop latches. We only want to avoid merging
1754 the latch with the loop header or with a block in another
1755 loop in this case. */
1756 if (current_loops
1757 && b->loop_father->latch == b
1758 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1759 && (b->loop_father->header == a
1760 || b->loop_father != a->loop_father))
1761 return false;
1762
1763 /* It must be possible to eliminate all phi nodes in B. If ssa form
1764 is not up-to-date and a name-mapping is registered, we cannot eliminate
1765 any phis. Symbols marked for renaming are never a problem though. */
1766 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1767 gsi_next (&gsi))
1768 {
1769 gphi *phi = gsi.phi ();
1770 /* Technically only new names matter. */
1771 if (name_registered_for_update_p (PHI_RESULT (phi)))
1772 return false;
1773 }
1774
1775 /* When not optimizing, don't merge if we'd lose goto_locus. */
1776 if (!optimize
1777 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1778 {
1779 location_t goto_locus = single_succ_edge (a)->goto_locus;
1780 gimple_stmt_iterator prev, next;
1781 prev = gsi_last_nondebug_bb (a);
1782 next = gsi_after_labels (b);
1783 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1784 gsi_next_nondebug (&next);
1785 if ((gsi_end_p (prev)
1786 || gimple_location (gsi_stmt (prev)) != goto_locus)
1787 && (gsi_end_p (next)
1788 || gimple_location (gsi_stmt (next)) != goto_locus))
1789 return false;
1790 }
1791
1792 return true;
1793 }
1794
1795 /* Replaces all uses of NAME by VAL. */
1796
1797 void
1798 replace_uses_by (tree name, tree val)
1799 {
1800 imm_use_iterator imm_iter;
1801 use_operand_p use;
1802 gimple *stmt;
1803 edge e;
1804
1805 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1806 {
1807 /* Mark the block if we change the last stmt in it. */
1808 if (cfgcleanup_altered_bbs
1809 && stmt_ends_bb_p (stmt))
1810 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1811
1812 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1813 {
1814 replace_exp (use, val);
1815
1816 if (gimple_code (stmt) == GIMPLE_PHI)
1817 {
1818 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1819 PHI_ARG_INDEX_FROM_USE (use));
1820 if (e->flags & EDGE_ABNORMAL
1821 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1822 {
1823 /* This can only occur for virtual operands, since
1824 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1825 would prevent replacement. */
1826 gcc_checking_assert (virtual_operand_p (name));
1827 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1828 }
1829 }
1830 }
1831
1832 if (gimple_code (stmt) != GIMPLE_PHI)
1833 {
1834 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1835 gimple *orig_stmt = stmt;
1836 size_t i;
1837
1838 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1839 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1840 only change sth from non-invariant to invariant, and only
1841 when propagating constants. */
1842 if (is_gimple_min_invariant (val))
1843 for (i = 0; i < gimple_num_ops (stmt); i++)
1844 {
1845 tree op = gimple_op (stmt, i);
1846 /* Operands may be empty here. For example, the labels
1847 of a GIMPLE_COND are nulled out following the creation
1848 of the corresponding CFG edges. */
1849 if (op && TREE_CODE (op) == ADDR_EXPR)
1850 recompute_tree_invariant_for_addr_expr (op);
1851 }
1852
1853 if (fold_stmt (&gsi))
1854 stmt = gsi_stmt (gsi);
1855
1856 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1857 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1858
1859 update_stmt (stmt);
1860 }
1861 }
1862
1863 gcc_checking_assert (has_zero_uses (name));
1864
1865 /* Also update the trees stored in loop structures. */
1866 if (current_loops)
1867 {
1868 struct loop *loop;
1869
1870 FOR_EACH_LOOP (loop, 0)
1871 {
1872 substitute_in_loop_info (loop, name, val);
1873 }
1874 }
1875 }
1876
1877 /* Merge block B into block A. */
1878
1879 static void
1880 gimple_merge_blocks (basic_block a, basic_block b)
1881 {
1882 gimple_stmt_iterator last, gsi;
1883 gphi_iterator psi;
1884
1885 if (dump_file)
1886 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1887
1888 /* Remove all single-valued PHI nodes from block B of the form
1889 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1890 gsi = gsi_last_bb (a);
1891 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1892 {
1893 gimple *phi = gsi_stmt (psi);
1894 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1895 gimple *copy;
1896 bool may_replace_uses = (virtual_operand_p (def)
1897 || may_propagate_copy (def, use));
1898
1899 /* In case we maintain loop closed ssa form, do not propagate arguments
1900 of loop exit phi nodes. */
1901 if (current_loops
1902 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1903 && !virtual_operand_p (def)
1904 && TREE_CODE (use) == SSA_NAME
1905 && a->loop_father != b->loop_father)
1906 may_replace_uses = false;
1907
1908 if (!may_replace_uses)
1909 {
1910 gcc_assert (!virtual_operand_p (def));
1911
1912 /* Note that just emitting the copies is fine -- there is no problem
1913 with ordering of phi nodes. This is because A is the single
1914 predecessor of B, so the results of the phi nodes cannot
1915 appear as arguments of the phi nodes. */
1916 copy = gimple_build_assign (def, use);
1917 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1918 remove_phi_node (&psi, false);
1919 }
1920 else
1921 {
1922 /* If we deal with a PHI for virtual operands, we can simply
1923 propagate these without fussing with folding or updating
1924 the stmt. */
1925 if (virtual_operand_p (def))
1926 {
1927 imm_use_iterator iter;
1928 use_operand_p use_p;
1929 gimple *stmt;
1930
1931 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1932 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1933 SET_USE (use_p, use);
1934
1935 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1936 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1937 }
1938 else
1939 replace_uses_by (def, use);
1940
1941 remove_phi_node (&psi, true);
1942 }
1943 }
1944
1945 /* Ensure that B follows A. */
1946 move_block_after (b, a);
1947
1948 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1949 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1950
1951 /* Remove labels from B and set gimple_bb to A for other statements. */
1952 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1953 {
1954 gimple *stmt = gsi_stmt (gsi);
1955 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1956 {
1957 tree label = gimple_label_label (label_stmt);
1958 int lp_nr;
1959
1960 gsi_remove (&gsi, false);
1961
1962 /* Now that we can thread computed gotos, we might have
1963 a situation where we have a forced label in block B.
1964 However, the label at the start of block B might still be
1965 used in other ways (think about the runtime checking for
1966 Fortran assigned gotos), so we cannot just delete the
1967 label. Instead we move the label to the start of block A. */
1968 if (FORCED_LABEL (label))
1969 {
1970 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1971 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1972 }
1973 /* Other user labels are kept around in the form of a debug stmt. */
1974 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1975 {
1976 gimple *dbg = gimple_build_debug_bind (label,
1977 integer_zero_node,
1978 stmt);
1979 gimple_debug_bind_reset_value (dbg);
1980 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1981 }
1982
1983 lp_nr = EH_LANDING_PAD_NR (label);
1984 if (lp_nr)
1985 {
1986 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1987 lp->post_landing_pad = NULL;
1988 }
1989 }
1990 else
1991 {
1992 gimple_set_bb (stmt, a);
1993 gsi_next (&gsi);
1994 }
1995 }
1996
1997 /* When merging two BBs, if their counts are different, the larger count
1998 is selected as the new bb count. This is to handle inconsistent
1999 profiles. */
2000 if (a->loop_father == b->loop_father)
2001 {
2002 a->count = MAX (a->count, b->count);
2003 a->frequency = MAX (a->frequency, b->frequency);
2004 }
2005
2006 /* Merge the sequences. */
2007 last = gsi_last_bb (a);
2008 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2009 set_bb_seq (b, NULL);
2010
2011 if (cfgcleanup_altered_bbs)
2012 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2013 }
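
/* Illustration of the PHI handling above: when merging B into A, a
   degenerate PHI in B such as

     # x_3 = PHI <x_1(A)>

   is removed and x_1 is propagated to all uses of x_3, or, when
   propagation is not possible (e.g. to preserve loop-closed SSA
   form), a copy "x_3 = x_1" is emitted at the end of A instead.  */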
2014
2015
2016 /* Return the one of the two successors of BB that is not reachable by a
2017 complex edge, if there is one; else return BB. We use
2018 this in optimizations that use post-dominators for their heuristics,
2019 to catch the cases in C++ where function calls are involved. */
2020
2021 basic_block
2022 single_noncomplex_succ (basic_block bb)
2023 {
2024 edge e0, e1;
2025 if (EDGE_COUNT (bb->succs) != 2)
2026 return bb;
2027
2028 e0 = EDGE_SUCC (bb, 0);
2029 e1 = EDGE_SUCC (bb, 1);
2030 if (e0->flags & EDGE_COMPLEX)
2031 return e1->dest;
2032 if (e1->flags & EDGE_COMPLEX)
2033 return e0->dest;
2034
2035 return bb;
2036 }
2037
2038 /* CALL is a GIMPLE call statement. Set current_function_calls_* flags. */
2039
2040 void
2041 notice_special_calls (gcall *call)
2042 {
2043 int flags = gimple_call_flags (call);
2044
2045 if (flags & ECF_MAY_BE_ALLOCA)
2046 cfun->calls_alloca = true;
2047 if (flags & ECF_RETURNS_TWICE)
2048 cfun->calls_setjmp = true;
2049 }
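
/* Illustration: for a function body like

     char *p = alloca (n);          // ECF_MAY_BE_ALLOCA
     if (setjmp (env) == 0) ...     // ECF_RETURNS_TWICE

   the CFG builder calls notice_special_calls on each GIMPLE_CALL,
   leaving both cfun->calls_alloca and cfun->calls_setjmp set.  */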
2050
2051
2052 /* Clear flags set by notice_special_calls. Used by dead code removal
2053 to update the flags. */
2054
2055 void
2056 clear_special_calls (void)
2057 {
2058 cfun->calls_alloca = false;
2059 cfun->calls_setjmp = false;
2060 }
2061
2062 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2063
2064 static void
2065 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2066 {
2067 /* Since this block is no longer reachable, we can just delete all
2068 of its PHI nodes. */
2069 remove_phi_nodes (bb);
2070
2071 /* Remove edges to BB's successors. */
2072 while (EDGE_COUNT (bb->succs) > 0)
2073 remove_edge (EDGE_SUCC (bb, 0));
2074 }
2075
2076
2077 /* Remove statements of basic block BB. */
2078
2079 static void
2080 remove_bb (basic_block bb)
2081 {
2082 gimple_stmt_iterator i;
2083
2084 if (dump_file)
2085 {
2086 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2087 if (dump_flags & TDF_DETAILS)
2088 {
2089 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2090 fprintf (dump_file, "\n");
2091 }
2092 }
2093
2094 if (current_loops)
2095 {
2096 struct loop *loop = bb->loop_father;
2097
2098 /* If a loop gets removed, clean up the information associated
2099 with it. */
2100 if (loop->latch == bb
2101 || loop->header == bb)
2102 free_numbers_of_iterations_estimates_loop (loop);
2103 }
2104
2105 /* Remove all the instructions in the block. */
2106 if (bb_seq (bb) != NULL)
2107 {
2108 /* Walk backwards so as to get a chance to substitute all
2109 released DEFs into debug stmts. See
2110 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2111 details. */
2112 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2113 {
2114 gimple *stmt = gsi_stmt (i);
2115 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2116 if (label_stmt
2117 && (FORCED_LABEL (gimple_label_label (label_stmt))
2118 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2119 {
2120 basic_block new_bb;
2121 gimple_stmt_iterator new_gsi;
2122
2123 /* A non-reachable non-local label may still be referenced.
2124 But it no longer needs to carry the extra semantics of
2125 non-locality. */
2126 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2127 {
2128 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2129 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2130 }
2131
2132 new_bb = bb->prev_bb;
2133 new_gsi = gsi_start_bb (new_bb);
2134 gsi_remove (&i, false);
2135 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2136 }
2137 else
2138 {
2139 /* Release SSA definitions. */
2140 release_defs (stmt);
2141 gsi_remove (&i, true);
2142 }
2143
2144 if (gsi_end_p (i))
2145 i = gsi_last_bb (bb);
2146 else
2147 gsi_prev (&i);
2148 }
2149 }
2150
2151 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2152 bb->il.gimple.seq = NULL;
2153 bb->il.gimple.phi_nodes = NULL;
2154 }
2155
2156
2157 /* Given a basic block BB ending in a GIMPLE_COND, GIMPLE_SWITCH or
2158 computed goto, and a predicate value VAL, return the edge that will be
2159 taken out of the block. If VAL does not match a unique edge, NULL is returned. */
2160
2161 edge
2162 find_taken_edge (basic_block bb, tree val)
2163 {
2164 gimple *stmt;
2165
2166 stmt = last_stmt (bb);
2167
2168 gcc_assert (stmt);
2169 gcc_assert (is_ctrl_stmt (stmt));
2170
2171 if (val == NULL)
2172 return NULL;
2173
2174 if (!is_gimple_min_invariant (val))
2175 return NULL;
2176
2177 if (gimple_code (stmt) == GIMPLE_COND)
2178 return find_taken_edge_cond_expr (bb, val);
2179
2180 if (gimple_code (stmt) == GIMPLE_SWITCH)
2181 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2182
2183 if (computed_goto_p (stmt))
2184 {
2185 /* Only optimize if the argument is a label; if the argument is
2186 not a label, then we cannot construct a proper CFG.
2187
2188 It may be the case that we only need to allow the LABEL_REF to
2189 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2190 appear inside a LABEL_EXPR just to be safe. */
2191 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2192 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2193 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2194 return NULL;
2195 }
2196
2197 gcc_unreachable ();
2198 }
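
/* For example, once constant propagation reduces the predicate of a
   GIMPLE_COND to integer_zero_node, find_taken_edge returns the
   false edge and CFG cleanup can delete the unreachable true arm; a
   symbolic VAL simply yields NULL and nothing is folded.  */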
2199
2200 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2201 statement, determine which of the outgoing edges will be taken out of the
2202 block. Return NULL if any edge may be taken. */
2203
2204 static edge
2205 find_taken_edge_computed_goto (basic_block bb, tree val)
2206 {
2207 basic_block dest;
2208 edge e = NULL;
2209
2210 dest = label_to_block (val);
2211 if (dest)
2212 {
2213 e = find_edge (bb, dest);
2214 gcc_assert (e != NULL);
2215 }
2216
2217 return e;
2218 }
2219
2220 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2221 statement, determine which of the two edges will be taken out of the
2222 block. Return NULL if either edge may be taken. */
2223
2224 static edge
2225 find_taken_edge_cond_expr (basic_block bb, tree val)
2226 {
2227 edge true_edge, false_edge;
2228
2229 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2230
2231 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2232 return (integer_zerop (val) ? false_edge : true_edge);
2233 }
2234
2235 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2236 statement, determine which edge will be taken out of the block. Return
2237 NULL if any edge may be taken. */
2238
2239 static edge
2240 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2241 tree val)
2242 {
2243 basic_block dest_bb;
2244 edge e;
2245 tree taken_case;
2246
2247 taken_case = find_case_label_for_value (switch_stmt, val);
2248 dest_bb = label_to_block (CASE_LABEL (taken_case));
2249
2250 e = find_edge (bb, dest_bb);
2251 gcc_assert (e);
2252 return e;
2253 }
2254
2255
2256 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2257 We can make optimal use here of the fact that the case labels are
2258 sorted: We can do a binary search for a case matching VAL. */
2259
2260 static tree
2261 find_case_label_for_value (gswitch *switch_stmt, tree val)
2262 {
2263 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2264 tree default_case = gimple_switch_default_label (switch_stmt);
2265
2266 for (low = 0, high = n; high - low > 1; )
2267 {
2268 size_t i = (high + low) / 2;
2269 tree t = gimple_switch_label (switch_stmt, i);
2270 int cmp;
2271
2272 /* Cache the result of comparing CASE_LOW and val. */
2273 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2274
2275 if (cmp > 0)
2276 high = i;
2277 else
2278 low = i;
2279
2280 if (CASE_HIGH (t) == NULL)
2281 {
2282 /* A single-valued case label. */
2283 if (cmp == 0)
2284 return t;
2285 }
2286 else
2287 {
2288 /* A case range. We can only handle integer ranges. */
2289 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2290 return t;
2291 }
2292 }
2293
2294 return default_case;
2295 }
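
/* A standalone sketch of the search above, over a plain array of
   sorted, non-overlapping ranges instead of CASE_LABEL_EXPRs (the
   types and names are illustrative only; needs <stddef.h> for
   size_t when compiled on its own):

     struct range { long low, high; };  // high == low for single values

     static int
     find_range (const struct range *r, size_t n, long val)
     {
       size_t lo = 0, hi = n;
       if (n == 0)
         return -1;
       // Narrow [lo, hi) to the last range whose low bound is <= val.
       while (hi - lo > 1)
         {
           size_t i = (hi + lo) / 2;
           if (r[i].low > val)
             hi = i;
           else
             lo = i;
         }
       // r[lo] is the only candidate; check it covers val.
       if (r[lo].low <= val && val <= r[lo].high)
         return (int) lo;
       return -1;   // caller falls back to the default label
     }
*/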
2296
2297
2298 /* Dump a basic block on stderr. */
2299
2300 void
2301 gimple_debug_bb (basic_block bb)
2302 {
2303 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2304 }
2305
2306
2307 /* Dump basic block with index N on stderr. */
2308
2309 basic_block
2310 gimple_debug_bb_n (int n)
2311 {
2312 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2313 return BASIC_BLOCK_FOR_FN (cfun, n);
2314 }
2315
2316
2317 /* Dump the CFG on stderr.
2318
2319 FLAGS are the same used by the tree dumping functions
2320 (see TDF_* in dumpfile.h). */
2321
2322 void
2323 gimple_debug_cfg (int flags)
2324 {
2325 gimple_dump_cfg (stderr, flags);
2326 }
2327
2328
2329 /* Dump the program showing basic block boundaries on the given FILE.
2330
2331 FLAGS are the same used by the tree dumping functions (see TDF_* in
2332 tree.h). */
2333
2334 void
2335 gimple_dump_cfg (FILE *file, int flags)
2336 {
2337 if (flags & TDF_DETAILS)
2338 {
2339 dump_function_header (file, current_function_decl, flags);
2340 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2341 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2342 last_basic_block_for_fn (cfun));
2343
2344 brief_dump_cfg (file, flags | TDF_COMMENT);
2345 fprintf (file, "\n");
2346 }
2347
2348 if (flags & TDF_STATS)
2349 dump_cfg_stats (file);
2350
2351 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2352 }
2353
2354
2355 /* Dump CFG statistics on FILE. */
2356
2357 void
2358 dump_cfg_stats (FILE *file)
2359 {
2360 static long max_num_merged_labels = 0;
2361 unsigned long size, total = 0;
2362 long num_edges;
2363 basic_block bb;
2364 const char * const fmt_str = "%-30s%-13s%12s\n";
2365 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2366 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2367 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2368 const char *funcname = current_function_name ();
2369
2370 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2371
2372 fprintf (file, "---------------------------------------------------------\n");
2373 fprintf (file, fmt_str, "", " Number of ", "Memory");
2374 fprintf (file, fmt_str, "", " instances ", "used ");
2375 fprintf (file, "---------------------------------------------------------\n");
2376
2377 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2378 total += size;
2379 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2380 SCALE (size), LABEL (size));
2381
2382 num_edges = 0;
2383 FOR_EACH_BB_FN (bb, cfun)
2384 num_edges += EDGE_COUNT (bb->succs);
2385 size = num_edges * sizeof (struct edge_def);
2386 total += size;
2387 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2388
2389 fprintf (file, "---------------------------------------------------------\n");
2390 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2391 LABEL (total));
2392 fprintf (file, "---------------------------------------------------------\n");
2393 fprintf (file, "\n");
2394
2395 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2396 max_num_merged_labels = cfg_stats.num_merged_labels;
2397
2398 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2399 cfg_stats.num_merged_labels, max_num_merged_labels);
2400
2401 fprintf (file, "\n");
2402 }
2403
2404
2405 /* Dump CFG statistics on stderr. Keep extern so that it's always
2406 linked in the final executable. */
2407
2408 DEBUG_FUNCTION void
2409 debug_cfg_stats (void)
2410 {
2411 dump_cfg_stats (stderr);
2412 }
2413
2414 /*---------------------------------------------------------------------------
2415 Miscellaneous helpers
2416 ---------------------------------------------------------------------------*/
2417
2418 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2419 flow. Transfers of control flow associated with EH are excluded. */
2420
2421 static bool
2422 call_can_make_abnormal_goto (gimple *t)
2423 {
2424 /* If the function has no non-local labels, then a call cannot make an
2425 abnormal transfer of control. */
2426 if (!cfun->has_nonlocal_label
2427 && !cfun->calls_setjmp)
2428 return false;
2429
2430 /* Likewise if the call has no side effects. */
2431 if (!gimple_has_side_effects (t))
2432 return false;
2433
2434 /* Likewise if the called function is leaf. */
2435 if (gimple_call_flags (t) & ECF_LEAF)
2436 return false;
2437
2438 return true;
2439 }
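
/* Example of what this guards against: in

     if (setjmp (env) == 0)
       do_work ();     // might longjmp back to the setjmp receiver

   the call to do_work needs an abnormal edge back to the dispatcher
   block, because control can re-enter abnormally; a call with no
   side effects or one marked ECF_LEAF cannot do that, so no such
   edge is required.  (do_work and env are illustrative names.)  */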
2440
2441
2442 /* Return true if T can make an abnormal transfer of control flow.
2443 Transfers of control flow associated with EH are excluded. */
2444
2445 bool
2446 stmt_can_make_abnormal_goto (gimple *t)
2447 {
2448 if (computed_goto_p (t))
2449 return true;
2450 if (is_gimple_call (t))
2451 return call_can_make_abnormal_goto (t);
2452 return false;
2453 }
2454
2455
2456 /* Return true if T represents a stmt that always transfers control. */
2457
2458 bool
2459 is_ctrl_stmt (gimple *t)
2460 {
2461 switch (gimple_code (t))
2462 {
2463 case GIMPLE_COND:
2464 case GIMPLE_SWITCH:
2465 case GIMPLE_GOTO:
2466 case GIMPLE_RETURN:
2467 case GIMPLE_RESX:
2468 return true;
2469 default:
2470 return false;
2471 }
2472 }
2473
2474
2475 /* Return true if T is a statement that may alter the flow of control
2476 (e.g., a call to a non-returning function). */
2477
2478 bool
2479 is_ctrl_altering_stmt (gimple *t)
2480 {
2481 gcc_assert (t);
2482
2483 switch (gimple_code (t))
2484 {
2485 case GIMPLE_CALL:
2486 /* Per stmt call flag indicates whether the call could alter
2487 control flow. */
2488 if (gimple_call_ctrl_altering_p (t))
2489 return true;
2490 break;
2491
2492 case GIMPLE_EH_DISPATCH:
2493 /* EH_DISPATCH branches to the individual catch handlers at
2494 this level of a try or allowed-exceptions region. It can
2495 fallthru to the next statement as well. */
2496 return true;
2497
2498 case GIMPLE_ASM:
2499 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2500 return true;
2501 break;
2502
2503 CASE_GIMPLE_OMP:
2504 /* OpenMP directives alter control flow. */
2505 return true;
2506
2507 case GIMPLE_TRANSACTION:
2508 /* A transaction start alters control flow. */
2509 return true;
2510
2511 default:
2512 break;
2513 }
2514
2515 /* If a statement can throw, it alters control flow. */
2516 return stmt_can_throw_internal (t);
2517 }
2518
2519
2520 /* Return true if T is a simple local goto. */
2521
2522 bool
2523 simple_goto_p (gimple *t)
2524 {
2525 return (gimple_code (t) == GIMPLE_GOTO
2526 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2527 }
2528
2529
2530 /* Return true if STMT should start a new basic block. PREV_STMT is
2531 the statement preceding STMT. It is used when STMT is a label or a
2532 case label. Labels should only start a new basic block if their
2533 previous statement wasn't a label. Otherwise, sequence of labels
2534 would generate unnecessary basic blocks that only contain a single
2535 label. */
2536
2537 static inline bool
2538 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2539 {
2540 if (stmt == NULL)
2541 return false;
2542
2543 /* Labels start a new basic block only if the preceding statement
2544 wasn't a label of the same type. This prevents the creation of
2545 consecutive blocks that have nothing but a single label. */
2546 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2547 {
2548 /* Nonlocal and computed GOTO targets always start a new block. */
2549 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2550 || FORCED_LABEL (gimple_label_label (label_stmt)))
2551 return true;
2552
2553 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2554 {
2555 if (DECL_NONLOCAL (gimple_label_label (
2556 as_a <glabel *> (prev_stmt))))
2557 return true;
2558
2559 cfg_stats.num_merged_labels++;
2560 return false;
2561 }
2562 else
2563 return true;
2564 }
2565 else if (gimple_code (stmt) == GIMPLE_CALL
2566 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2567 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2568 start a new block. */
2569 return true;
2570
2571 return false;
2572 }
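
/* Illustration: in

     L1:
     L2:
       x_1 = f ();

   only L1 starts a new basic block; L2 is coalesced into it and
   counted in cfg_stats.num_merged_labels.  Had L2 been nonlocal or
   forced (a computed-goto target), it would start its own block.  */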
2573
2574
2575 /* Return true if T should end a basic block. */
2576
2577 bool
2578 stmt_ends_bb_p (gimple *t)
2579 {
2580 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2581 }
2582
2583 /* Remove block annotations and other data structures. */
2584
2585 void
2586 delete_tree_cfg_annotations (struct function *fn)
2587 {
2588 vec_free (label_to_block_map_for_fn (fn));
2589 }
2590
2591 /* Return the virtual PHI node in BB, or NULL if there is none. */
2592
2593 gphi *
2594 get_virtual_phi (basic_block bb)
2595 {
2596 for (gphi_iterator gsi = gsi_start_phis (bb);
2597 !gsi_end_p (gsi);
2598 gsi_next (&gsi))
2599 {
2600 gphi *phi = gsi.phi ();
2601
2602 if (virtual_operand_p (PHI_RESULT (phi)))
2603 return phi;
2604 }
2605
2606 return NULL;
2607 }
2608
2609 /* Return the first statement in basic block BB. */
2610
2611 gimple *
2612 first_stmt (basic_block bb)
2613 {
2614 gimple_stmt_iterator i = gsi_start_bb (bb);
2615 gimple *stmt = NULL;
2616
2617 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2618 {
2619 gsi_next (&i);
2620 stmt = NULL;
2621 }
2622 return stmt;
2623 }
2624
2625 /* Return the first non-label statement in basic block BB. */
2626
2627 static gimple *
2628 first_non_label_stmt (basic_block bb)
2629 {
2630 gimple_stmt_iterator i = gsi_start_bb (bb);
2631 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2632 gsi_next (&i);
2633 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2634 }
2635
2636 /* Return the last statement in basic block BB. */
2637
2638 gimple *
2639 last_stmt (basic_block bb)
2640 {
2641 gimple_stmt_iterator i = gsi_last_bb (bb);
2642 gimple *stmt = NULL;
2643
2644 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2645 {
2646 gsi_prev (&i);
2647 stmt = NULL;
2648 }
2649 return stmt;
2650 }
2651
2652 /* Return the last statement of an otherwise empty block. Return NULL
2653 if the block is totally empty, or if it contains more than one
2654 statement. */
2655
2656 gimple *
2657 last_and_only_stmt (basic_block bb)
2658 {
2659 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2660 gimple *last, *prev;
2661
2662 if (gsi_end_p (i))
2663 return NULL;
2664
2665 last = gsi_stmt (i);
2666 gsi_prev_nondebug (&i);
2667 if (gsi_end_p (i))
2668 return last;
2669
2670 /* Empty statements should no longer appear in the instruction stream.
2671 Everything that might have appeared before should be deleted by
2672 remove_useless_stmts, and the optimizers should just gsi_remove
2673 instead of smashing with build_empty_stmt.
2674
2675 Thus the only thing that should appear here in a block containing
2676 one executable statement is a label. */
2677 prev = gsi_stmt (i);
2678 if (gimple_code (prev) == GIMPLE_LABEL)
2679 return last;
2680 else
2681 return NULL;
2682 }
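
/* E.g. for a block containing just

     L1:
       return x_1;

   this returns the GIMPLE_RETURN (labels and debug stmts do not
   count); were a second executable statement present, it would
   return NULL.  */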
2683
2684 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2685
2686 static void
2687 reinstall_phi_args (edge new_edge, edge old_edge)
2688 {
2689 edge_var_map *vm;
2690 int i;
2691 gphi_iterator phis;
2692
2693 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2694 if (!v)
2695 return;
2696
2697 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2698 v->iterate (i, &vm) && !gsi_end_p (phis);
2699 i++, gsi_next (&phis))
2700 {
2701 gphi *phi = phis.phi ();
2702 tree result = redirect_edge_var_map_result (vm);
2703 tree arg = redirect_edge_var_map_def (vm);
2704
2705 gcc_assert (result == gimple_phi_result (phi));
2706
2707 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2708 }
2709
2710 redirect_edge_var_map_clear (old_edge);
2711 }
2712
2713 /* Returns the basic block after which the new basic block created
2714 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2715 near its "logical" location. This is of most help to humans looking
2716 at debugging dumps. */
2717
2718 basic_block
2719 split_edge_bb_loc (edge edge_in)
2720 {
2721 basic_block dest = edge_in->dest;
2722 basic_block dest_prev = dest->prev_bb;
2723
2724 if (dest_prev)
2725 {
2726 edge e = find_edge (dest_prev, dest);
2727 if (e && !(e->flags & EDGE_COMPLEX))
2728 return edge_in->src;
2729 }
2730 return dest_prev;
2731 }
2732
2733 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2734 Abort on abnormal edges. */
2735
2736 static basic_block
2737 gimple_split_edge (edge edge_in)
2738 {
2739 basic_block new_bb, after_bb, dest;
2740 edge new_edge, e;
2741
2742 /* Abnormal edges cannot be split. */
2743 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2744
2745 dest = edge_in->dest;
2746
2747 after_bb = split_edge_bb_loc (edge_in);
2748
2749 new_bb = create_empty_bb (after_bb);
2750 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2751 new_bb->count = edge_in->count;
2752 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2753 new_edge->probability = REG_BR_PROB_BASE;
2754 new_edge->count = edge_in->count;
2755
2756 e = redirect_edge_and_branch (edge_in, new_bb);
2757 gcc_assert (e == edge_in);
2758 reinstall_phi_args (new_edge, e);
2759
2760 return new_bb;
2761 }
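
/* Sketch of the transformation when splitting the edge E: A -> B:

     A --E--> B     becomes     A --E--> NEW --fallthru--> B

   E is redirected to NEW, NEW gets a single EDGE_FALLTHRU edge to B
   carrying E's count, and the PHI arguments queued on the redirected
   edge are reinstalled on the new fallthru edge by
   reinstall_phi_args.  */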
2762
2763
2764 /* Verify properties of the address expression T with base object BASE. */
2765
2766 static tree
2767 verify_address (tree t, tree base)
2768 {
2769 bool old_constant;
2770 bool old_side_effects;
2771 bool new_constant;
2772 bool new_side_effects;
2773
2774 old_constant = TREE_CONSTANT (t);
2775 old_side_effects = TREE_SIDE_EFFECTS (t);
2776
2777 recompute_tree_invariant_for_addr_expr (t);
2778 new_side_effects = TREE_SIDE_EFFECTS (t);
2779 new_constant = TREE_CONSTANT (t);
2780
2781 if (old_constant != new_constant)
2782 {
2783 error ("constant not recomputed when ADDR_EXPR changed");
2784 return t;
2785 }
2786 if (old_side_effects != new_side_effects)
2787 {
2788 error ("side effects not recomputed when ADDR_EXPR changed");
2789 return t;
2790 }
2791
2792 if (!(TREE_CODE (base) == VAR_DECL
2793 || TREE_CODE (base) == PARM_DECL
2794 || TREE_CODE (base) == RESULT_DECL))
2795 return NULL_TREE;
2796
2797 if (DECL_GIMPLE_REG_P (base))
2798 {
2799 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2800 return base;
2801 }
2802
2803 return NULL_TREE;
2804 }
2805
2806 /* Callback for walk_tree, check that all elements with address taken are
2807 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2808 inside a PHI node. */
2809
2810 static tree
2811 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2812 {
2813 tree t = *tp, x;
2814
2815 if (TYPE_P (t))
2816 *walk_subtrees = 0;
2817
2818 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2819 #define CHECK_OP(N, MSG) \
2820 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2821 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2822
2823 switch (TREE_CODE (t))
2824 {
2825 case SSA_NAME:
2826 if (SSA_NAME_IN_FREE_LIST (t))
2827 {
2828 error ("SSA name in freelist but still referenced");
2829 return *tp;
2830 }
2831 break;
2832
2833 case PARM_DECL:
2834 case VAR_DECL:
2835 case RESULT_DECL:
2836 {
2837 tree context = decl_function_context (t);
2838 if (context != cfun->decl
2839 && !SCOPE_FILE_SCOPE_P (context)
2840 && !TREE_STATIC (t)
2841 && !DECL_EXTERNAL (t))
2842 {
2843 error ("Local declaration from a different function");
2844 return t;
2845 }
2846 }
2847 break;
2848
2849 case INDIRECT_REF:
2850 error ("INDIRECT_REF in gimple IL");
2851 return t;
2852
2853 case MEM_REF:
2854 x = TREE_OPERAND (t, 0);
2855 if (!POINTER_TYPE_P (TREE_TYPE (x))
2856 || !is_gimple_mem_ref_addr (x))
2857 {
2858 error ("invalid first operand of MEM_REF");
2859 return x;
2860 }
2861 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2862 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2863 {
2864 error ("invalid offset operand of MEM_REF");
2865 return TREE_OPERAND (t, 1);
2866 }
2867 if (TREE_CODE (x) == ADDR_EXPR)
2868 {
2869 tree va = verify_address (x, TREE_OPERAND (x, 0));
2870 if (va)
2871 return va;
2872 x = TREE_OPERAND (x, 0);
2873 }
2874 walk_tree (&x, verify_expr, data, NULL);
2875 *walk_subtrees = 0;
2876 break;
2877
2878 case ASSERT_EXPR:
2879 x = fold (ASSERT_EXPR_COND (t));
2880 if (x == boolean_false_node)
2881 {
2882 error ("ASSERT_EXPR with an always-false condition");
2883 return *tp;
2884 }
2885 break;
2886
2887 case MODIFY_EXPR:
2888 error ("MODIFY_EXPR not expected while having tuples");
2889 return *tp;
2890
2891 case ADDR_EXPR:
2892 {
2893 tree tem;
2894
2895 gcc_assert (is_gimple_address (t));
2896
2897 /* Skip any references (they will be checked when we recurse down the
2898 tree) and ensure that any variable used as a prefix is marked
2899 addressable. */
2900 for (x = TREE_OPERAND (t, 0);
2901 handled_component_p (x);
2902 x = TREE_OPERAND (x, 0))
2903 ;
2904
2905 if ((tem = verify_address (t, x)))
2906 return tem;
2907
2908 if (!(TREE_CODE (x) == VAR_DECL
2909 || TREE_CODE (x) == PARM_DECL
2910 || TREE_CODE (x) == RESULT_DECL))
2911 return NULL;
2912
2913 if (!TREE_ADDRESSABLE (x))
2914 {
2915 error ("address taken, but ADDRESSABLE bit not set");
2916 return x;
2917 }
2918
2919 break;
2920 }
2921
2922 case COND_EXPR:
2923 x = COND_EXPR_COND (t);
2924 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2925 {
2926 error ("non-integral used in condition");
2927 return x;
2928 }
2929 if (!is_gimple_condexpr (x))
2930 {
2931 error ("invalid conditional operand");
2932 return x;
2933 }
2934 break;
2935
2936 case NON_LVALUE_EXPR:
2937 case TRUTH_NOT_EXPR:
2938 gcc_unreachable ();
2939
2940 CASE_CONVERT:
2941 case FIX_TRUNC_EXPR:
2942 case FLOAT_EXPR:
2943 case NEGATE_EXPR:
2944 case ABS_EXPR:
2945 case BIT_NOT_EXPR:
2946 CHECK_OP (0, "invalid operand to unary operator");
2947 break;
2948
2949 case REALPART_EXPR:
2950 case IMAGPART_EXPR:
2951 case BIT_FIELD_REF:
2952 if (!is_gimple_reg_type (TREE_TYPE (t)))
2953 {
2954 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2955 return t;
2956 }
2957
2958 if (TREE_CODE (t) == BIT_FIELD_REF)
2959 {
2960 tree t0 = TREE_OPERAND (t, 0);
2961 tree t1 = TREE_OPERAND (t, 1);
2962 tree t2 = TREE_OPERAND (t, 2);
2963 if (!tree_fits_uhwi_p (t1)
2964 || !tree_fits_uhwi_p (t2))
2965 {
2966 error ("invalid position or size operand to BIT_FIELD_REF");
2967 return t;
2968 }
2969 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2970 && (TYPE_PRECISION (TREE_TYPE (t))
2971 != tree_to_uhwi (t1)))
2972 {
2973 error ("integral result type precision does not match "
2974 "field size of BIT_FIELD_REF");
2975 return t;
2976 }
2977 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2978 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2979 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
2980 != tree_to_uhwi (t1)))
2981 {
2982 error ("mode size of non-integral result does not "
2983 "match field size of BIT_FIELD_REF");
2984 return t;
2985 }
2986 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2987 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2988 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2989 {
2990 error ("position plus size exceeds size of referenced object in "
2991 "BIT_FIELD_REF");
2992 return t;
2993 }
2994 }
2995 t = TREE_OPERAND (t, 0);
2996
2997 /* Fall-through. */
2998 case COMPONENT_REF:
2999 case ARRAY_REF:
3000 case ARRAY_RANGE_REF:
3001 case VIEW_CONVERT_EXPR:
3002 /* We have a nest of references. Verify that each of the operands
3003 that determine where to reference is either a constant or a variable,
3004 verify that the base is valid, and then show we've already checked
3005 the subtrees. */
3006 while (handled_component_p (t))
3007 {
3008 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3009 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3010 else if (TREE_CODE (t) == ARRAY_REF
3011 || TREE_CODE (t) == ARRAY_RANGE_REF)
3012 {
3013 CHECK_OP (1, "invalid array index");
3014 if (TREE_OPERAND (t, 2))
3015 CHECK_OP (2, "invalid array lower bound");
3016 if (TREE_OPERAND (t, 3))
3017 CHECK_OP (3, "invalid array stride");
3018 }
3019 else if (TREE_CODE (t) == BIT_FIELD_REF
3020 || TREE_CODE (t) == REALPART_EXPR
3021 || TREE_CODE (t) == IMAGPART_EXPR)
3022 {
3023 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3024 "REALPART_EXPR");
3025 return t;
3026 }
3027
3028 t = TREE_OPERAND (t, 0);
3029 }
3030
3031 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3032 {
3033 error ("invalid reference prefix");
3034 return t;
3035 }
3036 walk_tree (&t, verify_expr, data, NULL);
3037 *walk_subtrees = 0;
3038 break;
3039 case PLUS_EXPR:
3040 case MINUS_EXPR:
3041 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3042 should be done using POINTER_PLUS_EXPR. */
3043 if (POINTER_TYPE_P (TREE_TYPE (t)))
3044 {
3045 error ("invalid operand to plus/minus, type is a pointer");
3046 return t;
3047 }
3048 CHECK_OP (0, "invalid operand to binary operator");
3049 CHECK_OP (1, "invalid operand to binary operator");
3050 break;
3051
3052 case POINTER_PLUS_EXPR:
3053 /* Check to make sure the first operand is a pointer or reference type. */
3054 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3055 {
3056 error ("invalid operand to pointer plus, first operand is not a pointer");
3057 return t;
3058 }
3059 /* Check to make sure the second operand is a ptrofftype. */
3060 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3061 {
3062 error ("invalid operand to pointer plus, second operand is not an "
3063 "integer type of appropriate width");
3064 return t;
3065 }
3066 /* FALLTHROUGH */
3067 case LT_EXPR:
3068 case LE_EXPR:
3069 case GT_EXPR:
3070 case GE_EXPR:
3071 case EQ_EXPR:
3072 case NE_EXPR:
3073 case UNORDERED_EXPR:
3074 case ORDERED_EXPR:
3075 case UNLT_EXPR:
3076 case UNLE_EXPR:
3077 case UNGT_EXPR:
3078 case UNGE_EXPR:
3079 case UNEQ_EXPR:
3080 case LTGT_EXPR:
3081 case MULT_EXPR:
3082 case TRUNC_DIV_EXPR:
3083 case CEIL_DIV_EXPR:
3084 case FLOOR_DIV_EXPR:
3085 case ROUND_DIV_EXPR:
3086 case TRUNC_MOD_EXPR:
3087 case CEIL_MOD_EXPR:
3088 case FLOOR_MOD_EXPR:
3089 case ROUND_MOD_EXPR:
3090 case RDIV_EXPR:
3091 case EXACT_DIV_EXPR:
3092 case MIN_EXPR:
3093 case MAX_EXPR:
3094 case LSHIFT_EXPR:
3095 case RSHIFT_EXPR:
3096 case LROTATE_EXPR:
3097 case RROTATE_EXPR:
3098 case BIT_IOR_EXPR:
3099 case BIT_XOR_EXPR:
3100 case BIT_AND_EXPR:
3101 CHECK_OP (0, "invalid operand to binary operator");
3102 CHECK_OP (1, "invalid operand to binary operator");
3103 break;
3104
3105 case CONSTRUCTOR:
3106 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3107 *walk_subtrees = 0;
3108 break;
3109
3110 case CASE_LABEL_EXPR:
3111 if (CASE_CHAIN (t))
3112 {
3113 error ("invalid CASE_CHAIN");
3114 return t;
3115 }
3116 break;
3117
3118 default:
3119 break;
3120 }
3121 return NULL;
3122
3123 #undef CHECK_OP
3124 }
3125
3126
3127 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3128 Returns true if there is an error, otherwise false. */
3129
3130 static bool
3131 verify_types_in_gimple_min_lval (tree expr)
3132 {
3133 tree op;
3134
3135 if (is_gimple_id (expr))
3136 return false;
3137
3138 if (TREE_CODE (expr) != TARGET_MEM_REF
3139 && TREE_CODE (expr) != MEM_REF)
3140 {
3141 error ("invalid expression for min lvalue");
3142 return true;
3143 }
3144
3145 /* TARGET_MEM_REFs are strange beasts. */
3146 if (TREE_CODE (expr) == TARGET_MEM_REF)
3147 return false;
3148
3149 op = TREE_OPERAND (expr, 0);
3150 if (!is_gimple_val (op))
3151 {
3152 error ("invalid operand in indirect reference");
3153 debug_generic_stmt (op);
3154 return true;
3155 }
3156 /* Memory references now generally can involve a value conversion. */
3157
3158 return false;
3159 }
3160
3161 /* Verify if EXPR is a valid GIMPLE reference expression. If
3162 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3163 if there is an error, otherwise false. */
3164
3165 static bool
3166 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3167 {
3168 while (handled_component_p (expr))
3169 {
3170 tree op = TREE_OPERAND (expr, 0);
3171
3172 if (TREE_CODE (expr) == ARRAY_REF
3173 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3174 {
3175 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3176 || (TREE_OPERAND (expr, 2)
3177 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3178 || (TREE_OPERAND (expr, 3)
3179 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3180 {
3181 error ("invalid operands to array reference");
3182 debug_generic_stmt (expr);
3183 return true;
3184 }
3185 }
3186
3187 /* Verify if the reference array element types are compatible. */
3188 if (TREE_CODE (expr) == ARRAY_REF
3189 && !useless_type_conversion_p (TREE_TYPE (expr),
3190 TREE_TYPE (TREE_TYPE (op))))
3191 {
3192 error ("type mismatch in array reference");
3193 debug_generic_stmt (TREE_TYPE (expr));
3194 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3195 return true;
3196 }
3197 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3198 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3199 TREE_TYPE (TREE_TYPE (op))))
3200 {
3201 error ("type mismatch in array range reference");
3202 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3203 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3204 return true;
3205 }
3206
3207 if ((TREE_CODE (expr) == REALPART_EXPR
3208 || TREE_CODE (expr) == IMAGPART_EXPR)
3209 && !useless_type_conversion_p (TREE_TYPE (expr),
3210 TREE_TYPE (TREE_TYPE (op))))
3211 {
3212 error ("type mismatch in real/imagpart reference");
3213 debug_generic_stmt (TREE_TYPE (expr));
3214 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3215 return true;
3216 }
3217
3218 if (TREE_CODE (expr) == COMPONENT_REF
3219 && !useless_type_conversion_p (TREE_TYPE (expr),
3220 TREE_TYPE (TREE_OPERAND (expr, 1))))
3221 {
3222 error ("type mismatch in component reference");
3223 debug_generic_stmt (TREE_TYPE (expr));
3224 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3225 return true;
3226 }
3227
3228 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3229 {
3230 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3231 that their operand is not an SSA name or an invariant when
3232 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3233 bug). Otherwise there is nothing to verify, gross mismatches at
3234 most invoke undefined behavior. */
3235 if (require_lvalue
3236 && (TREE_CODE (op) == SSA_NAME
3237 || is_gimple_min_invariant (op)))
3238 {
3239 error ("conversion of an SSA_NAME on the left hand side");
3240 debug_generic_stmt (expr);
3241 return true;
3242 }
3243 else if (TREE_CODE (op) == SSA_NAME
3244 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3245 {
3246 error ("conversion of register to a different size");
3247 debug_generic_stmt (expr);
3248 return true;
3249 }
3250 else if (!handled_component_p (op))
3251 return false;
3252 }
3253
3254 expr = op;
3255 }
3256
3257 if (TREE_CODE (expr) == MEM_REF)
3258 {
3259 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3260 {
3261 error ("invalid address operand in MEM_REF");
3262 debug_generic_stmt (expr);
3263 return true;
3264 }
3265 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3266 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3267 {
3268 error ("invalid offset operand in MEM_REF");
3269 debug_generic_stmt (expr);
3270 return true;
3271 }
3272 }
3273 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3274 {
3275 if (!TMR_BASE (expr)
3276 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3277 {
3278 error ("invalid address operand in TARGET_MEM_REF");
3279 return true;
3280 }
3281 if (!TMR_OFFSET (expr)
3282 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3283 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3284 {
3285 error ("invalid offset operand in TARGET_MEM_REF");
3286 debug_generic_stmt (expr);
3287 return true;
3288 }
3289 }
3290
3291 return ((require_lvalue || !is_gimple_min_invariant (expr))
3292 && verify_types_in_gimple_min_lval (expr));
3293 }
3294
3295 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3296 list of pointer-to types that is trivially convertible to DEST. */
3297
3298 static bool
3299 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3300 {
3301 tree src;
3302
3303 if (!TYPE_POINTER_TO (src_obj))
3304 return true;
3305
3306 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3307 if (useless_type_conversion_p (dest, src))
3308 return true;
3309
3310 return false;
3311 }
3312
3313 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3314 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3315
3316 static bool
3317 valid_fixed_convert_types_p (tree type1, tree type2)
3318 {
3319 return (FIXED_POINT_TYPE_P (type1)
3320 && (INTEGRAL_TYPE_P (type2)
3321 || SCALAR_FLOAT_TYPE_P (type2)
3322 || FIXED_POINT_TYPE_P (type2)));
3323 }
3324
3325 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3326 is a problem, otherwise false. */
3327
3328 static bool
3329 verify_gimple_call (gcall *stmt)
3330 {
3331 tree fn = gimple_call_fn (stmt);
3332 tree fntype, fndecl;
3333 unsigned i;
3334
3335 if (gimple_call_internal_p (stmt))
3336 {
3337 if (fn)
3338 {
3339 error ("gimple call has two targets");
3340 debug_generic_stmt (fn);
3341 return true;
3342 }
3343 }
3344 else
3345 {
3346 if (!fn)
3347 {
3348 error ("gimple call has no target");
3349 return true;
3350 }
3351 }
3352
3353 if (fn && !is_gimple_call_addr (fn))
3354 {
3355 error ("invalid function in gimple call");
3356 debug_generic_stmt (fn);
3357 return true;
3358 }
3359
3360 if (fn
3361 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3362 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3363 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3364 {
3365 error ("non-function in gimple call");
3366 return true;
3367 }
3368
3369 fndecl = gimple_call_fndecl (stmt);
3370 if (fndecl
3371 && TREE_CODE (fndecl) == FUNCTION_DECL
3372 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3373 && !DECL_PURE_P (fndecl)
3374 && !TREE_READONLY (fndecl))
3375 {
3376 error ("invalid pure const state for function");
3377 return true;
3378 }
3379
3380 tree lhs = gimple_call_lhs (stmt);
3381 if (lhs
3382 && (!is_gimple_lvalue (lhs)
3383 || verify_types_in_gimple_reference (lhs, true)))
3384 {
3385 error ("invalid LHS in gimple call");
3386 return true;
3387 }
3388
3389 if (gimple_call_ctrl_altering_p (stmt)
3390 && gimple_call_noreturn_p (stmt)
3391 && should_remove_lhs_p (lhs))
3392 {
3393 error ("LHS in noreturn call");
3394 return true;
3395 }
3396
3397 fntype = gimple_call_fntype (stmt);
3398 if (fntype
3399 && lhs
3400 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3401 /* ??? At least C++ misses conversions at assignments from
3402 void * call results.
3403 ??? Java is completely off. Especially with functions
3404 returning java.lang.Object.
3405 For now simply allow arbitrary pointer type conversions. */
3406 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3407 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3408 {
3409 error ("invalid conversion in gimple call");
3410 debug_generic_stmt (TREE_TYPE (lhs));
3411 debug_generic_stmt (TREE_TYPE (fntype));
3412 return true;
3413 }
3414
3415 if (gimple_call_chain (stmt)
3416 && !is_gimple_val (gimple_call_chain (stmt)))
3417 {
3418 error ("invalid static chain in gimple call");
3419 debug_generic_stmt (gimple_call_chain (stmt));
3420 return true;
3421 }
3422
3423 /* If there is a static chain argument, the call should either be
3424 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3425 if (gimple_call_chain (stmt)
3426 && fndecl
3427 && !DECL_STATIC_CHAIN (fndecl))
3428 {
3429 error ("static chain with function that doesn%'t use one");
3430 return true;
3431 }
3432
3433 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3434 {
3435 switch (DECL_FUNCTION_CODE (fndecl))
3436 {
3437 case BUILT_IN_UNREACHABLE:
3438 case BUILT_IN_TRAP:
3439 if (gimple_call_num_args (stmt) > 0)
3440 {
3441 /* Built-in unreachable with parameters might not be caught by
3442 the undefined behavior sanitizer. Front-ends do check that users
3443 do not call them that way, but we also produce calls to
3444 __builtin_unreachable internally, for example when IPA figures
3445 out a call cannot happen in a legal program. In such cases,
3446 we must make sure arguments are stripped off. */
3447 error ("__builtin_unreachable or __builtin_trap call with "
3448 "arguments");
3449 return true;
3450 }
3451 break;
3452 default:
3453 break;
3454 }
3455 }
3456
3457 /* ??? The C frontend passes unpromoted arguments in case it
3458 didn't see a function declaration before the call. So for now
3459 leave the call arguments mostly unverified. Once we gimplify
3460 unit-at-a-time we have a chance to fix this. */
3461
3462 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3463 {
3464 tree arg = gimple_call_arg (stmt, i);
3465 if ((is_gimple_reg_type (TREE_TYPE (arg))
3466 && !is_gimple_val (arg))
3467 || (!is_gimple_reg_type (TREE_TYPE (arg))
3468 && !is_gimple_lvalue (arg)))
3469 {
3470 error ("invalid argument to gimple call");
3471 debug_generic_expr (arg);
3472 return true;
3473 }
3474 }
3475
3476 return false;
3477 }
3478
3479 /* Verifies the gimple comparison with the result type TYPE and
3480 the operands OP0 and OP1; the comparison code is CODE. */
3481
3482 static bool
3483 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3484 {
3485 tree op0_type = TREE_TYPE (op0);
3486 tree op1_type = TREE_TYPE (op1);
3487
3488 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3489 {
3490 error ("invalid operands in gimple comparison");
3491 return true;
3492 }
3493
3494 /* For comparisons we do not have the operations type as the
3495 effective type the comparison is carried out in. Instead
3496 we require that either the first operand is trivially
3497 convertible into the second, or the other way around.
3498 Because we special-case pointers to void we allow
3499 comparisons of pointers with the same mode as well. */
3500 if (!useless_type_conversion_p (op0_type, op1_type)
3501 && !useless_type_conversion_p (op1_type, op0_type)
3502 && (!POINTER_TYPE_P (op0_type)
3503 || !POINTER_TYPE_P (op1_type)
3504 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3505 {
3506 error ("mismatching comparison operand types");
3507 debug_generic_expr (op0_type);
3508 debug_generic_expr (op1_type);
3509 return true;
3510 }
3511
3512 /* The resulting type of a comparison may be an effective boolean type. */
3513 if (INTEGRAL_TYPE_P (type)
3514 && (TREE_CODE (type) == BOOLEAN_TYPE
3515 || TYPE_PRECISION (type) == 1))
3516 {
3517 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3518 || TREE_CODE (op1_type) == VECTOR_TYPE)
3519 && code != EQ_EXPR && code != NE_EXPR
3520 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3521 && !VECTOR_INTEGER_TYPE_P (op0_type))
3522 {
3523 error ("unsupported operation or type for vector comparison"
3524 " returning a boolean");
3525 debug_generic_expr (op0_type);
3526 debug_generic_expr (op1_type);
3527 return true;
3528 }
3529 }
3530 /* Or a boolean vector type with the same element count
3531 as the comparison operand types. */
3532 else if (TREE_CODE (type) == VECTOR_TYPE
3533 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3534 {
3535 if (TREE_CODE (op0_type) != VECTOR_TYPE
3536 || TREE_CODE (op1_type) != VECTOR_TYPE)
3537 {
3538 error ("non-vector operands in vector comparison");
3539 debug_generic_expr (op0_type);
3540 debug_generic_expr (op1_type);
3541 return true;
3542 }
3543
3544 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type))
3545 {
3546 error ("invalid vector comparison resulting type");
3547 debug_generic_expr (type);
3548 return true;
3549 }
3550 }
3551 else
3552 {
3553 error ("bogus comparison result type");
3554 debug_generic_expr (type);
3555 return true;
3556 }
3557
3558 return false;
3559 }
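
/* Illustrative GIMPLE accepted and rejected by the checks above:

     _1 = a_2 < b_3;    // ok: integral operands, boolean result
     _1 = p_4 == q_5;   // ok: pointers of the same mode
     _1 = a_2 < p_4;    // error: mismatching comparison operand types
     v_1 = u_2 < w_3;   // vectors: ok only if v_1's type is a boolean
                        //  vector with the operands' element count  */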
3560
3561 /* Verify a gimple assignment statement STMT with a unary rhs.
3562 Returns true if anything is wrong. */
3563
3564 static bool
3565 verify_gimple_assign_unary (gassign *stmt)
3566 {
3567 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3568 tree lhs = gimple_assign_lhs (stmt);
3569 tree lhs_type = TREE_TYPE (lhs);
3570 tree rhs1 = gimple_assign_rhs1 (stmt);
3571 tree rhs1_type = TREE_TYPE (rhs1);
3572
3573 if (!is_gimple_reg (lhs))
3574 {
3575 error ("non-register as LHS of unary operation");
3576 return true;
3577 }
3578
3579 if (!is_gimple_val (rhs1))
3580 {
3581 error ("invalid operand in unary operation");
3582 return true;
3583 }
3584
3585 /* First handle conversions. */
3586 switch (rhs_code)
3587 {
3588 CASE_CONVERT:
3589 {
3590 /* Allow conversions from pointer type to integral type only if
3591 there is no sign or zero extension involved.
3592 For targets where the precision of ptrofftype doesn't match that
3593 of pointers we need to allow arbitrary conversions to ptrofftype. */
3594 if ((POINTER_TYPE_P (lhs_type)
3595 && INTEGRAL_TYPE_P (rhs1_type))
3596 || (POINTER_TYPE_P (rhs1_type)
3597 && INTEGRAL_TYPE_P (lhs_type)
3598 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3599 || ptrofftype_p (sizetype))))
3600 return false;
3601
3602 /* Allow conversion from integral to offset type and vice versa. */
3603 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3604 && INTEGRAL_TYPE_P (rhs1_type))
3605 || (INTEGRAL_TYPE_P (lhs_type)
3606 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3607 return false;
3608
3609 /* Otherwise assert we are converting between types of the
3610 same kind. */
3611 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3612 {
3613 error ("invalid types in nop conversion");
3614 debug_generic_expr (lhs_type);
3615 debug_generic_expr (rhs1_type);
3616 return true;
3617 }
3618
3619 return false;
3620 }
3621
3622 case ADDR_SPACE_CONVERT_EXPR:
3623 {
3624 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3625 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3626 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3627 {
3628 error ("invalid types in address space conversion");
3629 debug_generic_expr (lhs_type);
3630 debug_generic_expr (rhs1_type);
3631 return true;
3632 }
3633
3634 return false;
3635 }
3636
3637 case FIXED_CONVERT_EXPR:
3638 {
3639 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3640 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3641 {
3642 error ("invalid types in fixed-point conversion");
3643 debug_generic_expr (lhs_type);
3644 debug_generic_expr (rhs1_type);
3645 return true;
3646 }
3647
3648 return false;
3649 }
3650
3651 case FLOAT_EXPR:
3652 {
3653 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3654 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3655 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3656 {
3657 error ("invalid types in conversion to floating point");
3658 debug_generic_expr (lhs_type);
3659 debug_generic_expr (rhs1_type);
3660 return true;
3661 }
3662
3663 return false;
3664 }
3665
3666 case FIX_TRUNC_EXPR:
3667 {
3668 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3669 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3670 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3671 {
3672 error ("invalid types in conversion to integer");
3673 debug_generic_expr (lhs_type);
3674 debug_generic_expr (rhs1_type);
3675 return true;
3676 }
3677
3678 return false;
3679 }
3680 case REDUC_MAX_EXPR:
3681 case REDUC_MIN_EXPR:
3682 case REDUC_PLUS_EXPR:
3683 if (!VECTOR_TYPE_P (rhs1_type)
3684 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3685 {
3686 error ("reduction should convert from vector to element type");
3687 debug_generic_expr (lhs_type);
3688 debug_generic_expr (rhs1_type);
3689 return true;
3690 }
3691 return false;
3692
3693 case VEC_UNPACK_HI_EXPR:
3694 case VEC_UNPACK_LO_EXPR:
3695 case VEC_UNPACK_FLOAT_HI_EXPR:
3696 case VEC_UNPACK_FLOAT_LO_EXPR:
3697 /* FIXME. */
3698 return false;
3699
3700 case NEGATE_EXPR:
3701 case ABS_EXPR:
3702 case BIT_NOT_EXPR:
3703 case PAREN_EXPR:
3704 case CONJ_EXPR:
3705 break;
3706
3707 default:
3708 gcc_unreachable ();
3709 }
3710
3711 /* For the remaining codes assert there is no conversion involved. */
3712 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3713 {
3714 error ("non-trivial conversion in unary operation");
3715 debug_generic_expr (lhs_type);
3716 debug_generic_expr (rhs1_type);
3717 return true;
3718 }
3719
3720 return false;
3721 }
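
/* Some CASE_CONVERT outcomes under the rules above (illustrative):

     long  <- int     ok: integral to integral
     char* <- long    ok: integral to pointer
     long  <- char*   ok only if no extension is involved (or the
                      target has a loose ptrofftype)
     float <- int     error: "invalid types in nop conversion";
                      that conversion is FLOAT_EXPR, not a nop.  */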
3722
3723 /* Verify a gimple assignment statement STMT with a binary rhs.
3724 Returns true if anything is wrong. */
3725
3726 static bool
3727 verify_gimple_assign_binary (gassign *stmt)
3728 {
3729 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3730 tree lhs = gimple_assign_lhs (stmt);
3731 tree lhs_type = TREE_TYPE (lhs);
3732 tree rhs1 = gimple_assign_rhs1 (stmt);
3733 tree rhs1_type = TREE_TYPE (rhs1);
3734 tree rhs2 = gimple_assign_rhs2 (stmt);
3735 tree rhs2_type = TREE_TYPE (rhs2);
3736
3737 if (!is_gimple_reg (lhs))
3738 {
3739 error ("non-register as LHS of binary operation");
3740 return true;
3741 }
3742
3743 if (!is_gimple_val (rhs1)
3744 || !is_gimple_val (rhs2))
3745 {
3746 error ("invalid operands in binary operation");
3747 return true;
3748 }
3749
3750 /* First handle operations that involve different types. */
3751 switch (rhs_code)
3752 {
3753 case COMPLEX_EXPR:
3754 {
3755 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3756 || !(INTEGRAL_TYPE_P (rhs1_type)
3757 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3758 || !(INTEGRAL_TYPE_P (rhs2_type)
3759 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3760 {
3761 error ("type mismatch in complex expression");
3762 debug_generic_expr (lhs_type);
3763 debug_generic_expr (rhs1_type);
3764 debug_generic_expr (rhs2_type);
3765 return true;
3766 }
3767
3768 return false;
3769 }
3770
3771 case LSHIFT_EXPR:
3772 case RSHIFT_EXPR:
3773 case LROTATE_EXPR:
3774 case RROTATE_EXPR:
3775 {
3776 /* Shifts and rotates are ok on integral types, fixed point
3777 types and integer vector types. */
3778 if ((!INTEGRAL_TYPE_P (rhs1_type)
3779 && !FIXED_POINT_TYPE_P (rhs1_type)
3780 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3781 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3782 || (!INTEGRAL_TYPE_P (rhs2_type)
3783 /* Vector shifts of vectors are also ok. */
3784 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3785 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3786 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3787 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3788 || !useless_type_conversion_p (lhs_type, rhs1_type))
3789 {
3790 error ("type mismatch in shift expression");
3791 debug_generic_expr (lhs_type);
3792 debug_generic_expr (rhs1_type);
3793 debug_generic_expr (rhs2_type);
3794 return true;
3795 }
3796
3797 return false;
3798 }
3799
3800 case WIDEN_LSHIFT_EXPR:
3801 {
3802 if (!INTEGRAL_TYPE_P (lhs_type)
3803 || !INTEGRAL_TYPE_P (rhs1_type)
3804 || TREE_CODE (rhs2) != INTEGER_CST
3805 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3806 {
3807 error ("type mismatch in widening vector shift expression");
3808 debug_generic_expr (lhs_type);
3809 debug_generic_expr (rhs1_type);
3810 debug_generic_expr (rhs2_type);
3811 return true;
3812 }
3813
3814 return false;
3815 }
3816
3817 case VEC_WIDEN_LSHIFT_HI_EXPR:
3818 case VEC_WIDEN_LSHIFT_LO_EXPR:
3819 {
3820 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3821 || TREE_CODE (lhs_type) != VECTOR_TYPE
3822 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3823 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3824 || TREE_CODE (rhs2) != INTEGER_CST
3825 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3826 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3827 {
3828 error ("type mismatch in widening vector shift expression");
3829 debug_generic_expr (lhs_type);
3830 debug_generic_expr (rhs1_type);
3831 debug_generic_expr (rhs2_type);
3832 return true;
3833 }
3834
3835 return false;
3836 }
3837
3838 case PLUS_EXPR:
3839 case MINUS_EXPR:
3840 {
3841 tree lhs_etype = lhs_type;
3842 tree rhs1_etype = rhs1_type;
3843 tree rhs2_etype = rhs2_type;
3844 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3845 {
3846 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3847 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3848 {
3849 error ("invalid non-vector operands to vector valued plus");
3850 return true;
3851 }
3852 lhs_etype = TREE_TYPE (lhs_type);
3853 rhs1_etype = TREE_TYPE (rhs1_type);
3854 rhs2_etype = TREE_TYPE (rhs2_type);
3855 }
3856 if (POINTER_TYPE_P (lhs_etype)
3857 || POINTER_TYPE_P (rhs1_etype)
3858 || POINTER_TYPE_P (rhs2_etype))
3859 {
3860 error ("invalid (pointer) operands to plus/minus");
3861 return true;
3862 }
3863
3864 /* Continue with generic binary expression handling. */
3865 break;
3866 }
3867
3868 case POINTER_PLUS_EXPR:
3869 {
3870 if (!POINTER_TYPE_P (rhs1_type)
3871 || !useless_type_conversion_p (lhs_type, rhs1_type)
3872 || !ptrofftype_p (rhs2_type))
3873 {
3874 error ("type mismatch in pointer plus expression");
3875 debug_generic_stmt (lhs_type);
3876 debug_generic_stmt (rhs1_type);
3877 debug_generic_stmt (rhs2_type);
3878 return true;
3879 }
3880
3881 return false;
3882 }
3883
3884 case TRUTH_ANDIF_EXPR:
3885 case TRUTH_ORIF_EXPR:
3886 case TRUTH_AND_EXPR:
3887 case TRUTH_OR_EXPR:
3888 case TRUTH_XOR_EXPR:
3889
3890 gcc_unreachable ();
3891
3892 case LT_EXPR:
3893 case LE_EXPR:
3894 case GT_EXPR:
3895 case GE_EXPR:
3896 case EQ_EXPR:
3897 case NE_EXPR:
3898 case UNORDERED_EXPR:
3899 case ORDERED_EXPR:
3900 case UNLT_EXPR:
3901 case UNLE_EXPR:
3902 case UNGT_EXPR:
3903 case UNGE_EXPR:
3904 case UNEQ_EXPR:
3905 case LTGT_EXPR:
3906 /* Comparisons are also binary, but the result type is not
3907 connected to the operand types. */
3908 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
3909
3910 case WIDEN_MULT_EXPR:
3911 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3912 return true;
3913 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3914 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3915
3916 case WIDEN_SUM_EXPR:
3917 case VEC_WIDEN_MULT_HI_EXPR:
3918 case VEC_WIDEN_MULT_LO_EXPR:
3919 case VEC_WIDEN_MULT_EVEN_EXPR:
3920 case VEC_WIDEN_MULT_ODD_EXPR:
3921 case VEC_PACK_TRUNC_EXPR:
3922 case VEC_PACK_SAT_EXPR:
3923 case VEC_PACK_FIX_TRUNC_EXPR:
3924 /* FIXME. */
3925 return false;
3926
3927 case MULT_EXPR:
3928 case MULT_HIGHPART_EXPR:
3929 case TRUNC_DIV_EXPR:
3930 case CEIL_DIV_EXPR:
3931 case FLOOR_DIV_EXPR:
3932 case ROUND_DIV_EXPR:
3933 case TRUNC_MOD_EXPR:
3934 case CEIL_MOD_EXPR:
3935 case FLOOR_MOD_EXPR:
3936 case ROUND_MOD_EXPR:
3937 case RDIV_EXPR:
3938 case EXACT_DIV_EXPR:
3939 case MIN_EXPR:
3940 case MAX_EXPR:
3941 case BIT_IOR_EXPR:
3942 case BIT_XOR_EXPR:
3943 case BIT_AND_EXPR:
3944 /* Continue with generic binary expression handling. */
3945 break;
3946
3947 default:
3948 gcc_unreachable ();
3949 }
3950
3951 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3952 || !useless_type_conversion_p (lhs_type, rhs2_type))
3953 {
3954 error ("type mismatch in binary expression");
3955 debug_generic_stmt (lhs_type);
3956 debug_generic_stmt (rhs1_type);
3957 debug_generic_stmt (rhs2_type);
3958 return true;
3959 }
3960
3961 return false;
3962 }
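
/* Editorial sketch (not part of the verifier): examples of operand
   types the shift checks above accept or reject, assuming the usual
   V4SI/V4SF vector types:

     int  = int  << int     OK
     v4si = v4si << int     OK (vector shifted by a scalar)
     v4si = v4si << v4si    OK (vector shifted by an int vector)
     v4si = v4si << v4sf    rejected (shift count not integral)

   In every case the lhs type must additionally be trivially
   convertible from the first operand's type.  */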
3963
3964 /* Verify a gimple assignment statement STMT with a ternary rhs.
3965 Returns true if anything is wrong. */
3966
3967 static bool
3968 verify_gimple_assign_ternary (gassign *stmt)
3969 {
3970 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3971 tree lhs = gimple_assign_lhs (stmt);
3972 tree lhs_type = TREE_TYPE (lhs);
3973 tree rhs1 = gimple_assign_rhs1 (stmt);
3974 tree rhs1_type = TREE_TYPE (rhs1);
3975 tree rhs2 = gimple_assign_rhs2 (stmt);
3976 tree rhs2_type = TREE_TYPE (rhs2);
3977 tree rhs3 = gimple_assign_rhs3 (stmt);
3978 tree rhs3_type = TREE_TYPE (rhs3);
3979
3980 if (!is_gimple_reg (lhs))
3981 {
3982 error ("non-register as LHS of ternary operation");
3983 return true;
3984 }
3985
3986 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3987 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3988 || !is_gimple_val (rhs2)
3989 || !is_gimple_val (rhs3))
3990 {
3991 error ("invalid operands in ternary operation");
3992 return true;
3993 }
3994
3995 /* First handle operations that involve different types. */
3996 switch (rhs_code)
3997 {
3998 case WIDEN_MULT_PLUS_EXPR:
3999 case WIDEN_MULT_MINUS_EXPR:
4000 if ((!INTEGRAL_TYPE_P (rhs1_type)
4001 && !FIXED_POINT_TYPE_P (rhs1_type))
4002 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4003 || !useless_type_conversion_p (lhs_type, rhs3_type)
4004 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4005 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4006 {
4007 error ("type mismatch in widening multiply-accumulate expression");
4008 debug_generic_expr (lhs_type);
4009 debug_generic_expr (rhs1_type);
4010 debug_generic_expr (rhs2_type);
4011 debug_generic_expr (rhs3_type);
4012 return true;
4013 }
4014 break;
4015
4016 case FMA_EXPR:
4017 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4018 || !useless_type_conversion_p (lhs_type, rhs2_type)
4019 || !useless_type_conversion_p (lhs_type, rhs3_type))
4020 {
4021 error ("type mismatch in fused multiply-add expression");
4022 debug_generic_expr (lhs_type);
4023 debug_generic_expr (rhs1_type);
4024 debug_generic_expr (rhs2_type);
4025 debug_generic_expr (rhs3_type);
4026 return true;
4027 }
4028 break;
4029
4030 case VEC_COND_EXPR:
4031 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4032 || TYPE_VECTOR_SUBPARTS (rhs1_type)
4033 != TYPE_VECTOR_SUBPARTS (lhs_type))
4034 {
4035 error ("the first argument of a VEC_COND_EXPR must be of a "
4036 "boolean vector type of the same number of elements "
4037 "as the result");
4038 debug_generic_expr (lhs_type);
4039 debug_generic_expr (rhs1_type);
4040 return true;
4041 }
4042 /* Fallthrough. */
4043 case COND_EXPR:
4044 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4045 || !useless_type_conversion_p (lhs_type, rhs3_type))
4046 {
4047 error ("type mismatch in conditional expression");
4048 debug_generic_expr (lhs_type);
4049 debug_generic_expr (rhs2_type);
4050 debug_generic_expr (rhs3_type);
4051 return true;
4052 }
4053 break;
4054
4055 case VEC_PERM_EXPR:
4056 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4057 || !useless_type_conversion_p (lhs_type, rhs2_type))
4058 {
4059 error ("type mismatch in vector permute expression");
4060 debug_generic_expr (lhs_type);
4061 debug_generic_expr (rhs1_type);
4062 debug_generic_expr (rhs2_type);
4063 debug_generic_expr (rhs3_type);
4064 return true;
4065 }
4066
4067 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4068 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4069 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4070 {
4071 error ("vector types expected in vector permute expression");
4072 debug_generic_expr (lhs_type);
4073 debug_generic_expr (rhs1_type);
4074 debug_generic_expr (rhs2_type);
4075 debug_generic_expr (rhs3_type);
4076 return true;
4077 }
4078
4079 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4080 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4081 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4082 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4083 != TYPE_VECTOR_SUBPARTS (lhs_type))
4084 {
4085 error ("vectors with different element counts found "
4086 "in vector permute expression");
4087 debug_generic_expr (lhs_type);
4088 debug_generic_expr (rhs1_type);
4089 debug_generic_expr (rhs2_type);
4090 debug_generic_expr (rhs3_type);
4091 return true;
4092 }
4093
4094 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4095 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4096 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4097 {
4098 error ("invalid mask type in vector permute expression");
4099 debug_generic_expr (lhs_type);
4100 debug_generic_expr (rhs1_type);
4101 debug_generic_expr (rhs2_type);
4102 debug_generic_expr (rhs3_type);
4103 return true;
4104 }
4105
4106 return false;
4107
4108 case SAD_EXPR:
4109 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4110 || !useless_type_conversion_p (lhs_type, rhs3_type)
4111 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4112 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4113 {
4114 error ("type mismatch in sad expression");
4115 debug_generic_expr (lhs_type);
4116 debug_generic_expr (rhs1_type);
4117 debug_generic_expr (rhs2_type);
4118 debug_generic_expr (rhs3_type);
4119 return true;
4120 }
4121
4122 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4123 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4124 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4125 {
4126 error ("vector types expected in sad expression");
4127 debug_generic_expr (lhs_type);
4128 debug_generic_expr (rhs1_type);
4129 debug_generic_expr (rhs2_type);
4130 debug_generic_expr (rhs3_type);
4131 return true;
4132 }
4133
4134 return false;
4135
4136 case BIT_INSERT_EXPR:
4137 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4138 {
4139 error ("type mismatch in BIT_INSERT_EXPR");
4140 debug_generic_expr (lhs_type);
4141 debug_generic_expr (rhs1_type);
4142 return true;
4143 }
4144 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4145 && INTEGRAL_TYPE_P (rhs2_type))
4146 || (VECTOR_TYPE_P (rhs1_type)
4147 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4148 {
4149 error ("invalid type combination in BIT_INSERT_EXPR");
4150 debug_generic_expr (rhs1_type);
4151 debug_generic_expr (rhs2_type);
4152 return true;
4153 }
4154 if (! tree_fits_uhwi_p (rhs3)
4155 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4156 {
4157 error ("invalid position or size in BIT_INSERT_EXPR");
4158 return true;
4159 }
4160 if (INTEGRAL_TYPE_P (rhs1_type))
4161 {
4162 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4163 if (bitpos >= TYPE_PRECISION (rhs1_type)
4164 || (bitpos + TYPE_PRECISION (rhs2_type)
4165 > TYPE_PRECISION (rhs1_type)))
4166 {
4167 error ("insertion out of range in BIT_INSERT_EXPR");
4168 return true;
4169 }
4170 }
4171 else if (VECTOR_TYPE_P (rhs1_type))
4172 {
4173 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4174 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4175 if (bitpos % bitsize != 0)
4176 {
4177 error ("vector insertion not at element boundary");
4178 return true;
4179 }
4180 }
4181 return false;
4182
4183 case DOT_PROD_EXPR:
4184 case REALIGN_LOAD_EXPR:
4185 /* FIXME. */
4186 return false;
4187
4188 default:
4189 gcc_unreachable ();
4190 }
4191 return false;
4192 }
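
/* Editorial sketch (hypothetical operands): positions accepted by the
   BIT_INSERT_EXPR checks above, for a 32-bit integer destination and
   a V4SI vector destination:

     i32  = BIT_INSERT_EXPR <i32, i8, 8>     OK, replaces bits 8..15
     i32  = BIT_INSERT_EXPR <i32, i8, 28>    rejected, runs past bit 31
     v4si = BIT_INSERT_EXPR <v4si, i32, 32>  OK, replaces element 1
     v4si = BIT_INSERT_EXPR <v4si, i32, 16>  rejected, not on an
                                             element boundary  */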
4193
4194 /* Verify a gimple assignment statement STMT with a single rhs.
4195 Returns true if anything is wrong. */
4196
4197 static bool
4198 verify_gimple_assign_single (gassign *stmt)
4199 {
4200 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4201 tree lhs = gimple_assign_lhs (stmt);
4202 tree lhs_type = TREE_TYPE (lhs);
4203 tree rhs1 = gimple_assign_rhs1 (stmt);
4204 tree rhs1_type = TREE_TYPE (rhs1);
4205 bool res = false;
4206
4207 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4208 {
4209 error ("non-trivial conversion at assignment");
4210 debug_generic_expr (lhs_type);
4211 debug_generic_expr (rhs1_type);
4212 return true;
4213 }
4214
4215 if (gimple_clobber_p (stmt)
4216 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4217 {
4218 error ("non-decl/MEM_REF LHS in clobber statement");
4219 debug_generic_expr (lhs);
4220 return true;
4221 }
4222
4223 if (handled_component_p (lhs)
4224 || TREE_CODE (lhs) == MEM_REF
4225 || TREE_CODE (lhs) == TARGET_MEM_REF)
4226 res |= verify_types_in_gimple_reference (lhs, true);
4227
4228 /* Special codes we cannot handle via their class. */
4229 switch (rhs_code)
4230 {
4231 case ADDR_EXPR:
4232 {
4233 tree op = TREE_OPERAND (rhs1, 0);
4234 if (!is_gimple_addressable (op))
4235 {
4236 error ("invalid operand in unary expression");
4237 return true;
4238 }
4239
4240 /* Technically there is no longer a need for matching types, but
4241 gimple hygiene asks for this check. In LTO we can end up
4242 combining incompatible units and thus get addresses of globals
4243 whose type has been changed to a common one. */
4244 if (!in_lto_p
4245 && !types_compatible_p (TREE_TYPE (op),
4246 TREE_TYPE (TREE_TYPE (rhs1)))
4247 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4248 TREE_TYPE (op)))
4249 {
4250 error ("type mismatch in address expression");
4251 debug_generic_stmt (TREE_TYPE (rhs1));
4252 debug_generic_stmt (TREE_TYPE (op));
4253 return true;
4254 }
4255
4256 return verify_types_in_gimple_reference (op, true);
4257 }
4258
4259 /* tcc_reference */
4260 case INDIRECT_REF:
4261 error ("INDIRECT_REF in gimple IL");
4262 return true;
4263
4264 case COMPONENT_REF:
4265 case BIT_FIELD_REF:
4266 case ARRAY_REF:
4267 case ARRAY_RANGE_REF:
4268 case VIEW_CONVERT_EXPR:
4269 case REALPART_EXPR:
4270 case IMAGPART_EXPR:
4271 case TARGET_MEM_REF:
4272 case MEM_REF:
4273 if (!is_gimple_reg (lhs)
4274 && is_gimple_reg_type (TREE_TYPE (lhs)))
4275 {
4276 error ("invalid rhs for gimple memory store");
4277 debug_generic_stmt (lhs);
4278 debug_generic_stmt (rhs1);
4279 return true;
4280 }
4281 return res || verify_types_in_gimple_reference (rhs1, false);
4282
4283 /* tcc_constant */
4284 case SSA_NAME:
4285 case INTEGER_CST:
4286 case REAL_CST:
4287 case FIXED_CST:
4288 case COMPLEX_CST:
4289 case VECTOR_CST:
4290 case STRING_CST:
4291 return res;
4292
4293 /* tcc_declaration */
4294 case CONST_DECL:
4295 return res;
4296 case VAR_DECL:
4297 case PARM_DECL:
4298 if (!is_gimple_reg (lhs)
4299 && !is_gimple_reg (rhs1)
4300 && is_gimple_reg_type (TREE_TYPE (lhs)))
4301 {
4302 error ("invalid rhs for gimple memory store");
4303 debug_generic_stmt (lhs);
4304 debug_generic_stmt (rhs1);
4305 return true;
4306 }
4307 return res;
4308
4309 case CONSTRUCTOR:
4310 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4311 {
4312 unsigned int i;
4313 tree elt_i, elt_v, elt_t = NULL_TREE;
4314
4315 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4316 return res;
4317 /* For vector CONSTRUCTORs we require that either it is an empty
4318 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4319 (then the element count must be correct to cover the whole
4320 outer vector and the index must be NULL on all elements), or it
4321 is a CONSTRUCTOR of scalar elements, where as an exception we
4322 allow a smaller number of elements (assuming zero filling) and
4323 consecutive indexes as compared to NULL indexes (such
4324 CONSTRUCTORs can appear in the IL from FEs). */
4325 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4326 {
4327 if (elt_t == NULL_TREE)
4328 {
4329 elt_t = TREE_TYPE (elt_v);
4330 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4331 {
4333 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4334 TREE_TYPE (elt_t)))
4335 {
4336 error ("incorrect type of vector CONSTRUCTOR"
4337 " elements");
4338 debug_generic_stmt (rhs1);
4339 return true;
4340 }
4341 else if (CONSTRUCTOR_NELTS (rhs1)
4342 * TYPE_VECTOR_SUBPARTS (elt_t)
4343 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4344 {
4345 error ("incorrect number of vector CONSTRUCTOR"
4346 " elements");
4347 debug_generic_stmt (rhs1);
4348 return true;
4349 }
4350 }
4351 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4352 elt_t))
4353 {
4354 error ("incorrect type of vector CONSTRUCTOR elements");
4355 debug_generic_stmt (rhs1);
4356 return true;
4357 }
4358 else if (CONSTRUCTOR_NELTS (rhs1)
4359 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4360 {
4361 error ("incorrect number of vector CONSTRUCTOR elements");
4362 debug_generic_stmt (rhs1);
4363 return true;
4364 }
4365 }
4366 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4367 {
4368 error ("incorrect type of vector CONSTRUCTOR elements");
4369 debug_generic_stmt (rhs1);
4370 return true;
4371 }
4372 if (elt_i != NULL_TREE
4373 && (TREE_CODE (elt_t) == VECTOR_TYPE
4374 || TREE_CODE (elt_i) != INTEGER_CST
4375 || compare_tree_int (elt_i, i) != 0))
4376 {
4377 error ("vector CONSTRUCTOR with non-NULL element index");
4378 debug_generic_stmt (rhs1);
4379 return true;
4380 }
4381 if (!is_gimple_val (elt_v))
4382 {
4383 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4384 debug_generic_stmt (rhs1);
4385 return true;
4386 }
4387 }
4388 }
4389 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4390 {
4391 error ("non-vector CONSTRUCTOR with elements");
4392 debug_generic_stmt (rhs1);
4393 return true;
4394 }
4395 return res;
4396 case OBJ_TYPE_REF:
4397 case ASSERT_EXPR:
4398 case WITH_SIZE_EXPR:
4399 /* FIXME. */
4400 return res;
4401
4402 default:;
4403 }
4404
4405 return res;
4406 }
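
/* Editorial sketch (hypothetical V4SI/V2SI types): vector CONSTRUCTORs
   as constrained by the checks above:

     v4si = {};                   OK, empty
     v4si = {_1, _2, _3, _4};     OK, scalar elements
     v4si = {_1, _2};             OK, fewer scalars imply zero filling
     v4si = {v2si_1, v2si_2};     OK, two vector halves
     v4si = {v2si_1};             rejected, vector elements must cover
                                  the whole result  */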
4407
4408 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4409 is a problem, otherwise false. */
4410
4411 static bool
4412 verify_gimple_assign (gassign *stmt)
4413 {
4414 switch (gimple_assign_rhs_class (stmt))
4415 {
4416 case GIMPLE_SINGLE_RHS:
4417 return verify_gimple_assign_single (stmt);
4418
4419 case GIMPLE_UNARY_RHS:
4420 return verify_gimple_assign_unary (stmt);
4421
4422 case GIMPLE_BINARY_RHS:
4423 return verify_gimple_assign_binary (stmt);
4424
4425 case GIMPLE_TERNARY_RHS:
4426 return verify_gimple_assign_ternary (stmt);
4427
4428 default:
4429 gcc_unreachable ();
4430 }
4431 }
4432
4433 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4434 is a problem, otherwise false. */
4435
4436 static bool
4437 verify_gimple_return (greturn *stmt)
4438 {
4439 tree op = gimple_return_retval (stmt);
4440 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4441
4442 /* We cannot insist on a return value being present, as we do not
4443 fix up missing return values from the original source. */
4444 if (op == NULL)
4445 return false;
4446
4447 if (!is_gimple_val (op)
4448 && TREE_CODE (op) != RESULT_DECL)
4449 {
4450 error ("invalid operand in return statement");
4451 debug_generic_stmt (op);
4452 return true;
4453 }
4454
4455 if ((TREE_CODE (op) == RESULT_DECL
4456 && DECL_BY_REFERENCE (op))
4457 || (TREE_CODE (op) == SSA_NAME
4458 && SSA_NAME_VAR (op)
4459 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4460 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4461 op = TREE_TYPE (op);
4462
4463 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4464 {
4465 error ("invalid conversion in return statement");
4466 debug_generic_stmt (restype);
4467 debug_generic_stmt (TREE_TYPE (op));
4468 return true;
4469 }
4470
4471 return false;
4472 }
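
/* Editorial note: for a function returning an aggregate by invisible
   reference, the RESULT_DECL has DECL_BY_REFERENCE set and its type is
   a pointer to the declared return type.  Replacing OP by its type
   above makes the final TREE_TYPE (op) yield the pointed-to type, so
   a hypothetical "struct big f (void)" is checked as struct big
   against struct big rather than against struct big *.  */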
4473
4474
4475 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4476 is a problem, otherwise false. */
4477
4478 static bool
4479 verify_gimple_goto (ggoto *stmt)
4480 {
4481 tree dest = gimple_goto_dest (stmt);
4482
4483 /* ??? We have two canonical forms of direct goto destinations, a
4484 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4485 if (TREE_CODE (dest) != LABEL_DECL
4486 && (!is_gimple_val (dest)
4487 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4488 {
4489 error ("goto destination is neither a label nor a pointer");
4490 return true;
4491 }
4492
4493 return false;
4494 }
4495
4496 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4497 is a problem, otherwise false. */
4498
4499 static bool
4500 verify_gimple_switch (gswitch *stmt)
4501 {
4502 unsigned int i, n;
4503 tree elt, prev_upper_bound = NULL_TREE;
4504 tree index_type, elt_type = NULL_TREE;
4505
4506 if (!is_gimple_val (gimple_switch_index (stmt)))
4507 {
4508 error ("invalid operand to switch statement");
4509 debug_generic_stmt (gimple_switch_index (stmt));
4510 return true;
4511 }
4512
4513 index_type = TREE_TYPE (gimple_switch_index (stmt));
4514 if (! INTEGRAL_TYPE_P (index_type))
4515 {
4516 error ("non-integral type switch statement");
4517 debug_generic_expr (index_type);
4518 return true;
4519 }
4520
4521 elt = gimple_switch_label (stmt, 0);
4522 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4523 {
4524 error ("invalid default case label in switch statement");
4525 debug_generic_expr (elt);
4526 return true;
4527 }
4528
4529 n = gimple_switch_num_labels (stmt);
4530 for (i = 1; i < n; i++)
4531 {
4532 elt = gimple_switch_label (stmt, i);
4533
4534 if (! CASE_LOW (elt))
4535 {
4536 error ("invalid case label in switch statement");
4537 debug_generic_expr (elt);
4538 return true;
4539 }
4540 if (CASE_HIGH (elt)
4541 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4542 {
4543 error ("invalid case range in switch statement");
4544 debug_generic_expr (elt);
4545 return true;
4546 }
4547
4548 if (elt_type)
4549 {
4550 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4551 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4552 {
4553 error ("type mismatch for case label in switch statement");
4554 debug_generic_expr (elt);
4555 return true;
4556 }
4557 }
4558 else
4559 {
4560 elt_type = TREE_TYPE (CASE_LOW (elt));
4561 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4562 {
4563 error ("type precision mismatch in switch statement");
4564 return true;
4565 }
4566 }
4567
4568 if (prev_upper_bound)
4569 {
4570 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4571 {
4572 error ("case labels not sorted in switch statement");
4573 return true;
4574 }
4575 }
4576
4577 prev_upper_bound = CASE_HIGH (elt);
4578 if (! prev_upper_bound)
4579 prev_upper_bound = CASE_LOW (elt);
4580 }
4581
4582 return false;
4583 }
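
/* Editorial sketch: a case vector the checks above accept.  Label 0
   is the default (no CASE_LOW), the others are sorted, disjoint and
   share one type:

     switch (_1) <default: L0,
                  case 1: L1,
                  case 3 ... 5: L2,
                  case 9: L3>

   Reordering, say, "case 3" before "case 1" would trip the
   "case labels not sorted" diagnostic above.  */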
4584
4585 /* Verify a gimple debug statement STMT.
4586 Returns true if anything is wrong. */
4587
4588 static bool
4589 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4590 {
4591 /* There isn't much that could be wrong in a gimple debug stmt. A
4592 gimple debug bind stmt, for example, maps a tree that is usually
4593 a VAR_DECL or a PARM_DECL (but could also be some scalarized
4594 component or member of an aggregate type) to another tree that
4595 can be an arbitrary expression. These stmts expand into debug
4596 insns, and are converted to debug notes by var-tracking.c. */
4597 return false;
4598 }
4599
4600 /* Verify a gimple label statement STMT.
4601 Returns true if anything is wrong. */
4602
4603 static bool
4604 verify_gimple_label (glabel *stmt)
4605 {
4606 tree decl = gimple_label_label (stmt);
4607 int uid;
4608 bool err = false;
4609
4610 if (TREE_CODE (decl) != LABEL_DECL)
4611 return true;
4612 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4613 && DECL_CONTEXT (decl) != current_function_decl)
4614 {
4615 error ("label's context is not the current function decl");
4616 err |= true;
4617 }
4618
4619 uid = LABEL_DECL_UID (decl);
4620 if (cfun->cfg
4621 && (uid == -1
4622 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4623 {
4624 error ("incorrect entry in label_to_block_map");
4625 err |= true;
4626 }
4627
4628 uid = EH_LANDING_PAD_NR (decl);
4629 if (uid)
4630 {
4631 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4632 if (decl != lp->post_landing_pad)
4633 {
4634 error ("incorrect setting of landing pad number");
4635 err |= true;
4636 }
4637 }
4638
4639 return err;
4640 }
4641
4642 /* Verify a gimple cond statement STMT.
4643 Returns true if anything is wrong. */
4644
4645 static bool
4646 verify_gimple_cond (gcond *stmt)
4647 {
4648 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4649 {
4650 error ("invalid comparison code in gimple cond");
4651 return true;
4652 }
4653 if (!(!gimple_cond_true_label (stmt)
4654 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4655 || !(!gimple_cond_false_label (stmt)
4656 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4657 {
4658 error ("invalid labels in gimple cond");
4659 return true;
4660 }
4661
4662 return verify_gimple_comparison (boolean_type_node,
4663 gimple_cond_lhs (stmt),
4664 gimple_cond_rhs (stmt),
4665 gimple_cond_code (stmt));
4666 }
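
/* Editorial sketch: a well-formed gcond as accepted above, e.g.

     if (_1 > _2) goto <bb 3>; else goto <bb 4>;

   GIMPLE has no bare "if (_1)"; the gimplifier emits "if (_1 != 0)"
   instead, so the tcc_comparison requirement always applies.  */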
4667
4668 /* Verify the GIMPLE statement STMT. Returns true if there is an
4669 error, otherwise false. */
4670
4671 static bool
4672 verify_gimple_stmt (gimple *stmt)
4673 {
4674 switch (gimple_code (stmt))
4675 {
4676 case GIMPLE_ASSIGN:
4677 return verify_gimple_assign (as_a <gassign *> (stmt));
4678
4679 case GIMPLE_LABEL:
4680 return verify_gimple_label (as_a <glabel *> (stmt));
4681
4682 case GIMPLE_CALL:
4683 return verify_gimple_call (as_a <gcall *> (stmt));
4684
4685 case GIMPLE_COND:
4686 return verify_gimple_cond (as_a <gcond *> (stmt));
4687
4688 case GIMPLE_GOTO:
4689 return verify_gimple_goto (as_a <ggoto *> (stmt));
4690
4691 case GIMPLE_SWITCH:
4692 return verify_gimple_switch (as_a <gswitch *> (stmt));
4693
4694 case GIMPLE_RETURN:
4695 return verify_gimple_return (as_a <greturn *> (stmt));
4696
4697 case GIMPLE_ASM:
4698 return false;
4699
4700 case GIMPLE_TRANSACTION:
4701 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4702
4703 /* Tuples that do not have tree operands. */
4704 case GIMPLE_NOP:
4705 case GIMPLE_PREDICT:
4706 case GIMPLE_RESX:
4707 case GIMPLE_EH_DISPATCH:
4708 case GIMPLE_EH_MUST_NOT_THROW:
4709 return false;
4710
4711 CASE_GIMPLE_OMP:
4712 /* OpenMP directives are validated by the FE and never operated
4713 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4714 non-gimple expressions when the main index variable has had
4715 its address taken. This does not affect the loop itself
4716 because the header of a GIMPLE_OMP_FOR is merely used to determine
4717 how to set up the parallel iteration. */
4718 return false;
4719
4720 case GIMPLE_DEBUG:
4721 return verify_gimple_debug (stmt);
4722
4723 default:
4724 gcc_unreachable ();
4725 }
4726 }
4727
4728 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4729 and false otherwise. */
4730
4731 static bool
4732 verify_gimple_phi (gimple *phi)
4733 {
4734 bool err = false;
4735 unsigned i;
4736 tree phi_result = gimple_phi_result (phi);
4737 bool virtual_p;
4738
4739 if (!phi_result)
4740 {
4741 error ("invalid PHI result");
4742 return true;
4743 }
4744
4745 virtual_p = virtual_operand_p (phi_result);
4746 if (TREE_CODE (phi_result) != SSA_NAME
4747 || (virtual_p
4748 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4749 {
4750 error ("invalid PHI result");
4751 err = true;
4752 }
4753
4754 for (i = 0; i < gimple_phi_num_args (phi); i++)
4755 {
4756 tree t = gimple_phi_arg_def (phi, i);
4757
4758 if (!t)
4759 {
4760 error ("missing PHI def");
4761 err |= true;
4762 continue;
4763 }
4764 /* Addressable variables do have SSA_NAMEs but they
4765 are not considered gimple values. */
4766 else if ((TREE_CODE (t) == SSA_NAME
4767 && virtual_p != virtual_operand_p (t))
4768 || (virtual_p
4769 && (TREE_CODE (t) != SSA_NAME
4770 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4771 || (!virtual_p
4772 && !is_gimple_val (t)))
4773 {
4774 error ("invalid PHI argument");
4775 debug_generic_expr (t);
4776 err |= true;
4777 }
4778 #ifdef ENABLE_TYPES_CHECKING
4779 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4780 {
4781 error ("incompatible types in PHI argument %u", i);
4782 debug_generic_stmt (TREE_TYPE (phi_result));
4783 debug_generic_stmt (TREE_TYPE (t));
4784 err |= true;
4785 }
4786 #endif
4787 }
4788
4789 return err;
4790 }
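
/* Editorial sketch: PHIs the checks above accept.  A normal PHI takes
   GIMPLE values; a virtual PHI must use the one virtual operand for
   both its result and every argument:

     x_3 = PHI <x_1(2), 7(3)>                normal, values only
     .MEM_5 = PHI <.MEM_2(2), .MEM_4(3)>     virtual, .MEM only

   Mixing the two kinds in one PHI is diagnosed as an invalid
   argument.  */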
4791
4792 /* Verify the GIMPLE statements inside the sequence STMTS. */
4793
4794 static bool
4795 verify_gimple_in_seq_2 (gimple_seq stmts)
4796 {
4797 gimple_stmt_iterator ittr;
4798 bool err = false;
4799
4800 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4801 {
4802 gimple *stmt = gsi_stmt (ittr);
4803
4804 switch (gimple_code (stmt))
4805 {
4806 case GIMPLE_BIND:
4807 err |= verify_gimple_in_seq_2 (
4808 gimple_bind_body (as_a <gbind *> (stmt)));
4809 break;
4810
4811 case GIMPLE_TRY:
4812 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4813 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4814 break;
4815
4816 case GIMPLE_EH_FILTER:
4817 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4818 break;
4819
4820 case GIMPLE_EH_ELSE:
4821 {
4822 geh_else *eh_else = as_a <geh_else *> (stmt);
4823 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4824 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4825 }
4826 break;
4827
4828 case GIMPLE_CATCH:
4829 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4830 as_a <gcatch *> (stmt)));
4831 break;
4832
4833 case GIMPLE_TRANSACTION:
4834 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4835 break;
4836
4837 default:
4838 {
4839 bool err2 = verify_gimple_stmt (stmt);
4840 if (err2)
4841 debug_gimple_stmt (stmt);
4842 err |= err2;
4843 }
4844 }
4845 }
4846
4847 return err;
4848 }
4849
4850 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4851 is a problem, otherwise false. */
4852
4853 static bool
4854 verify_gimple_transaction (gtransaction *stmt)
4855 {
4856 tree lab;
4857
4858 lab = gimple_transaction_label_norm (stmt);
4859 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4860 return true;
4861 lab = gimple_transaction_label_uninst (stmt);
4862 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4863 return true;
4864 lab = gimple_transaction_label_over (stmt);
4865 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4866 return true;
4867
4868 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4869 }
4870
4871
4872 /* Verify the GIMPLE statements inside the statement list STMTS. */
4873
4874 DEBUG_FUNCTION void
4875 verify_gimple_in_seq (gimple_seq stmts)
4876 {
4877 timevar_push (TV_TREE_STMT_VERIFY);
4878 if (verify_gimple_in_seq_2 (stmts))
4879 internal_error ("verify_gimple failed");
4880 timevar_pop (TV_TREE_STMT_VERIFY);
4881 }
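
/* Editorial note: as a DEBUG_FUNCTION this verifier is also meant to
   be callable by hand from a debugger session, e.g.

     (gdb) call verify_gimple_in_seq (gimple_body (current_function_decl))

   It aborts through internal_error rather than returning a status.  */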
4882
4883 /* Return true when T can be shared. */
4884
4885 static bool
4886 tree_node_can_be_shared (tree t)
4887 {
4888 if (IS_TYPE_OR_DECL_P (t)
4889 || is_gimple_min_invariant (t)
4890 || TREE_CODE (t) == SSA_NAME
4891 || t == error_mark_node
4892 || TREE_CODE (t) == IDENTIFIER_NODE)
4893 return true;
4894
4895 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4896 return true;
4900
4901 return false;
4902 }
4903
4904 /* Called via walk_tree. Verify tree sharing. */
4905
4906 static tree
4907 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4908 {
4909 hash_set<void *> *visited = (hash_set<void *> *) data;
4910
4911 if (tree_node_can_be_shared (*tp))
4912 {
4913 *walk_subtrees = false;
4914 return NULL;
4915 }
4916
4917 if (visited->add (*tp))
4918 return *tp;
4919
4920 return NULL;
4921 }
4922
4923 /* Called via walk_gimple_stmt. Verify tree sharing. */
4924
4925 static tree
4926 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4927 {
4928 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4929 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4930 }
4931
4932 static bool eh_error_found;
4933 bool
4934 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4935 hash_set<gimple *> *visited)
4936 {
4937 if (!visited->contains (stmt))
4938 {
4939 error ("dead STMT in EH table");
4940 debug_gimple_stmt (stmt);
4941 eh_error_found = true;
4942 }
4943 return true;
4944 }
4945
4946 /* Verify that the block of location LOC is in BLOCKS. */
4947
4948 static bool
4949 verify_location (hash_set<tree> *blocks, location_t loc)
4950 {
4951 tree block = LOCATION_BLOCK (loc);
4952 if (block != NULL_TREE
4953 && !blocks->contains (block))
4954 {
4955 error ("location references block not in block tree");
4956 return true;
4957 }
4958 if (block != NULL_TREE)
4959 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4960 return false;
4961 }
4962
4963 /* Called via walk_tree. Verify that expressions have no blocks. */
4964
4965 static tree
4966 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4967 {
4968 if (!EXPR_P (*tp))
4969 {
4970 *walk_subtrees = false;
4971 return NULL;
4972 }
4973
4974 location_t loc = EXPR_LOCATION (*tp);
4975 if (LOCATION_BLOCK (loc) != NULL)
4976 return *tp;
4977
4978 return NULL;
4979 }
4980
4981 /* Called via walk_tree. Verify locations of expressions. */
4982
4983 static tree
4984 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4985 {
4986 hash_set<tree> *blocks = (hash_set<tree> *) data;
4987
4988 if (TREE_CODE (*tp) == VAR_DECL
4989 && DECL_HAS_DEBUG_EXPR_P (*tp))
4990 {
4991 tree t = DECL_DEBUG_EXPR (*tp);
4992 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4993 if (addr)
4994 return addr;
4995 }
4996 if ((TREE_CODE (*tp) == VAR_DECL
4997 || TREE_CODE (*tp) == PARM_DECL
4998 || TREE_CODE (*tp) == RESULT_DECL)
4999 && DECL_HAS_VALUE_EXPR_P (*tp))
5000 {
5001 tree t = DECL_VALUE_EXPR (*tp);
5002 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5003 if (addr)
5004 return addr;
5005 }
5006
5007 if (!EXPR_P (*tp))
5008 {
5009 *walk_subtrees = false;
5010 return NULL;
5011 }
5012
5013 location_t loc = EXPR_LOCATION (*tp);
5014 if (verify_location (blocks, loc))
5015 return *tp;
5016
5017 return NULL;
5018 }
5019
5020 /* Called via walk_gimple_op. Verify locations of expressions. */
5021
5022 static tree
5023 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5024 {
5025 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5026 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5027 }
5028
5029 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5030
5031 static void
5032 collect_subblocks (hash_set<tree> *blocks, tree block)
5033 {
5034 tree t;
5035 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5036 {
5037 blocks->add (t);
5038 collect_subblocks (blocks, t);
5039 }
5040 }
5041
5042 /* Verify the GIMPLE statements in the CFG of FN. */
5043
5044 DEBUG_FUNCTION void
5045 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5046 {
5047 basic_block bb;
5048 bool err = false;
5049
5050 timevar_push (TV_TREE_STMT_VERIFY);
5051 hash_set<void *> visited;
5052 hash_set<gimple *> visited_stmts;
5053
5054 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5055 hash_set<tree> blocks;
5056 if (DECL_INITIAL (fn->decl))
5057 {
5058 blocks.add (DECL_INITIAL (fn->decl));
5059 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5060 }
5061
5062 FOR_EACH_BB_FN (bb, fn)
5063 {
5064 gimple_stmt_iterator gsi;
5065
5066 for (gphi_iterator gpi = gsi_start_phis (bb);
5067 !gsi_end_p (gpi);
5068 gsi_next (&gpi))
5069 {
5070 gphi *phi = gpi.phi ();
5071 bool err2 = false;
5072 unsigned i;
5073
5074 visited_stmts.add (phi);
5075
5076 if (gimple_bb (phi) != bb)
5077 {
5078 error ("gimple_bb (phi) is set to a wrong basic block");
5079 err2 = true;
5080 }
5081
5082 err2 |= verify_gimple_phi (phi);
5083
5084 /* Only PHI arguments have locations. */
5085 if (gimple_location (phi) != UNKNOWN_LOCATION)
5086 {
5087 error ("PHI node with location");
5088 err2 = true;
5089 }
5090
5091 for (i = 0; i < gimple_phi_num_args (phi); i++)
5092 {
5093 tree arg = gimple_phi_arg_def (phi, i);
5094 tree addr = walk_tree (&arg, verify_node_sharing_1,
5095 &visited, NULL);
5096 if (addr)
5097 {
5098 error ("incorrect sharing of tree nodes");
5099 debug_generic_expr (addr);
5100 err2 |= true;
5101 }
5102 location_t loc = gimple_phi_arg_location (phi, i);
5103 if (virtual_operand_p (gimple_phi_result (phi))
5104 && loc != UNKNOWN_LOCATION)
5105 {
5106 error ("virtual PHI with argument locations");
5107 err2 = true;
5108 }
5109 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5110 if (addr)
5111 {
5112 debug_generic_expr (addr);
5113 err2 = true;
5114 }
5115 err2 |= verify_location (&blocks, loc);
5116 }
5117
5118 if (err2)
5119 debug_gimple_stmt (phi);
5120 err |= err2;
5121 }
5122
5123 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5124 {
5125 gimple *stmt = gsi_stmt (gsi);
5126 bool err2 = false;
5127 struct walk_stmt_info wi;
5128 tree addr;
5129 int lp_nr;
5130
5131 visited_stmts.add (stmt);
5132
5133 if (gimple_bb (stmt) != bb)
5134 {
5135 error ("gimple_bb (stmt) is set to a wrong basic block");
5136 err2 = true;
5137 }
5138
5139 err2 |= verify_gimple_stmt (stmt);
5140 err2 |= verify_location (&blocks, gimple_location (stmt));
5141
5142 memset (&wi, 0, sizeof (wi));
5143 wi.info = (void *) &visited;
5144 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5145 if (addr)
5146 {
5147 error ("incorrect sharing of tree nodes");
5148 debug_generic_expr (addr);
5149 err2 |= true;
5150 }
5151
5152 memset (&wi, 0, sizeof (wi));
5153 wi.info = (void *) &blocks;
5154 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5155 if (addr)
5156 {
5157 debug_generic_expr (addr);
5158 err2 |= true;
5159 }
5160
5161 /* ??? Instead of not checking these stmts at all, the walker
5162 should know its context via wi. */
5163 if (!is_gimple_debug (stmt)
5164 && !is_gimple_omp (stmt))
5165 {
5166 memset (&wi, 0, sizeof (wi));
5167 addr = walk_gimple_op (stmt, verify_expr, &wi);
5168 if (addr)
5169 {
5170 debug_generic_expr (addr);
5171 inform (gimple_location (stmt), "in statement");
5172 err2 |= true;
5173 }
5174 }
5175
5176 /* If the statement is marked as part of an EH region, then it is
5177 expected that the statement could throw. Verify that when
5178 optimizations simplify a statement to the point that it provably
5179 cannot throw, the other data structures are updated to match. */
5181 lp_nr = lookup_stmt_eh_lp (stmt);
5182 if (lp_nr > 0)
5183 {
5184 if (!stmt_could_throw_p (stmt))
5185 {
5186 if (verify_nothrow)
5187 {
5188 error ("statement marked for throw, but doesn%'t");
5189 err2 |= true;
5190 }
5191 }
5192 else if (!gsi_one_before_end_p (gsi))
5193 {
5194 error ("statement marked for throw in middle of block");
5195 err2 |= true;
5196 }
5197 }
5198
5199 if (err2)
5200 debug_gimple_stmt (stmt);
5201 err |= err2;
5202 }
5203 }
5204
5205 eh_error_found = false;
5206 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5207 if (eh_table)
5208 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5209 (&visited_stmts);
5210
5211 if (err || eh_error_found)
5212 internal_error ("verify_gimple failed");
5213
5214 verify_histograms ();
5215 timevar_pop (TV_TREE_STMT_VERIFY);
5216 }
5217
5218
5219 /* Verifies that the flow information is OK. */
5220
5221 static int
5222 gimple_verify_flow_info (void)
5223 {
5224 int err = 0;
5225 basic_block bb;
5226 gimple_stmt_iterator gsi;
5227 gimple *stmt;
5228 edge e;
5229 edge_iterator ei;
5230
5231 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5232 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5233 {
5234 error ("ENTRY_BLOCK has IL associated with it");
5235 err = 1;
5236 }
5237
5238 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5239 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5240 {
5241 error ("EXIT_BLOCK has IL associated with it");
5242 err = 1;
5243 }
5244
5245 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5246 if (e->flags & EDGE_FALLTHRU)
5247 {
5248 error ("fallthru to exit from bb %d", e->src->index);
5249 err = 1;
5250 }
5251
5252 FOR_EACH_BB_FN (bb, cfun)
5253 {
5254 bool found_ctrl_stmt = false;
5255
5256 stmt = NULL;
5257
5258 /* Skip the labels at the start of the basic block. */
5259 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5260 {
5261 tree label;
5262 gimple *prev_stmt = stmt;
5263
5264 stmt = gsi_stmt (gsi);
5265
5266 if (gimple_code (stmt) != GIMPLE_LABEL)
5267 break;
5268
5269 label = gimple_label_label (as_a <glabel *> (stmt));
5270 if (prev_stmt && DECL_NONLOCAL (label))
5271 {
5272 error ("nonlocal label ");
5273 print_generic_expr (stderr, label, 0);
5274 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5275 bb->index);
5276 err = 1;
5277 }
5278
5279 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5280 {
5281 error ("EH landing pad label ");
5282 print_generic_expr (stderr, label, 0);
5283 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5284 bb->index);
5285 err = 1;
5286 }
5287
5288 if (label_to_block (label) != bb)
5289 {
5290 error ("label ");
5291 print_generic_expr (stderr, label, 0);
5292 fprintf (stderr, " to block does not match in bb %d",
5293 bb->index);
5294 err = 1;
5295 }
5296
5297 if (decl_function_context (label) != current_function_decl)
5298 {
5299 error ("label ");
5300 print_generic_expr (stderr, label, 0);
5301 fprintf (stderr, " has incorrect context in bb %d",
5302 bb->index);
5303 err = 1;
5304 }
5305 }
5306
5307 /* Verify that the body of basic block BB is free of control flow. */
5308 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5309 {
5310 gimple *stmt = gsi_stmt (gsi);
5311
5312 if (found_ctrl_stmt)
5313 {
5314 error ("control flow in the middle of basic block %d",
5315 bb->index);
5316 err = 1;
5317 }
5318
5319 if (stmt_ends_bb_p (stmt))
5320 found_ctrl_stmt = true;
5321
5322 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5323 {
5324 error ("label ");
5325 print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5326 fprintf (stderr, " in the middle of basic block %d", bb->index);
5327 err = 1;
5328 }
5329 }
5330
5331 gsi = gsi_last_bb (bb);
5332 if (gsi_end_p (gsi))
5333 continue;
5334
5335 stmt = gsi_stmt (gsi);
5336
5337 if (gimple_code (stmt) == GIMPLE_LABEL)
5338 continue;
5339
5340 err |= verify_eh_edges (stmt);
5341
5342 if (is_ctrl_stmt (stmt))
5343 {
5344 FOR_EACH_EDGE (e, ei, bb->succs)
5345 if (e->flags & EDGE_FALLTHRU)
5346 {
5347 error ("fallthru edge after a control statement in bb %d",
5348 bb->index);
5349 err = 1;
5350 }
5351 }
5352
5353 if (gimple_code (stmt) != GIMPLE_COND)
5354 {
5355 /* Verify that no edges have EDGE_TRUE_VALUE/EDGE_FALSE_VALUE set
5356 after anything other than a GIMPLE_COND. */
5357 FOR_EACH_EDGE (e, ei, bb->succs)
5358 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5359 {
5360 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5361 bb->index);
5362 err = 1;
5363 }
5364 }
5365
5366 switch (gimple_code (stmt))
5367 {
5368 case GIMPLE_COND:
5369 {
5370 edge true_edge;
5371 edge false_edge;
5372
5373 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5374
5375 if (!true_edge
5376 || !false_edge
5377 || !(true_edge->flags & EDGE_TRUE_VALUE)
5378 || !(false_edge->flags & EDGE_FALSE_VALUE)
5379 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5380 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5381 || EDGE_COUNT (bb->succs) >= 3)
5382 {
5383 error ("wrong outgoing edge flags at end of bb %d",
5384 bb->index);
5385 err = 1;
5386 }
5387 }
5388 break;
5389
5390 case GIMPLE_GOTO:
5391 if (simple_goto_p (stmt))
5392 {
5393 error ("explicit goto at end of bb %d", bb->index);
5394 err = 1;
5395 }
5396 else
5397 {
5398 /* FIXME. We should double check that the labels in the
5399 destination blocks have their address taken. */
5400 FOR_EACH_EDGE (e, ei, bb->succs)
5401 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5402 | EDGE_FALSE_VALUE))
5403 || !(e->flags & EDGE_ABNORMAL))
5404 {
5405 error ("wrong outgoing edge flags at end of bb %d",
5406 bb->index);
5407 err = 1;
5408 }
5409 }
5410 break;
5411
5412 case GIMPLE_CALL:
5413 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5414 break;
5415 /* ... fallthru ... */
5416 case GIMPLE_RETURN:
5417 if (!single_succ_p (bb)
5418 || (single_succ_edge (bb)->flags
5419 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5420 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5421 {
5422 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5423 err = 1;
5424 }
5425 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5426 {
5427 error ("return edge does not point to exit in bb %d",
5428 bb->index);
5429 err = 1;
5430 }
5431 break;
5432
5433 case GIMPLE_SWITCH:
5434 {
5435 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5436 tree prev;
5437 edge e;
5438 size_t i, n;
5439
5440 n = gimple_switch_num_labels (switch_stmt);
5441
5442 /* Mark all the destination basic blocks. */
5443 for (i = 0; i < n; ++i)
5444 {
5445 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5446 basic_block label_bb = label_to_block (lab);
5447 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5448 label_bb->aux = (void *)1;
5449 }
5450
5451 /* Verify that the case labels are sorted. */
5452 prev = gimple_switch_label (switch_stmt, 0);
5453 for (i = 1; i < n; ++i)
5454 {
5455 tree c = gimple_switch_label (switch_stmt, i);
5456 if (!CASE_LOW (c))
5457 {
5458 error ("found default case not at the start of "
5459 "case vector");
5460 err = 1;
5461 continue;
5462 }
5463 if (CASE_LOW (prev)
5464 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5465 {
5466 error ("case labels not sorted: ");
5467 print_generic_expr (stderr, prev, 0);
5468 fprintf (stderr, " is greater than ");
5469 print_generic_expr (stderr, c, 0);
5470 fprintf (stderr, " but comes before it.\n");
5471 err = 1;
5472 }
5473 prev = c;
5474 }
5475 /* VRP will remove the default case if it can prove it will
5476 never be executed. So do not verify there always exists
5477 a default case here. */
5478
5479 FOR_EACH_EDGE (e, ei, bb->succs)
5480 {
5481 if (!e->dest->aux)
5482 {
5483 error ("extra outgoing edge %d->%d",
5484 bb->index, e->dest->index);
5485 err = 1;
5486 }
5487
5488 e->dest->aux = (void *)2;
5489 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5490 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5491 {
5492 error ("wrong outgoing edge flags at end of bb %d",
5493 bb->index);
5494 err = 1;
5495 }
5496 }
5497
5498 /* Check that we have all of them. */
5499 for (i = 0; i < n; ++i)
5500 {
5501 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5502 basic_block label_bb = label_to_block (lab);
5503
5504 if (label_bb->aux != (void *)2)
5505 {
5506 error ("missing edge %i->%i", bb->index, label_bb->index);
5507 err = 1;
5508 }
5509 }
5510
5511 FOR_EACH_EDGE (e, ei, bb->succs)
5512 e->dest->aux = (void *)0;
5513 }
5514 break;
5515
5516 case GIMPLE_EH_DISPATCH:
5517 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5518 break;
5519
5520 default:
5521 break;
5522 }
5523 }
5524
5525 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5526 verify_dominators (CDI_DOMINATORS);
5527
5528 return err;
5529 }
5530
5531
5532 /* Updates phi nodes after creating a forwarder block joined
5533 by edge FALLTHRU. */
5534
5535 static void
5536 gimple_make_forwarder_block (edge fallthru)
5537 {
5538 edge e;
5539 edge_iterator ei;
5540 basic_block dummy, bb;
5541 tree var;
5542 gphi_iterator gsi;
5543
5544 dummy = fallthru->src;
5545 bb = fallthru->dest;
5546
5547 if (single_pred_p (bb))
5548 return;
5549
5550 /* If we redirected a branch we must create new PHI nodes at the
5551 start of BB. */
5552 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5553 {
5554 gphi *phi, *new_phi;
5555
5556 phi = gsi.phi ();
5557 var = gimple_phi_result (phi);
5558 new_phi = create_phi_node (var, bb);
5559 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5560 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5561 UNKNOWN_LOCATION);
5562 }
5563
5564 /* Add the arguments we have stored on edges. */
5565 FOR_EACH_EDGE (e, ei, bb->preds)
5566 {
5567 if (e == fallthru)
5568 continue;
5569
5570 flush_pending_stmts (e);
5571 }
5572 }
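
/* Editorial sketch of the update above.  The PHIs initially live in
   DUMMY (the block that was split); BB receives fresh PHIs whose
   FALLTHRU argument is the renamed result of the old PHI, while the
   arguments stored on the redirected edges are flushed afterwards:

     dummy:  x_2 = PHI <a_5(e1), b_6(e2)>        result renamed from x_1
     bb:     x_1 = PHI <x_2(fallthru), c_7(e3)>  */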
5573
5574
5575 /* Return a non-special label at the head of basic block BB.
5576 Create one if it doesn't exist. */
5577
5578 tree
5579 gimple_block_label (basic_block bb)
5580 {
5581 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5582 bool first = true;
5583 tree label;
5584 glabel *stmt;
5585
5586 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5587 {
5588 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5589 if (!stmt)
5590 break;
5591 label = gimple_label_label (stmt);
5592 if (!DECL_NONLOCAL (label))
5593 {
5594 if (!first)
5595 gsi_move_before (&i, &s);
5596 return label;
5597 }
5598 }
5599
5600 label = create_artificial_label (UNKNOWN_LOCATION);
5601 stmt = gimple_build_label (label);
5602 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5603 return label;
5604 }
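
/* Editorial note: a typical use is materializing a jump target when
   redirecting an edge, as in gimple_redirect_edge_and_branch below:

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   Reusing an existing non-nonlocal label avoids piling up artificial
   labels at the head of DEST.  */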
5605
5606
5607 /* Attempt to perform edge redirection by replacing a possibly complex
5608 jump instruction by a goto or by removing the jump completely.
5609 This can apply only if all edges now point to the same block. The
5610 parameters and return values are equivalent to
5611 redirect_edge_and_branch. */
5612
5613 static edge
5614 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5615 {
5616 basic_block src = e->src;
5617 gimple_stmt_iterator i;
5618 gimple *stmt;
5619
5620 /* We can replace or remove a complex jump only when we have exactly
5621 two edges. */
5622 if (EDGE_COUNT (src->succs) != 2
5623 /* Verify that all targets will be TARGET. Specifically, the
5624 edge that is not E must also go to TARGET. */
5625 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5626 return NULL;
5627
5628 i = gsi_last_bb (src);
5629 if (gsi_end_p (i))
5630 return NULL;
5631
5632 stmt = gsi_stmt (i);
5633
5634 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5635 {
5636 gsi_remove (&i, true);
5637 e = ssa_redirect_edge (e, target);
5638 e->flags = EDGE_FALLTHRU;
5639 return e;
5640 }
5641
5642 return NULL;
5643 }
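
/* Editorial sketch: the replacement above fires when the sibling edge
   already reaches TARGET, so after redirecting E there too the
   GIMPLE_COND (or switch) no longer decides anything:

     bb:  if (_1 != 0) goto <target>; else goto <target>;

   The branch is deleted and the surviving edge becomes a plain
   EDGE_FALLTHRU.  */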
5644
5645
5646 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5647 edge representing the redirected branch. */
5648
5649 static edge
5650 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5651 {
5652 basic_block bb = e->src;
5653 gimple_stmt_iterator gsi;
5654 edge ret;
5655 gimple *stmt;
5656
5657 if (e->flags & EDGE_ABNORMAL)
5658 return NULL;
5659
5660 if (e->dest == dest)
5661 return NULL;
5662
5663 if (e->flags & EDGE_EH)
5664 return redirect_eh_edge (e, dest);
5665
5666 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5667 {
5668 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5669 if (ret)
5670 return ret;
5671 }
5672
5673 gsi = gsi_last_bb (bb);
5674 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5675
5676 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5677 {
5678 case GIMPLE_COND:
5679 /* For COND_EXPR, we only need to redirect the edge. */
5680 break;
5681
5682 case GIMPLE_GOTO:
5683 /* No non-abnormal edges should lead from a non-simple goto, and
5684 simple ones should be represented implicitly. */
5685 gcc_unreachable ();
5686
5687 case GIMPLE_SWITCH:
5688 {
5689 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5690 tree label = gimple_block_label (dest);
5691 tree cases = get_cases_for_edge (e, switch_stmt);
5692
5693 /* If we have a list of cases associated with E, then use it
5694 as it's a lot faster than walking the entire case vector. */
5695 if (cases)
5696 {
5697 edge e2 = find_edge (e->src, dest);
5698 tree last, first;
5699
5700 first = cases;
5701 while (cases)
5702 {
5703 last = cases;
5704 CASE_LABEL (cases) = label;
5705 cases = CASE_CHAIN (cases);
5706 }
5707
5708 /* If there was already an edge in the CFG, then we need
5709 to move all the cases associated with E to E2. */
5710 if (e2)
5711 {
5712 tree cases2 = get_cases_for_edge (e2, switch_stmt);
5713
5714 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5715 CASE_CHAIN (cases2) = first;
5716 }
5717 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5718 }
5719 else
5720 {
5721 size_t i, n = gimple_switch_num_labels (switch_stmt);
5722
5723 for (i = 0; i < n; i++)
5724 {
5725 tree elt = gimple_switch_label (switch_stmt, i);
5726 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5727 CASE_LABEL (elt) = label;
5728 }
5729 }
5730 }
5731 break;
5732
5733 case GIMPLE_ASM:
5734 {
5735 gasm *asm_stmt = as_a <gasm *> (stmt);
5736 int i, n = gimple_asm_nlabels (asm_stmt);
5737 tree label = NULL;
5738
5739 for (i = 0; i < n; ++i)
5740 {
5741 tree cons = gimple_asm_label_op (asm_stmt, i);
5742 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5743 {
5744 if (!label)
5745 label = gimple_block_label (dest);
5746 TREE_VALUE (cons) = label;
5747 }
5748 }
5749
5750 /* If we didn't find any label matching the former edge in the
5751 asm labels, we must be redirecting the fallthrough
5752 edge. */
5753 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5754 }
5755 break;
5756
5757 case GIMPLE_RETURN:
5758 gsi_remove (&gsi, true);
5759 e->flags |= EDGE_FALLTHRU;
5760 break;
5761
5762 case GIMPLE_OMP_RETURN:
5763 case GIMPLE_OMP_CONTINUE:
5764 case GIMPLE_OMP_SECTIONS_SWITCH:
5765 case GIMPLE_OMP_FOR:
5766 /* The edges from OMP constructs can be simply redirected. */
5767 break;
5768
5769 case GIMPLE_EH_DISPATCH:
5770 if (!(e->flags & EDGE_FALLTHRU))
5771 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5772 break;
5773
5774 case GIMPLE_TRANSACTION:
5775 if (e->flags & EDGE_TM_ABORT)
5776 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
5777 gimple_block_label (dest));
5778 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
5779 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
5780 gimple_block_label (dest));
5781 else
5782 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
5783 gimple_block_label (dest));
5784 break;
5785
5786 default:
5787 /* Otherwise it must be a fallthru edge, and we don't need to
5788 do anything besides redirecting it. */
5789 gcc_assert (e->flags & EDGE_FALLTHRU);
5790 break;
5791 }
5792
5793 /* Update/insert PHI nodes as necessary. */
5794
5795 /* Now update the edges in the CFG. */
5796 e = ssa_redirect_edge (e, dest);
5797
5798 return e;
5799 }
5800
5801 /* Returns true if it is possible to remove edge E by redirecting
5802 it to the destination of the other edge from E->src. */
5803
5804 static bool
5805 gimple_can_remove_branch_p (const_edge e)
5806 {
5807 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5808 return false;
5809
5810 return true;
5811 }
5812
5813 /* Simple wrapper, as we can always redirect fallthru edges. */
5814
5815 static basic_block
5816 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5817 {
5818 e = gimple_redirect_edge_and_branch (e, dest);
5819 gcc_assert (e);
5820
5821 return NULL;
5822 }
5823
5824
5825 /* Splits basic block BB after statement STMT (but at least after the
5826 labels). If STMT is NULL, BB is split just after the labels. */
5827
5828 static basic_block
5829 gimple_split_block (basic_block bb, void *stmt)
5830 {
5831 gimple_stmt_iterator gsi;
5832 gimple_stmt_iterator gsi_tgt;
5833 gimple_seq list;
5834 basic_block new_bb;
5835 edge e;
5836 edge_iterator ei;
5837
5838 new_bb = create_empty_bb (bb);
5839
5840 /* Redirect the outgoing edges. */
5841 new_bb->succs = bb->succs;
5842 bb->succs = NULL;
5843 FOR_EACH_EDGE (e, ei, new_bb->succs)
5844 e->src = new_bb;
5845
5846 /* Get a stmt iterator pointing to the first stmt to move. */
5847 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
5848 gsi = gsi_after_labels (bb);
5849 else
5850 {
5851 gsi = gsi_for_stmt ((gimple *) stmt);
5852 gsi_next (&gsi);
5853 }
5854
5855 /* Move everything from GSI to the new basic block. */
5856 if (gsi_end_p (gsi))
5857 return new_bb;
5858
5859 /* Split the statement list - avoid creating new containers, as this
5860 brings ugly quadratic memory consumption in the inliner.
5861 (We are still quadratic since we need to update stmt BB pointers,
5862 sadly.) */
5863 gsi_split_seq_before (&gsi, &list);
5864 set_bb_seq (new_bb, list);
5865 for (gsi_tgt = gsi_start (list);
5866 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5867 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5868
5869 return new_bb;
5870 }
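
/* Editorial sketch: splitting BB after STMT moves the tail of the
   sequence, and all outgoing edges, to NEW_BB; the connecting
   BB -> NEW_BB edge is added by the generic split_block wrapper in
   cfghooks.c that calls this hook:

     before:  bb:     lab: s1; s2 == STMT; s3; s4   -> succs
     after:   bb:     lab: s1; s2
              new_bb: s3; s4                        -> original succs  */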
5871
5872
5873 /* Moves basic block BB after block AFTER. */
5874
5875 static bool
5876 gimple_move_block_after (basic_block bb, basic_block after)
5877 {
5878 if (bb->prev_bb == after)
5879 return true;
5880
5881 unlink_block (bb);
5882 link_block (bb, after);
5883
5884 return true;
5885 }
5886
5887
5888 /* Return TRUE if block BB has no executable statements, otherwise return
5889 FALSE. */
5890
5891 static bool
5892 gimple_empty_block_p (basic_block bb)
5893 {
5894 /* BB must have no executable statements. */
5895 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5896 if (phi_nodes (bb))
5897 return false;
5898 if (gsi_end_p (gsi))
5899 return true;
5900 if (is_gimple_debug (gsi_stmt (gsi)))
5901 gsi_next_nondebug (&gsi);
5902 return gsi_end_p (gsi);
5903 }
5904
5905
5906 /* Split a basic block if it ends with a conditional branch and if the
5907 other part of the block is not empty. */
5908
5909 static basic_block
5910 gimple_split_block_before_cond_jump (basic_block bb)
5911 {
5912 gimple *last, *split_point;
5913 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5914 if (gsi_end_p (gsi))
5915 return NULL;
5916 last = gsi_stmt (gsi);
5917 if (gimple_code (last) != GIMPLE_COND
5918 && gimple_code (last) != GIMPLE_SWITCH)
5919 return NULL;
5920 gsi_prev (&gsi);
5921 split_point = gsi_stmt (gsi);
5922 return split_block (bb, split_point)->dest;
5923 }
5924
5925
5926 /* Return true if basic_block can be duplicated. */
5927
5928 static bool
5929 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5930 {
5931 return true;
5932 }
5933
5934 /* Create a duplicate of the basic block BB. NOTE: This does not
5935 preserve SSA form. */
5936
5937 static basic_block
5938 gimple_duplicate_bb (basic_block bb)
5939 {
5940 basic_block new_bb;
5941 gimple_stmt_iterator gsi_tgt;
5942
5943 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5944
5945 /* Copy the PHI nodes. We ignore PHI node arguments here because
5946 the incoming edges have not been set up yet. */
5947 for (gphi_iterator gpi = gsi_start_phis (bb);
5948 !gsi_end_p (gpi);
5949 gsi_next (&gpi))
5950 {
5951 gphi *phi, *copy;
5952 phi = gpi.phi ();
5953 copy = create_phi_node (NULL_TREE, new_bb);
5954 create_new_def_for (gimple_phi_result (phi), copy,
5955 gimple_phi_result_ptr (copy));
5956 gimple_set_uid (copy, gimple_uid (phi));
5957 }
5958
5959 gsi_tgt = gsi_start_bb (new_bb);
5960 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5961 !gsi_end_p (gsi);
5962 gsi_next (&gsi))
5963 {
5964 def_operand_p def_p;
5965 ssa_op_iter op_iter;
5966 tree lhs;
5967 gimple *stmt, *copy;
5968
5969 stmt = gsi_stmt (gsi);
5970 if (gimple_code (stmt) == GIMPLE_LABEL)
5971 continue;
5972
5973 /* Don't duplicate label debug stmts. */
5974 if (gimple_debug_bind_p (stmt)
5975 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5976 == LABEL_DECL)
5977 continue;
5978
5979 /* Create a new copy of STMT and duplicate STMT's virtual
5980 operands. */
5981 copy = gimple_copy (stmt);
5982 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5983
5984 maybe_duplicate_eh_stmt (copy, stmt);
5985 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5986
5987 /* When copying around a stmt writing into a local non-user
5988 aggregate, make sure it won't share a stack slot with other
5989 vars. */
5990 lhs = gimple_get_lhs (stmt);
5991 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5992 {
5993 tree base = get_base_address (lhs);
5994 if (base
5995 && (TREE_CODE (base) == VAR_DECL
5996 || TREE_CODE (base) == RESULT_DECL)
5997 && DECL_IGNORED_P (base)
5998 && !TREE_STATIC (base)
5999 && !DECL_EXTERNAL (base)
6000 && (TREE_CODE (base) != VAR_DECL
6001 || !DECL_HAS_VALUE_EXPR_P (base)))
6002 DECL_NONSHAREABLE (base) = 1;
6003 }
6004
6005 /* Create new names for all the definitions created by COPY and
6006 add replacement mappings for each new name. */
6007 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6008 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6009 }
6010
6011 return new_bb;
6012 }
6013
6014 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6015
6016 static void
6017 add_phi_args_after_copy_edge (edge e_copy)
6018 {
6019 basic_block bb, bb_copy = e_copy->src, dest;
6020 edge e;
6021 edge_iterator ei;
6022 gphi *phi, *phi_copy;
6023 tree def;
6024 gphi_iterator psi, psi_copy;
6025
6026 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6027 return;
6028
6029 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6030
6031 if (e_copy->dest->flags & BB_DUPLICATED)
6032 dest = get_bb_original (e_copy->dest);
6033 else
6034 dest = e_copy->dest;
6035
6036 e = find_edge (bb, dest);
6037 if (!e)
6038 {
6039 /* During loop unrolling the target of the latch edge is copied.
6040 In this case we are not looking for the edge to DEST, but for
6041 the edge to the duplicated block whose original was DEST. */
6042 FOR_EACH_EDGE (e, ei, bb->succs)
6043 {
6044 if ((e->dest->flags & BB_DUPLICATED)
6045 && get_bb_original (e->dest) == dest)
6046 break;
6047 }
6048
6049 gcc_assert (e != NULL);
6050 }
6051
6052 for (psi = gsi_start_phis (e->dest),
6053 psi_copy = gsi_start_phis (e_copy->dest);
6054 !gsi_end_p (psi);
6055 gsi_next (&psi), gsi_next (&psi_copy))
6056 {
6057 phi = psi.phi ();
6058 phi_copy = psi_copy.phi ();
6059 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6060 add_phi_arg (phi_copy, def, e_copy,
6061 gimple_phi_arg_location_from_edge (phi, e));
6062 }
6063 }
6064
6065
6066 /* Basic block BB_COPY was created by code duplication. Add phi node
6067 arguments for edges going out of BB_COPY. The blocks that were
6068 duplicated have BB_DUPLICATED set. */
6069
6070 void
6071 add_phi_args_after_copy_bb (basic_block bb_copy)
6072 {
6073 edge e_copy;
6074 edge_iterator ei;
6075
6076 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6077 {
6078 add_phi_args_after_copy_edge (e_copy);
6079 }
6080 }
6081
6082 /* Blocks in REGION_COPY array of length N_REGION were created by
6083 duplication of basic blocks. Add phi node arguments for edges
6084 going from these blocks. If E_COPY is not NULL, also add
6085 phi node arguments for its destination. */
6086
6087 void
6088 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6089 edge e_copy)
6090 {
6091 unsigned i;
6092
6093 for (i = 0; i < n_region; i++)
6094 region_copy[i]->flags |= BB_DUPLICATED;
6095
6096 for (i = 0; i < n_region; i++)
6097 add_phi_args_after_copy_bb (region_copy[i]);
6098 if (e_copy)
6099 add_phi_args_after_copy_edge (e_copy);
6100
6101 for (i = 0; i < n_region; i++)
6102 region_copy[i]->flags &= ~BB_DUPLICATED;
6103 }
6104
6105 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6106 important exit edge EXIT. By important we mean that no SSA name defined
6107 inside the region is live over its other exit edges. All entry
6108 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6109 to the duplicate of the region. Dominance and loop information is
6110 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6111 UPDATE_DOMINANCE is false then we assume that the caller will update the
6112 dominance information after calling this function. The new basic
6113 blocks are stored to REGION_COPY in the same order as they had in REGION,
6114 provided that REGION_COPY is not NULL.
6115 The function returns false if it is unable to copy the region,
6116 true otherwise. */
6117
6118 bool
6119 gimple_duplicate_sese_region (edge entry, edge exit,
6120 basic_block *region, unsigned n_region,
6121 basic_block *region_copy,
6122 bool update_dominance)
6123 {
6124 unsigned i;
6125 bool free_region_copy = false, copying_header = false;
6126 struct loop *loop = entry->dest->loop_father;
6127 edge exit_copy;
6128 vec<basic_block> doms;
6129 edge redirected;
6130 int total_freq = 0, entry_freq = 0;
6131 gcov_type total_count = 0, entry_count = 0;
6132
6133 if (!can_copy_bbs_p (region, n_region))
6134 return false;
6135
6136 /* Some sanity checking. Note that we do not check for all possible
6137 misuses of the functions, i.e. if you ask to copy something weird,
6138 it will work, but the state of structures probably will not be
6139 correct. */
6140 for (i = 0; i < n_region; i++)
6141 {
6142 /* We do not handle subloops, i.e. all the blocks must belong to the
6143 same loop. */
6144 if (region[i]->loop_father != loop)
6145 return false;
6146
6147 if (region[i] != entry->dest
6148 && region[i] == loop->header)
6149 return false;
6150 }
6151
6152 /* In case the function is used for loop header copying (which is the primary
6153 use), ensure that EXIT and its copy will be the new latch and entry edges. */
6154 if (loop->header == entry->dest)
6155 {
6156 copying_header = true;
6157
6158 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6159 return false;
6160
6161 for (i = 0; i < n_region; i++)
6162 if (region[i] != exit->src
6163 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6164 return false;
6165 }
6166
6167 initialize_original_copy_tables ();
6168
6169 if (copying_header)
6170 set_loop_copy (loop, loop_outer (loop));
6171 else
6172 set_loop_copy (loop, loop);
6173
6174 if (!region_copy)
6175 {
6176 region_copy = XNEWVEC (basic_block, n_region);
6177 free_region_copy = true;
6178 }
6179
6180 /* Record blocks outside the region that are dominated by something
6181 inside. */
6182 if (update_dominance)
6183 {
6184 doms.create (0);
6185 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6186 }
6187
6188 if (entry->dest->count)
6189 {
6190 total_count = entry->dest->count;
6191 entry_count = entry->count;
6192 /* Fix up corner cases, to avoid division by zero or creation of negative
6193 frequencies. */
6194 if (entry_count > total_count)
6195 entry_count = total_count;
6196 }
6197 else
6198 {
6199 total_freq = entry->dest->frequency;
6200 entry_freq = EDGE_FREQUENCY (entry);
6201 /* Fix up corner cases, to avoid division by zero or creation of negative
6202 frequencies. */
6203 if (total_freq == 0)
6204 total_freq = 1;
6205 else if (entry_freq > total_freq)
6206 entry_freq = total_freq;
6207 }
6208
6209 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6210 split_edge_bb_loc (entry), update_dominance);
6211 if (total_count)
6212 {
6213 scale_bbs_frequencies_gcov_type (region, n_region,
6214 total_count - entry_count,
6215 total_count);
6216 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6217 total_count);
6218 }
6219 else
6220 {
6221 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6222 total_freq);
6223 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6224 }
6225
6226 if (copying_header)
6227 {
6228 loop->header = exit->dest;
6229 loop->latch = exit->src;
6230 }
6231
6232 /* Redirect the entry and add the phi node arguments. */
6233 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6234 gcc_assert (redirected != NULL);
6235 flush_pending_stmts (entry);
6236
6237 /* Concerning updating of dominators: We must recount dominators
6238 for entry block and its copy. Anything that is outside of the
6239 region, but was dominated by something inside needs recounting as
6240 well. */
6241 if (update_dominance)
6242 {
6243 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6244 doms.safe_push (get_bb_original (entry->dest));
6245 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6246 doms.release ();
6247 }
6248
6249 /* Add the other PHI node arguments. */
6250 add_phi_args_after_copy (region_copy, n_region, NULL);
6251
6252 if (free_region_copy)
6253 free (region_copy);
6254
6255 free_original_copy_tables ();
6256 return true;
6257 }
6258
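/* Editor's note: the loop header copying mentioned above effectively
   rotates the loop.  Schematically:

     while (cond)                if (cond)
       body;              =>       do
                                     body;
                                   while (cond);

   The duplicated header becomes the guarding entry test, while EXIT
   and its copy become the new latch and entry edges of the loop.  */
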
6259 /* Checks if BB is part of the region defined by N_REGION BBS. */
6260 static bool
6261 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6262 {
6263 unsigned int n;
6264
6265 for (n = 0; n < n_region; n++)
6266 {
6267 if (bb == bbs[n])
6268 return true;
6269 }
6270 return false;
6271 }
6272
6273 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6274 are stored to REGION_COPY in the same order as they appear
6275 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6276 the region, EXIT an exit from it. The condition guarding EXIT
6277 is moved to ENTRY. Returns true if duplication succeeds, false
6278 otherwise.
6279
6280 For example,
6281
6282 some_code;
6283 if (cond)
6284 A;
6285 else
6286 B;
6287
6288 is transformed to
6289
6290 if (cond)
6291 {
6292 some_code;
6293 A;
6294 }
6295 else
6296 {
6297 some_code;
6298 B;
6299 }
6300 */
6301
6302 bool
6303 gimple_duplicate_sese_tail (edge entry, edge exit,
6304 basic_block *region, unsigned n_region,
6305 basic_block *region_copy)
6306 {
6307 unsigned i;
6308 bool free_region_copy = false;
6309 struct loop *loop = exit->dest->loop_father;
6310 struct loop *orig_loop = entry->dest->loop_father;
6311 basic_block switch_bb, entry_bb, nentry_bb;
6312 vec<basic_block> doms;
6313 int total_freq = 0, exit_freq = 0;
6314 gcov_type total_count = 0, exit_count = 0;
6315 edge exits[2], nexits[2], e;
6316 gimple_stmt_iterator gsi;
6317 gimple *cond_stmt;
6318 edge sorig, snew;
6319 basic_block exit_bb;
6320 gphi_iterator psi;
6321 gphi *phi;
6322 tree def;
6323 struct loop *target, *aloop, *cloop;
6324
6325 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6326 exits[0] = exit;
6327 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6328
6329 if (!can_copy_bbs_p (region, n_region))
6330 return false;
6331
6332 initialize_original_copy_tables ();
6333 set_loop_copy (orig_loop, loop);
6334
6335 target = loop;
6336 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6337 {
6338 if (bb_part_of_region_p (aloop->header, region, n_region))
6339 {
6340 cloop = duplicate_loop (aloop, target);
6341 duplicate_subloops (aloop, cloop);
6342 }
6343 }
6344
6345 if (!region_copy)
6346 {
6347 region_copy = XNEWVEC (basic_block, n_region);
6348 free_region_copy = true;
6349 }
6350
6351 gcc_assert (!need_ssa_update_p (cfun));
6352
6353 /* Record blocks outside the region that are dominated by something
6354 inside. */
6355 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6356
6357 if (exit->src->count)
6358 {
6359 total_count = exit->src->count;
6360 exit_count = exit->count;
6361 /* Fix up corner cases, to avoid division by zero or creation of negative
6362 frequencies. */
6363 if (exit_count > total_count)
6364 exit_count = total_count;
6365 }
6366 else
6367 {
6368 total_freq = exit->src->frequency;
6369 exit_freq = EDGE_FREQUENCY (exit);
6370 /* Fix up corner cases, to avoid division by zero or creation of negative
6371 frequencies. */
6372 if (total_freq == 0)
6373 total_freq = 1;
6374 if (exit_freq > total_freq)
6375 exit_freq = total_freq;
6376 }
6377
6378 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6379 split_edge_bb_loc (exit), true);
6380 if (total_count)
6381 {
6382 scale_bbs_frequencies_gcov_type (region, n_region,
6383 total_count - exit_count,
6384 total_count);
6385 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6386 total_count);
6387 }
6388 else
6389 {
6390 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6391 total_freq);
6392 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6393 }
6394
6395 /* Create the switch block, and put the exit condition into it. */
6396 entry_bb = entry->dest;
6397 nentry_bb = get_bb_copy (entry_bb);
6398 if (!last_stmt (entry->src)
6399 || !stmt_ends_bb_p (last_stmt (entry->src)))
6400 switch_bb = entry->src;
6401 else
6402 switch_bb = split_edge (entry);
6403 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6404
6405 gsi = gsi_last_bb (switch_bb);
6406 cond_stmt = last_stmt (exit->src);
6407 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6408 cond_stmt = gimple_copy (cond_stmt);
6409
6410 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6411
6412 sorig = single_succ_edge (switch_bb);
6413 sorig->flags = exits[1]->flags;
6414 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6415
6416 /* Register the new edge from SWITCH_BB in loop exit lists. */
6417 rescan_loop_exit (snew, true, false);
6418
6419 /* Add the PHI node arguments. */
6420 add_phi_args_after_copy (region_copy, n_region, snew);
6421
6422 /* Get rid of now superfluous conditions and associated edges (and phi node
6423 arguments). */
6424 exit_bb = exit->dest;
6425
6426 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6427 PENDING_STMT (e) = NULL;
6428
6429 /* The latch of ORIG_LOOP was copied, and so was the backedge
6430 to the original header. We redirect this backedge to EXIT_BB. */
6431 for (i = 0; i < n_region; i++)
6432 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6433 {
6434 gcc_assert (single_succ_edge (region_copy[i]));
6435 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6436 PENDING_STMT (e) = NULL;
6437 for (psi = gsi_start_phis (exit_bb);
6438 !gsi_end_p (psi);
6439 gsi_next (&psi))
6440 {
6441 phi = psi.phi ();
6442 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6443 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6444 }
6445 }
6446 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6447 PENDING_STMT (e) = NULL;
6448
6449 /* Anything that is outside of the region, but was dominated by something
6450 inside needs to update dominance info. */
6451 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6452 doms.release ();
6453 /* Update the SSA web. */
6454 update_ssa (TODO_update_ssa);
6455
6456 if (free_region_copy)
6457 free (region_copy);
6458
6459 free_original_copy_tables ();
6460 return true;
6461 }
6462
6463 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6464 adding blocks when the dominator traversal reaches EXIT. This
6465 function silently assumes that ENTRY strictly dominates EXIT. */
6466
6467 void
6468 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6469 vec<basic_block> *bbs_p)
6470 {
6471 basic_block son;
6472
6473 for (son = first_dom_son (CDI_DOMINATORS, entry);
6474 son;
6475 son = next_dom_son (CDI_DOMINATORS, son))
6476 {
6477 bbs_p->safe_push (son);
6478 if (son != exit)
6479 gather_blocks_in_sese_region (son, exit, bbs_p);
6480 }
6481 }
6482
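/* Typical usage (editor's sketch, mirroring move_sese_region_to_fn
   below): ENTRY must be pushed by hand, since the recursion starts
   at ENTRY's dominator children:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ...
     bbs.release ();  */
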
6483 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6484 The duplicates are recorded in VARS_MAP. */
6485
6486 static void
6487 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6488 tree to_context)
6489 {
6490 tree t = *tp, new_t;
6491 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6492
6493 if (DECL_CONTEXT (t) == to_context)
6494 return;
6495
6496 bool existed;
6497 tree &loc = vars_map->get_or_insert (t, &existed);
6498
6499 if (!existed)
6500 {
6501 if (SSA_VAR_P (t))
6502 {
6503 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6504 add_local_decl (f, new_t);
6505 }
6506 else
6507 {
6508 gcc_assert (TREE_CODE (t) == CONST_DECL);
6509 new_t = copy_node (t);
6510 }
6511 DECL_CONTEXT (new_t) = to_context;
6512
6513 loc = new_t;
6514 }
6515 else
6516 new_t = loc;
6517
6518 *tp = new_t;
6519 }
6520
6521
6522 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6523 VARS_MAP maps old ssa names and var_decls to the new ones. */
6524
6525 static tree
6526 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6527 tree to_context)
6528 {
6529 tree new_name;
6530
6531 gcc_assert (!virtual_operand_p (name));
6532
6533 tree *loc = vars_map->get (name);
6534
6535 if (!loc)
6536 {
6537 tree decl = SSA_NAME_VAR (name);
6538 if (decl)
6539 {
6540 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6541 replace_by_duplicate_decl (&decl, vars_map, to_context);
6542 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6543 decl, SSA_NAME_DEF_STMT (name));
6544 }
6545 else
6546 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6547 name, SSA_NAME_DEF_STMT (name));
6548
6549 /* Now that we've used the def stmt to define new_name, make sure it
6550 doesn't define name anymore. */
6551 SSA_NAME_DEF_STMT (name) = NULL;
6552
6553 vars_map->put (name, new_name);
6554 }
6555 else
6556 new_name = *loc;
6557
6558 return new_name;
6559 }
6560
6561 struct move_stmt_d
6562 {
6563 tree orig_block;
6564 tree new_block;
6565 tree from_context;
6566 tree to_context;
6567 hash_map<tree, tree> *vars_map;
6568 htab_t new_label_map;
6569 hash_map<void *, void *> *eh_map;
6570 bool remap_decls_p;
6571 };
6572
6573 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6574 contained in *TP if it was previously ORIG_BLOCK, and change the
6575 DECL_CONTEXT of every local variable referenced in *TP. */
6576
6577 static tree
6578 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6579 {
6580 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6581 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6582 tree t = *tp;
6583
6584 if (EXPR_P (t))
6585 {
6586 tree block = TREE_BLOCK (t);
6587 if (block == p->orig_block
6588 || (p->orig_block == NULL_TREE
6589 && block != NULL_TREE))
6590 TREE_SET_BLOCK (t, p->new_block);
6591 else if (flag_checking && block != NULL_TREE)
6592 {
6593 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6594 block = BLOCK_SUPERCONTEXT (block);
6595 gcc_assert (block == p->orig_block);
6596 }
6597 }
6598 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6599 {
6600 if (TREE_CODE (t) == SSA_NAME)
6601 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6602 else if (TREE_CODE (t) == PARM_DECL
6603 && gimple_in_ssa_p (cfun))
6604 *tp = *(p->vars_map->get (t));
6605 else if (TREE_CODE (t) == LABEL_DECL)
6606 {
6607 if (p->new_label_map)
6608 {
6609 struct tree_map in, *out;
6610 in.base.from = t;
6611 out = (struct tree_map *)
6612 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6613 if (out)
6614 *tp = t = out->to;
6615 }
6616
6617 DECL_CONTEXT (t) = p->to_context;
6618 }
6619 else if (p->remap_decls_p)
6620 {
6621 /* Replace T with its duplicate. T should no longer appear in the
6622 parent function, so this looks wasteful; however, it may appear
6623 in referenced_vars, and more importantly, as virtual operands of
6624 statements, and in alias lists of other variables. It would be
6625 quite difficult to expunge it from all those places. ??? It might
6626 suffice to do this for addressable variables. */
6627 if ((TREE_CODE (t) == VAR_DECL
6628 && !is_global_var (t))
6629 || TREE_CODE (t) == CONST_DECL)
6630 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6631 }
6632 *walk_subtrees = 0;
6633 }
6634 else if (TYPE_P (t))
6635 *walk_subtrees = 0;
6636
6637 return NULL_TREE;
6638 }
6639
6640 /* Helper for move_stmt_r. Given an EH region number for the source
6641 function, map that to the duplicate EH region number in the dest. */
6642
6643 static int
6644 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6645 {
6646 eh_region old_r, new_r;
6647
6648 old_r = get_eh_region_from_number (old_nr);
6649 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6650
6651 return new_r->index;
6652 }
6653
6654 /* Similar, but operate on INTEGER_CSTs. */
6655
6656 static tree
6657 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6658 {
6659 int old_nr, new_nr;
6660
6661 old_nr = tree_to_shwi (old_t_nr);
6662 new_nr = move_stmt_eh_region_nr (old_nr, p);
6663
6664 return build_int_cst (integer_type_node, new_nr);
6665 }
6666
6667 /* Like move_stmt_op, but for gimple statements.
6668
6669 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6670 contained in the current statement in *GSI_P and change the
6671 DECL_CONTEXT of every local variable referenced in the current
6672 statement. */
6673
6674 static tree
6675 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6676 struct walk_stmt_info *wi)
6677 {
6678 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6679 gimple *stmt = gsi_stmt (*gsi_p);
6680 tree block = gimple_block (stmt);
6681
6682 if (block == p->orig_block
6683 || (p->orig_block == NULL_TREE
6684 && block != NULL_TREE))
6685 gimple_set_block (stmt, p->new_block);
6686
6687 switch (gimple_code (stmt))
6688 {
6689 case GIMPLE_CALL:
6690 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6691 {
6692 tree r, fndecl = gimple_call_fndecl (stmt);
6693 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6694 switch (DECL_FUNCTION_CODE (fndecl))
6695 {
6696 case BUILT_IN_EH_COPY_VALUES:
6697 r = gimple_call_arg (stmt, 1);
6698 r = move_stmt_eh_region_tree_nr (r, p);
6699 gimple_call_set_arg (stmt, 1, r);
6700 /* FALLTHRU */
6701
6702 case BUILT_IN_EH_POINTER:
6703 case BUILT_IN_EH_FILTER:
6704 r = gimple_call_arg (stmt, 0);
6705 r = move_stmt_eh_region_tree_nr (r, p);
6706 gimple_call_set_arg (stmt, 0, r);
6707 break;
6708
6709 default:
6710 break;
6711 }
6712 }
6713 break;
6714
6715 case GIMPLE_RESX:
6716 {
6717 gresx *resx_stmt = as_a <gresx *> (stmt);
6718 int r = gimple_resx_region (resx_stmt);
6719 r = move_stmt_eh_region_nr (r, p);
6720 gimple_resx_set_region (resx_stmt, r);
6721 }
6722 break;
6723
6724 case GIMPLE_EH_DISPATCH:
6725 {
6726 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6727 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6728 r = move_stmt_eh_region_nr (r, p);
6729 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6730 }
6731 break;
6732
6733 case GIMPLE_OMP_RETURN:
6734 case GIMPLE_OMP_CONTINUE:
6735 break;
6736 default:
6737 if (is_gimple_omp (stmt))
6738 {
6739 /* Do not remap variables inside OMP directives. Variables
6740 referenced in clauses and directive header belong to the
6741 parent function and should not be moved into the child
6742 function. */
6743 bool save_remap_decls_p = p->remap_decls_p;
6744 p->remap_decls_p = false;
6745 *handled_ops_p = true;
6746
6747 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6748 move_stmt_op, wi);
6749
6750 p->remap_decls_p = save_remap_decls_p;
6751 }
6752 break;
6753 }
6754
6755 return NULL_TREE;
6756 }
6757
6758 /* Move basic block BB from function CFUN to function DEST_FN. The
6759 block is moved out of the original linked list and placed after
6760 block AFTER in the new list. Also, the block is removed from the
6761 original array of blocks and placed in DEST_FN's array of blocks.
6762 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6763 updated to reflect the moved edges.
6764
6765 The local variables are remapped to new instances, VARS_MAP is used
6766 to record the mapping. */
6767
6768 static void
6769 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6770 basic_block after, bool update_edge_count_p,
6771 struct move_stmt_d *d)
6772 {
6773 struct control_flow_graph *cfg;
6774 edge_iterator ei;
6775 edge e;
6776 gimple_stmt_iterator si;
6777 unsigned old_len, new_len;
6778
6779 /* Remove BB from dominance structures. */
6780 delete_from_dominance_info (CDI_DOMINATORS, bb);
6781
6782 /* Move BB from its current loop to the copy in the new function. */
6783 if (current_loops)
6784 {
6785 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6786 if (new_loop)
6787 bb->loop_father = new_loop;
6788 }
6789
6790 /* Link BB to the new linked list. */
6791 move_block_after (bb, after);
6792
6793 /* Update the edge count in the corresponding flowgraphs. */
6794 if (update_edge_count_p)
6795 FOR_EACH_EDGE (e, ei, bb->succs)
6796 {
6797 cfun->cfg->x_n_edges--;
6798 dest_cfun->cfg->x_n_edges++;
6799 }
6800
6801 /* Remove BB from the original basic block array. */
6802 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6803 cfun->cfg->x_n_basic_blocks--;
6804
6805 /* Grow DEST_CFUN's basic block array if needed. */
6806 cfg = dest_cfun->cfg;
6807 cfg->x_n_basic_blocks++;
6808 if (bb->index >= cfg->x_last_basic_block)
6809 cfg->x_last_basic_block = bb->index + 1;
6810
6811 old_len = vec_safe_length (cfg->x_basic_block_info);
6812 if ((unsigned) cfg->x_last_basic_block >= old_len)
6813 {
6814 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6815 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6816 }
6817
6818 (*cfg->x_basic_block_info)[bb->index] = bb;
6819
6820 /* Remap the variables in phi nodes. */
6821 for (gphi_iterator psi = gsi_start_phis (bb);
6822 !gsi_end_p (psi); )
6823 {
6824 gphi *phi = psi.phi ();
6825 use_operand_p use;
6826 tree op = PHI_RESULT (phi);
6827 ssa_op_iter oi;
6828 unsigned i;
6829
6830 if (virtual_operand_p (op))
6831 {
6832 /* Remove the phi nodes for virtual operands (alias analysis will be
6833 run for the new function, anyway). */
6834 remove_phi_node (&psi, true);
6835 continue;
6836 }
6837
6838 SET_PHI_RESULT (phi,
6839 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6840 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6841 {
6842 op = USE_FROM_PTR (use);
6843 if (TREE_CODE (op) == SSA_NAME)
6844 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6845 }
6846
6847 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6848 {
6849 location_t locus = gimple_phi_arg_location (phi, i);
6850 tree block = LOCATION_BLOCK (locus);
6851
6852 if (locus == UNKNOWN_LOCATION)
6853 continue;
6854 if (d->orig_block == NULL_TREE || block == d->orig_block)
6855 {
6856 locus = set_block (locus, d->new_block);
6857 gimple_phi_arg_set_location (phi, i, locus);
6858 }
6859 }
6860
6861 gsi_next (&psi);
6862 }
6863
6864 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6865 {
6866 gimple *stmt = gsi_stmt (si);
6867 struct walk_stmt_info wi;
6868
6869 memset (&wi, 0, sizeof (wi));
6870 wi.info = d;
6871 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6872
6873 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6874 {
6875 tree label = gimple_label_label (label_stmt);
6876 int uid = LABEL_DECL_UID (label);
6877
6878 gcc_assert (uid > -1);
6879
6880 old_len = vec_safe_length (cfg->x_label_to_block_map);
6881 if (old_len <= (unsigned) uid)
6882 {
6883 new_len = 3 * uid / 2 + 1;
6884 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6885 }
6886
6887 (*cfg->x_label_to_block_map)[uid] = bb;
6888 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6889
6890 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6891
6892 if (uid >= dest_cfun->cfg->last_label_uid)
6893 dest_cfun->cfg->last_label_uid = uid + 1;
6894 }
6895
6896 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6897 remove_stmt_from_eh_lp_fn (cfun, stmt);
6898
6899 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6900 gimple_remove_stmt_histograms (cfun, stmt);
6901
6902 /* We cannot leave any operands allocated from the operand caches of
6903 the current function. */
6904 free_stmt_operands (cfun, stmt);
6905 push_cfun (dest_cfun);
6906 update_stmt (stmt);
6907 pop_cfun ();
6908 }
6909
6910 FOR_EACH_EDGE (e, ei, bb->succs)
6911 if (e->goto_locus != UNKNOWN_LOCATION)
6912 {
6913 tree block = LOCATION_BLOCK (e->goto_locus);
6914 if (d->orig_block == NULL_TREE
6915 || block == d->orig_block)
6916 e->goto_locus = set_block (e->goto_locus, d->new_block);
6917 }
6918 }
6919
6920 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6921 the outermost EH region. Use REGION as the incoming base EH region. */
6922
6923 static eh_region
6924 find_outermost_region_in_block (struct function *src_cfun,
6925 basic_block bb, eh_region region)
6926 {
6927 gimple_stmt_iterator si;
6928
6929 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6930 {
6931 gimple *stmt = gsi_stmt (si);
6932 eh_region stmt_region;
6933 int lp_nr;
6934
6935 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6936 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6937 if (stmt_region)
6938 {
6939 if (region == NULL)
6940 region = stmt_region;
6941 else if (stmt_region != region)
6942 {
6943 region = eh_region_outermost (src_cfun, stmt_region, region);
6944 gcc_assert (region != NULL);
6945 }
6946 }
6947 }
6948
6949 return region;
6950 }
6951
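/* Callback for duplicate_eh_regions.  Map the label DECL to a fresh
   artificial label in the destination function, record the mapping in
   the hash table DATA, and return the new label.  */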
6952 static tree
6953 new_label_mapper (tree decl, void *data)
6954 {
6955 htab_t hash = (htab_t) data;
6956 struct tree_map *m;
6957 void **slot;
6958
6959 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6960
6961 m = XNEW (struct tree_map);
6962 m->hash = DECL_UID (decl);
6963 m->base.from = decl;
6964 m->to = create_artificial_label (UNKNOWN_LOCATION);
6965 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6966 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6967 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6968
6969 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6970 gcc_assert (*slot == NULL);
6971
6972 *slot = m;
6973
6974 return m->to;
6975 }
6976
6977 /* Tree walker to replace the decls used inside value expressions by
6978 duplicates. */
6979
6980 static tree
6981 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
6982 {
6983 struct replace_decls_d *rd = (struct replace_decls_d *)data;
6984
6985 switch (TREE_CODE (*tp))
6986 {
6987 case VAR_DECL:
6988 case PARM_DECL:
6989 case RESULT_DECL:
6990 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
6991 break;
6992 default:
6993 break;
6994 }
6995
6996 if (IS_TYPE_OR_DECL_P (*tp))
6997 *walk_subtrees = false;
6998
6999 return NULL;
7000 }
7001
7002 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7003 subblocks. */
7004
7005 static void
7006 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7007 tree to_context)
7008 {
7009 tree *tp, t;
7010
7011 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7012 {
7013 t = *tp;
7014 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
7015 continue;
7016 replace_by_duplicate_decl (&t, vars_map, to_context);
7017 if (t != *tp)
7018 {
7019 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
7020 {
7021 tree x = DECL_VALUE_EXPR (*tp);
7022 struct replace_decls_d rd = { vars_map, to_context };
7023 x = unshare_expr (x);
7024 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7025 SET_DECL_VALUE_EXPR (t, x);
7026 DECL_HAS_VALUE_EXPR_P (t) = 1;
7027 }
7028 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7029 *tp = t;
7030 }
7031 }
7032
7033 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7034 replace_block_vars_by_duplicates (block, vars_map, to_context);
7035 }
7036
7037 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7038 from FN1 to FN2. */
7039
7040 static void
7041 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7042 struct loop *loop)
7043 {
7044 /* Discard it from the old loop array. */
7045 (*get_loops (fn1))[loop->num] = NULL;
7046
7047 /* Place it in the new loop array, assigning it a new number. */
7048 loop->num = number_of_loops (fn2);
7049 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7050
7051 /* Recurse to children. */
7052 for (loop = loop->inner; loop; loop = loop->next)
7053 fixup_loop_arrays_after_move (fn1, fn2, loop);
7054 }
7055
7056 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7057 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7058
7059 DEBUG_FUNCTION void
7060 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7061 {
7062 basic_block bb;
7063 edge_iterator ei;
7064 edge e;
7065 bitmap bbs = BITMAP_ALLOC (NULL);
7066 int i;
7067
7068 gcc_assert (entry != NULL);
7069 gcc_assert (entry != exit);
7070 gcc_assert (bbs_p != NULL);
7071
7072 gcc_assert (bbs_p->length () > 0);
7073
7074 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7075 bitmap_set_bit (bbs, bb->index);
7076
7077 gcc_assert (bitmap_bit_p (bbs, entry->index));
7078 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7079
7080 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7081 {
7082 if (bb == entry)
7083 {
7084 gcc_assert (single_pred_p (entry));
7085 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7086 }
7087 else
7088 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7089 {
7090 e = ei_edge (ei);
7091 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7092 }
7093
7094 if (bb == exit)
7095 {
7096 gcc_assert (single_succ_p (exit));
7097 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7098 }
7099 else
7100 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7101 {
7102 e = ei_edge (ei);
7103 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7104 }
7105 }
7106
7107 BITMAP_FREE (bbs);
7108 }
7109
7110 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7111
7112 bool
7113 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7114 {
7115 bitmap release_names = (bitmap)data;
7116
7117 if (TREE_CODE (from) != SSA_NAME)
7118 return true;
7119
7120 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7121 return true;
7122 }
7123
7124 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7125 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7126 single basic block in the original CFG and the new basic block is
7127 returned. DEST_CFUN must not have a CFG yet.
7128
7129 Note that the region need not be a pure SESE region. Blocks inside
7130 the region may contain calls to abort/exit. The only restriction
7131 is that ENTRY_BB should be the only entry point and it must
7132 dominate EXIT_BB.
7133
7134 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7135 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7136 to the new function.
7137
7138 All local variables referenced in the region are assumed to be in
7139 the corresponding BLOCK_VARS and unexpanded variable lists
7140 associated with DEST_CFUN.
7141
7142 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7143 reimplement move_sese_region_to_fn by duplicating the region rather than
7144 moving it. */
7145
7146 basic_block
7147 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7148 basic_block exit_bb, tree orig_block)
7149 {
7150 vec<basic_block> bbs, dom_bbs;
7151 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7152 basic_block after, bb, *entry_pred, *exit_succ, abb;
7153 struct function *saved_cfun = cfun;
7154 int *entry_flag, *exit_flag;
7155 unsigned *entry_prob, *exit_prob;
7156 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7157 edge e;
7158 edge_iterator ei;
7159 htab_t new_label_map;
7160 hash_map<void *, void *> *eh_map;
7161 struct loop *loop = entry_bb->loop_father;
7162 struct loop *loop0 = get_loop (saved_cfun, 0);
7163 struct move_stmt_d d;
7164
7165 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7166 region. */
7167 gcc_assert (entry_bb != exit_bb
7168 && (!exit_bb
7169 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7170
7171 /* Collect all the blocks in the region. Manually add ENTRY_BB
7172 because it won't be added by gather_blocks_in_sese_region. */
7173 bbs.create (0);
7174 bbs.safe_push (entry_bb);
7175 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7176
7177 if (flag_checking)
7178 verify_sese (entry_bb, exit_bb, &bbs);
7179
7180 /* The blocks that used to be dominated by something in BBS will now be
7181 dominated by the new block. */
7182 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7183 bbs.address (),
7184 bbs.length ());
7185
7186 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7187 the predecessor edges to ENTRY_BB and the successor edges to
7188 EXIT_BB so that we can re-attach them to the new basic block that
7189 will replace the region. */
7190 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7191 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7192 entry_flag = XNEWVEC (int, num_entry_edges);
7193 entry_prob = XNEWVEC (unsigned, num_entry_edges);
7194 i = 0;
7195 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7196 {
7197 entry_prob[i] = e->probability;
7198 entry_flag[i] = e->flags;
7199 entry_pred[i++] = e->src;
7200 remove_edge (e);
7201 }
7202
7203 if (exit_bb)
7204 {
7205 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7206 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7207 exit_flag = XNEWVEC (int, num_exit_edges);
7208 exit_prob = XNEWVEC (unsigned, num_exit_edges);
7209 i = 0;
7210 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7211 {
7212 exit_prob[i] = e->probability;
7213 exit_flag[i] = e->flags;
7214 exit_succ[i++] = e->dest;
7215 remove_edge (e);
7216 }
7217 }
7218 else
7219 {
7220 num_exit_edges = 0;
7221 exit_succ = NULL;
7222 exit_flag = NULL;
7223 exit_prob = NULL;
7224 }
7225
7226 /* Switch context to the child function to initialize DEST_FN's CFG. */
7227 gcc_assert (dest_cfun->cfg == NULL);
7228 push_cfun (dest_cfun);
7229
7230 init_empty_tree_cfg ();
7231
7232 /* Initialize EH information for the new function. */
7233 eh_map = NULL;
7234 new_label_map = NULL;
7235 if (saved_cfun->eh)
7236 {
7237 eh_region region = NULL;
7238
7239 FOR_EACH_VEC_ELT (bbs, i, bb)
7240 region = find_outermost_region_in_block (saved_cfun, bb, region);
7241
7242 init_eh_for_function ();
7243 if (region != NULL)
7244 {
7245 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7246 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7247 new_label_mapper, new_label_map);
7248 }
7249 }
7250
7251 /* Initialize an empty loop tree. */
7252 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7253 init_loops_structure (dest_cfun, loops, 1);
7254 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7255 set_loops_for_fn (dest_cfun, loops);
7256
7257 /* Move the outlined loop tree part. */
7258 num_nodes = bbs.length ();
7259 FOR_EACH_VEC_ELT (bbs, i, bb)
7260 {
7261 if (bb->loop_father->header == bb)
7262 {
7263 struct loop *this_loop = bb->loop_father;
7264 struct loop *outer = loop_outer (this_loop);
7265 if (outer == loop
7266 /* If the SESE region contains some bbs ending with
7267 a noreturn call, those are considered to belong
7268 to the outermost loop in saved_cfun, rather than
7269 the entry_bb's loop_father. */
7270 || outer == loop0)
7271 {
7272 if (outer != loop)
7273 num_nodes -= this_loop->num_nodes;
7274 flow_loop_tree_node_remove (bb->loop_father);
7275 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7276 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7277 }
7278 }
7279 else if (bb->loop_father == loop0 && loop0 != loop)
7280 num_nodes--;
7281
7282 /* Remove loop exits from the outlined region. */
7283 if (loops_for_fn (saved_cfun)->exits)
7284 FOR_EACH_EDGE (e, ei, bb->succs)
7285 {
7286 struct loops *l = loops_for_fn (saved_cfun);
7287 loop_exit **slot
7288 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7289 NO_INSERT);
7290 if (slot)
7291 l->exits->clear_slot (slot);
7292 }
7293 }
7294
7295
7296 /* Adjust the number of blocks in the tree root of the outlined part. */
7297 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7298
7299 /* Set up a mapping to be used by move_block_to_fn. */
7300 loop->aux = current_loops->tree_root;
7301 loop0->aux = current_loops->tree_root;
7302
7303 pop_cfun ();
7304
7305 /* Move blocks from BBS into DEST_CFUN. */
7306 gcc_assert (bbs.length () >= 2);
7307 after = dest_cfun->cfg->x_entry_block_ptr;
7308 hash_map<tree, tree> vars_map;
7309
7310 memset (&d, 0, sizeof (d));
7311 d.orig_block = orig_block;
7312 d.new_block = DECL_INITIAL (dest_cfun->decl);
7313 d.from_context = cfun->decl;
7314 d.to_context = dest_cfun->decl;
7315 d.vars_map = &vars_map;
7316 d.new_label_map = new_label_map;
7317 d.eh_map = eh_map;
7318 d.remap_decls_p = true;
7319
7320 if (gimple_in_ssa_p (cfun))
7321 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7322 {
7323 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7324 set_ssa_default_def (dest_cfun, arg, narg);
7325 vars_map.put (arg, narg);
7326 }
7327
7328 FOR_EACH_VEC_ELT (bbs, i, bb)
7329 {
7330 /* No need to update edge counts on the last block. It has
7331 already been updated earlier when we detached the region from
7332 the original CFG. */
7333 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7334 after = bb;
7335 }
7336
7337 loop->aux = NULL;
7338 loop0->aux = NULL;
7339 /* Loop sizes are no longer correct, fix them up. */
7340 loop->num_nodes -= num_nodes;
7341 for (struct loop *outer = loop_outer (loop);
7342 outer; outer = loop_outer (outer))
7343 outer->num_nodes -= num_nodes;
7344 loop0->num_nodes -= bbs.length () - num_nodes;
7345
7346 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7347 {
7348 struct loop *aloop;
7349 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7350 if (aloop != NULL)
7351 {
7352 if (aloop->simduid)
7353 {
7354 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7355 d.to_context);
7356 dest_cfun->has_simduid_loops = true;
7357 }
7358 if (aloop->force_vectorize)
7359 dest_cfun->has_force_vectorize_loops = true;
7360 }
7361 }
7362
7363 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7364 if (orig_block)
7365 {
7366 tree block;
7367 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7368 == NULL_TREE);
7369 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7370 = BLOCK_SUBBLOCKS (orig_block);
7371 for (block = BLOCK_SUBBLOCKS (orig_block);
7372 block; block = BLOCK_CHAIN (block))
7373 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7374 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7375 }
7376
7377 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7378 &vars_map, dest_cfun->decl);
7379
7380 if (new_label_map)
7381 htab_delete (new_label_map);
7382 if (eh_map)
7383 delete eh_map;
7384
7385 if (gimple_in_ssa_p (cfun))
7386 {
7387 /* We need to release ssa-names in a defined order, so first find them,
7388 and then iterate in ascending version order. */
7389 bitmap release_names = BITMAP_ALLOC (NULL);
7390 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7391 bitmap_iterator bi;
7392 unsigned i;
7393 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7394 release_ssa_name (ssa_name (i));
7395 BITMAP_FREE (release_names);
7396 }
7397
7398 /* Rewire the entry and exit blocks. The successor to the entry
7399 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7400 the child function. Similarly, the predecessor of DEST_FN's
7401 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7402 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7403 various CFG manipulation functions get to the right CFG.
7404
7405 FIXME, this is silly. The CFG ought to become a parameter to
7406 these helpers. */
7407 push_cfun (dest_cfun);
7408 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7409 if (exit_bb)
7410 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7411 pop_cfun ();
7412
7413 /* Back in the original function, the SESE region has disappeared,
7414 create a new basic block in its place. */
7415 bb = create_empty_bb (entry_pred[0]);
7416 if (current_loops)
7417 add_bb_to_loop (bb, loop);
7418 for (i = 0; i < num_entry_edges; i++)
7419 {
7420 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7421 e->probability = entry_prob[i];
7422 }
7423
7424 for (i = 0; i < num_exit_edges; i++)
7425 {
7426 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7427 e->probability = exit_prob[i];
7428 }
7429
7430 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7431 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7432 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7433 dom_bbs.release ();
7434
7435 if (exit_bb)
7436 {
7437 free (exit_prob);
7438 free (exit_flag);
7439 free (exit_succ);
7440 }
7441 free (entry_prob);
7442 free (entry_flag);
7443 free (entry_pred);
7444 bbs.release ();
7445
7446 return bb;
7447 }
7448
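/* Editor's sketch of the calling convention (names hypothetical;
   compare the OMP expansion code, the main in-tree caller):

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block merge_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   On return, MERGE_BB stands in for the whole region in the original
   CFG, with the region's former entry and exit edges re-attached to
   it.  */
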
7449 /* Dump default def DEF to file FILE using FLAGS and indentation
7450 SPC. */
7451
7452 static void
7453 dump_default_def (FILE *file, tree def, int spc, int flags)
7454 {
7455 for (int i = 0; i < spc; ++i)
7456 fprintf (file, " ");
7457 dump_ssaname_info_to_file (file, def, spc);
7458
7459 print_generic_expr (file, TREE_TYPE (def), flags);
7460 fprintf (file, " ");
7461 print_generic_expr (file, def, flags);
7462 fprintf (file, " = ");
7463 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7464 fprintf (file, ";\n");
7465 }
7466
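/* Editor's illustration: for a parameter `int n' whose default
   definition is n_1(D), this emits a line of the form

     int n_1(D) = n;

   (SSA version numbers will vary).  */
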
7467 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in dumpfile.h)
7468 */
7469
7470 void
7471 dump_function_to_file (tree fndecl, FILE *file, int flags)
7472 {
7473 tree arg, var, old_current_fndecl = current_function_decl;
7474 struct function *dsf;
7475 bool ignore_topmost_bind = false, any_var = false;
7476 basic_block bb;
7477 tree chain;
7478 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7479 && decl_is_tm_clone (fndecl));
7480 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7481
7482 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7483 {
7484 fprintf (file, "__attribute__((");
7485
7486 bool first = true;
7487 tree chain;
7488 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7489 first = false, chain = TREE_CHAIN (chain))
7490 {
7491 if (!first)
7492 fprintf (file, ", ");
7493
7494 print_generic_expr (file, get_attribute_name (chain), dump_flags);
7495 if (TREE_VALUE (chain) != NULL_TREE)
7496 {
7497 fprintf (file, " (");
7498 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7499 fprintf (file, ")");
7500 }
7501 }
7502
7503 fprintf (file, "))\n");
7504 }
7505
7506 current_function_decl = fndecl;
7507 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7508
7509 arg = DECL_ARGUMENTS (fndecl);
7510 while (arg)
7511 {
7512 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7513 fprintf (file, " ");
7514 print_generic_expr (file, arg, dump_flags);
7515 if (flags & TDF_VERBOSE)
7516 print_node (file, "", arg, 4);
7517 if (DECL_CHAIN (arg))
7518 fprintf (file, ", ");
7519 arg = DECL_CHAIN (arg);
7520 }
7521 fprintf (file, ")\n");
7522
7523 if (flags & TDF_VERBOSE)
7524 print_node (file, "", fndecl, 2);
7525
7526 dsf = DECL_STRUCT_FUNCTION (fndecl);
7527 if (dsf && (flags & TDF_EH))
7528 dump_eh_tree (file, dsf);
7529
7530 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7531 {
7532 dump_node (fndecl, TDF_SLIM | flags, file);
7533 current_function_decl = old_current_fndecl;
7534 return;
7535 }
7536
7537 /* When GIMPLE is lowered, the variables are no longer available in
7538 BIND_EXPRs, so display them separately. */
7539 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7540 {
7541 unsigned ix;
7542 ignore_topmost_bind = true;
7543
7544 fprintf (file, "{\n");
7545 if (gimple_in_ssa_p (fun)
7546 && (flags & TDF_ALIAS))
7547 {
7548 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
7549 arg = DECL_CHAIN (arg))
7550 {
7551 tree def = ssa_default_def (fun, arg);
7552 if (def)
7553 dump_default_def (file, def, 2, flags);
7554 }
7555
7556 tree res = DECL_RESULT (fun->decl);
7557 if (res != NULL_TREE
7558 && DECL_BY_REFERENCE (res))
7559 {
7560 tree def = ssa_default_def (fun, res);
7561 if (def)
7562 dump_default_def (file, def, 2, flags);
7563 }
7564
7565 tree static_chain = fun->static_chain_decl;
7566 if (static_chain != NULL_TREE)
7567 {
7568 tree def = ssa_default_def (fun, static_chain);
7569 if (def)
7570 dump_default_def (file, def, 2, flags);
7571 }
7572 }
7573
7574 if (!vec_safe_is_empty (fun->local_decls))
7575 FOR_EACH_LOCAL_DECL (fun, ix, var)
7576 {
7577 print_generic_decl (file, var, flags);
7578 if (flags & TDF_VERBOSE)
7579 print_node (file, "", var, 4);
7580 fprintf (file, "\n");
7581
7582 any_var = true;
7583 }
7584 if (gimple_in_ssa_p (cfun))
7585 for (ix = 1; ix < num_ssa_names; ++ix)
7586 {
7587 tree name = ssa_name (ix);
7588 if (name && !SSA_NAME_VAR (name))
7589 {
7590 fprintf (file, " ");
7591 print_generic_expr (file, TREE_TYPE (name), flags);
7592 fprintf (file, " ");
7593 print_generic_expr (file, name, flags);
7594 fprintf (file, ";\n");
7595
7596 any_var = true;
7597 }
7598 }
7599 }
7600
7601 if (fun && fun->decl == fndecl
7602 && fun->cfg
7603 && basic_block_info_for_fn (fun))
7604 {
7605 /* If the CFG has been built, emit a CFG-based dump. */
7606 if (!ignore_topmost_bind)
7607 fprintf (file, "{\n");
7608
7609 if (any_var && n_basic_blocks_for_fn (fun))
7610 fprintf (file, "\n");
7611
7612 FOR_EACH_BB_FN (bb, fun)
7613 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7614
7615 fprintf (file, "}\n");
7616 }
7617 else if (DECL_SAVED_TREE (fndecl) == NULL)
7618 {
7619 /* The function is now in GIMPLE form but the CFG has not been
7620 built yet. Emit the single sequence of GIMPLE statements
7621 that make up its body. */
7622 gimple_seq body = gimple_body (fndecl);
7623
7624 if (gimple_seq_first_stmt (body)
7625 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7626 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7627 print_gimple_seq (file, body, 0, flags);
7628 else
7629 {
7630 if (!ignore_topmost_bind)
7631 fprintf (file, "{\n");
7632
7633 if (any_var)
7634 fprintf (file, "\n");
7635
7636 print_gimple_seq (file, body, 2, flags);
7637 fprintf (file, "}\n");
7638 }
7639 }
7640 else
7641 {
7642 int indent;
7643
7644 /* Make a tree based dump. */
7645 chain = DECL_SAVED_TREE (fndecl);
7646 if (chain && TREE_CODE (chain) == BIND_EXPR)
7647 {
7648 if (ignore_topmost_bind)
7649 {
7650 chain = BIND_EXPR_BODY (chain);
7651 indent = 2;
7652 }
7653 else
7654 indent = 0;
7655 }
7656 else
7657 {
7658 if (!ignore_topmost_bind)
7659 {
7660 fprintf (file, "{\n");
7661 /* No topmost bind, pretend it's ignored for later. */
7662 ignore_topmost_bind = true;
7663 }
7664 indent = 2;
7665 }
7666
7667 if (any_var)
7668 fprintf (file, "\n");
7669
7670 print_generic_stmt_indented (file, chain, flags, indent);
7671 if (ignore_topmost_bind)
7672 fprintf (file, "}\n");
7673 }
7674
7675 if (flags & TDF_ENUMERATE_LOCALS)
7676 dump_enumerated_decls (file, flags);
7677 fprintf (file, "\n\n");
7678
7679 current_function_decl = old_current_fndecl;
7680 }
7681
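/* Editor's note, a minimal usage sketch: something like

     dump_function_to_file (current_function_decl, stderr,
			    TDF_VOPS | TDF_LINENO);

   prints the current function with virtual operands and line numbers;
   debug_function below is the stderr convenience wrapper.  */
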
7682 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h) */
7683
7684 DEBUG_FUNCTION void
7685 debug_function (tree fn, int flags)
7686 {
7687 dump_function_to_file (fn, stderr, flags);
7688 }
7689
7690
7691 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7692
7693 static void
7694 print_pred_bbs (FILE *file, basic_block bb)
7695 {
7696 edge e;
7697 edge_iterator ei;
7698
7699 FOR_EACH_EDGE (e, ei, bb->preds)
7700 fprintf (file, "bb_%d ", e->src->index);
7701 }
7702
7703
7704 /* Print on FILE the indexes for the successors of basic_block BB. */
7705
7706 static void
7707 print_succ_bbs (FILE *file, basic_block bb)
7708 {
7709 edge e;
7710 edge_iterator ei;
7711
7712 FOR_EACH_EDGE (e, ei, bb->succs)
7713 fprintf (file, "bb_%d ", e->dest->index);
7714 }
7715
7716 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
7717
7718 void
7719 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7720 {
7721 char *s_indent = (char *) alloca ((size_t) indent + 1);
7722 memset ((void *) s_indent, ' ', (size_t) indent);
7723 s_indent[indent] = '\0';
7724
7725 /* Print basic_block's header. */
7726 if (verbosity >= 2)
7727 {
7728 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7729 print_pred_bbs (file, bb);
7730 fprintf (file, "}, succs = {");
7731 print_succ_bbs (file, bb);
7732 fprintf (file, "})\n");
7733 }
7734
7735 /* Print basic_block's body. */
7736 if (verbosity >= 3)
7737 {
7738 fprintf (file, "%s {\n", s_indent);
7739 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7740 fprintf (file, "%s }\n", s_indent);
7741 }
7742 }
7743
7744 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7745
7746 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7747 the VERBOSITY level, this outputs the contents of the loop, or just
7748 its structure. */
7749
7750 static void
7751 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7752 {
7753 char *s_indent;
7754 basic_block bb;
7755
7756 if (loop == NULL)
7757 return;
7758
7759 s_indent = (char *) alloca ((size_t) indent + 1);
7760 memset ((void *) s_indent, ' ', (size_t) indent);
7761 s_indent[indent] = '\0';
7762
7763 /* Print loop's header. */
7764 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7765 if (loop->header)
7766 fprintf (file, "header = %d", loop->header->index);
7767 else
7768 {
7769 fprintf (file, "deleted)\n");
7770 return;
7771 }
7772 if (loop->latch)
7773 fprintf (file, ", latch = %d", loop->latch->index);
7774 else
7775 fprintf (file, ", multiple latches");
7776 fprintf (file, ", niter = ");
7777 print_generic_expr (file, loop->nb_iterations, 0);
7778
7779 if (loop->any_upper_bound)
7780 {
7781 fprintf (file, ", upper_bound = ");
7782 print_decu (loop->nb_iterations_upper_bound, file);
7783 }
7784 if (loop->any_likely_upper_bound)
7785 {
7786 fprintf (file, ", likely_upper_bound = ");
7787 print_decu (loop->nb_iterations_likely_upper_bound, file);
7788 }
7789
7790 if (loop->any_estimate)
7791 {
7792 fprintf (file, ", estimate = ");
7793 print_decu (loop->nb_iterations_estimate, file);
7794 }
7795 fprintf (file, ")\n");
7796
7797 /* Print loop's body. */
7798 if (verbosity >= 1)
7799 {
7800 fprintf (file, "%s{\n", s_indent);
7801 FOR_EACH_BB_FN (bb, cfun)
7802 if (bb->loop_father == loop)
7803 print_loops_bb (file, bb, indent, verbosity);
7804
7805 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7806 fprintf (file, "%s}\n", s_indent);
7807 }
7808 }
7809
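/* Editor's illustration of the header line printed above (values
   will vary):

     loop_1 (header = 3, latch = 5, niter = n_4, upper_bound = 99)

   At VERBOSITY >= 1 the loop body follows, wrapped in braces.  */
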
7810 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7811 spaces. Depending on the VERBOSITY level, this outputs the contents
7812 of the loop, or just its structure. */
7813
7814 static void
7815 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7816 int verbosity)
7817 {
7818 if (loop == NULL)
7819 return;
7820
7821 print_loop (file, loop, indent, verbosity);
7822 print_loop_and_siblings (file, loop->next, indent, verbosity);
7823 }
7824
7825 /* Follow a CFG edge from the entry point of the program, and on entry
7826 of a loop, pretty print the loop structure on FILE. */
7827
7828 void
7829 print_loops (FILE *file, int verbosity)
7830 {
7831 basic_block bb;
7832
7833 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7834 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7835 if (bb && bb->loop_father)
7836 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7837 }
7838
7839 /* Dump a loop. */
7840
7841 DEBUG_FUNCTION void
7842 debug (struct loop &ref)
7843 {
7844 print_loop (stderr, &ref, 0, /*verbosity*/0);
7845 }
7846
7847 DEBUG_FUNCTION void
7848 debug (struct loop *ptr)
7849 {
7850 if (ptr)
7851 debug (*ptr);
7852 else
7853 fprintf (stderr, "<nil>\n");
7854 }
7855
7856 /* Dump a loop verbosely. */
7857
7858 DEBUG_FUNCTION void
7859 debug_verbose (struct loop &ref)
7860 {
7861 print_loop (stderr, &ref, 0, /*verbosity*/3);
7862 }
7863
7864 DEBUG_FUNCTION void
7865 debug_verbose (struct loop *ptr)
7866 {
7867 if (ptr)
7868 debug_verbose (*ptr);
7869 else
7870 fprintf (stderr, "<nil>\n");
7871 }
7872
7873
7874 /* Debug the loop structure at tree level, at some VERBOSITY level. */
7875
7876 DEBUG_FUNCTION void
7877 debug_loops (int verbosity)
7878 {
7879 print_loops (stderr, verbosity);
7880 }
7881
7882 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7883
7884 DEBUG_FUNCTION void
7885 debug_loop (struct loop *loop, int verbosity)
7886 {
7887 print_loop (stderr, loop, 0, verbosity);
7888 }
7889
7890 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7891 level. */
7892
7893 DEBUG_FUNCTION void
7894 debug_loop_num (unsigned num, int verbosity)
7895 {
7896 debug_loop (get_loop (cfun, num), verbosity);
7897 }
7898
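/* Editor's note: these entry points are meant for interactive use,
   e.g. from a gdb session:

     (gdb) call debug_loops (3)
     (gdb) call debug_loop_num (1, 2)  */
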
7899 /* Return true if BB ends with a call, possibly followed by some
7900 instructions that must stay with the call. Return false
7901 otherwise. */
7902
7903 static bool
7904 gimple_block_ends_with_call_p (basic_block bb)
7905 {
7906 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7907 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7908 }
7909
7910
7911 /* Return true if BB ends with a conditional branch. Return false
7912 otherwise. */
7913
7914 static bool
7915 gimple_block_ends_with_condjump_p (const_basic_block bb)
7916 {
7917 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7918 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7919 }
7920
7921
7922 /* Return true if we need to add fake edge to exit at statement T.
7923 Helper function for gimple_flow_call_edges_add. */
7924
7925 static bool
7926 need_fake_edge_p (gimple *t)
7927 {
7928 tree fndecl = NULL_TREE;
7929 int call_flags = 0;
7930
7931 /* NORETURN and LONGJMP calls already have an edge to exit.
7932 CONST and PURE calls do not need one.
7933 We don't currently check for CONST and PURE here, although
7934 it would be a good idea, because those attributes are
7935 figured out from the RTL in mark_constant_function, and
7936 the counter incrementation code from -fprofile-arcs
7937 leads to different results from -fbranch-probabilities. */
7938 if (is_gimple_call (t))
7939 {
7940 fndecl = gimple_call_fndecl (t);
7941 call_flags = gimple_call_flags (t);
7942 }
7943
7944 if (is_gimple_call (t)
7945 && fndecl
7946 && DECL_BUILT_IN (fndecl)
7947 && (call_flags & ECF_NOTHROW)
7948 && !(call_flags & ECF_RETURNS_TWICE)
7949 /* fork() doesn't really return twice, but wrapping it in
7950 __gcov_fork(), which calls __gcov_flush() and clears the
7951 counters before forking, has the same effect as returning
7952 twice. Force a fake edge. */
7953 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7954 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7955 return false;
7956
7957 if (is_gimple_call (t))
7958 {
7959 edge_iterator ei;
7960 edge e;
7961 basic_block bb;
7962
7963 if (!(call_flags & ECF_NORETURN))
7964 return true;
7965
7966 bb = gimple_bb (t);
7967 FOR_EACH_EDGE (e, ei, bb->succs)
7968 if ((e->flags & EDGE_FAKE) == 0)
7969 return true;
7970 }
7971
7972 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7973 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7974 return true;
7975
7976 return false;
7977 }
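/* For example (an illustrative sketch; f and x are hypothetical), in

     void g (void) { f (); x = 1; }

   if f may not return normally -- say it can call exit -- then control
   can leave g at the call, so the block profiler needs a fake edge from
   the call's block to EXIT; need_fake_edge_p returns true for such
   calls.  */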
7978
7979
7980 /* Add fake edges to the function exit for any non-constant and
7981 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7982 for volatile inline assembly, in the bitmap of blocks specified by
7983 BLOCKS, or in the whole CFG if BLOCKS is zero. Return the number
7984 of blocks that were split.
7985
7986 The goal is to expose cases in which entering a basic block does
7987 not imply that all subsequent instructions must be executed. */
7988
7989 static int
7990 gimple_flow_call_edges_add (sbitmap blocks)
7991 {
7992 int i;
7993 int blocks_split = 0;
7994 int last_bb = last_basic_block_for_fn (cfun);
7995 bool check_last_block = false;
7996
7997 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7998 return 0;
7999
8000 if (! blocks)
8001 check_last_block = true;
8002 else
8003 check_last_block = bitmap_bit_p (blocks,
8004 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8005
8006 /* In the last basic block, before epilogue generation, there will be
8007 a fallthru edge to EXIT. Special care is required if the last insn
8008 of the last basic block is a call because make_edge folds duplicate
8009 edges, which would result in the fallthru edge also being marked
8010 fake, which would result in the fallthru edge being removed by
8011 remove_fake_edges, which would result in an invalid CFG.
8012
8013 Moreover, we can't elide the outgoing fake edge, since the block
8014 profiler needs to take this into account in order to solve the minimal
8015 spanning tree in the case that the call doesn't return.
8016
8017 Handle this by adding a dummy instruction in a new last basic block. */
8018 if (check_last_block)
8019 {
8020 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8021 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8022 gimple *t = NULL;
8023
8024 if (!gsi_end_p (gsi))
8025 t = gsi_stmt (gsi);
8026
8027 if (t && need_fake_edge_p (t))
8028 {
8029 edge e;
8030
8031 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8032 if (e)
8033 {
8034 gsi_insert_on_edge (e, gimple_build_nop ());
8035 gsi_commit_edge_inserts ();
8036 }
8037 }
8038 }
8039
8040 /* Now add fake edges to the function exit for any non-constant
8041 calls, since there is no way that we can determine if they will
8042 return or not... */
8043 for (i = 0; i < last_bb; i++)
8044 {
8045 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8046 gimple_stmt_iterator gsi;
8047 gimple *stmt, *last_stmt;
8048
8049 if (!bb)
8050 continue;
8051
8052 if (blocks && !bitmap_bit_p (blocks, i))
8053 continue;
8054
8055 gsi = gsi_last_nondebug_bb (bb);
8056 if (!gsi_end_p (gsi))
8057 {
8058 last_stmt = gsi_stmt (gsi);
8059 do
8060 {
8061 stmt = gsi_stmt (gsi);
8062 if (need_fake_edge_p (stmt))
8063 {
8064 edge e;
8065
8066 /* The handling above of the final block before the
8067 epilogue should be enough to verify that there is
8068 no edge to the exit block in CFG already.
8069 Calling make_edge in such case would cause us to
8070 mark that edge as fake and remove it later. */
8071 if (flag_checking && stmt == last_stmt)
8072 {
8073 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8074 gcc_assert (e == NULL);
8075 }
8076
8077 /* Note that the following may create a new basic block
8078 and renumber the existing basic blocks. */
8079 if (stmt != last_stmt)
8080 {
8081 e = split_block (bb, stmt);
8082 if (e)
8083 blocks_split++;
8084 }
8085 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8086 }
8087 gsi_prev (&gsi);
8088 }
8089 while (!gsi_end_p (gsi));
8090 }
8091 }
8092
8093 if (blocks_split)
8094 verify_flow_info ();
8095
8096 return blocks_split;
8097 }
8098
8099 /* Removes edge E and all the blocks dominated by it, and updates dominance
8100 information. The IL in E->src needs to be updated separately.
8101 If dominance info is not available, only the edge E is removed. */
8102
8103 void
8104 remove_edge_and_dominated_blocks (edge e)
8105 {
8106 vec<basic_block> bbs_to_remove = vNULL;
8107 vec<basic_block> bbs_to_fix_dom = vNULL;
8108 bitmap df, df_idom;
8109 edge f;
8110 edge_iterator ei;
8111 bool none_removed = false;
8112 unsigned i;
8113 basic_block bb, dbb;
8114 bitmap_iterator bi;
8115
8116 /* Removing a path inside a non-root loop may change loop ownership
8117 of blocks or remove loops entirely. Mark loops for fixup. */
8118 if (current_loops
8119 && loop_outer (e->src->loop_father) != NULL
8120 && e->src->loop_father == e->dest->loop_father)
8121 loops_state_set (LOOPS_NEED_FIXUP);
8122
8123 if (!dom_info_available_p (CDI_DOMINATORS))
8124 {
8125 remove_edge (e);
8126 return;
8127 }
8128
8129 /* No updating is needed for edges to exit. */
8130 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8131 {
8132 if (cfgcleanup_altered_bbs)
8133 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8134 remove_edge (e);
8135 return;
8136 }
8137
8138 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8139 that is not dominated by E->dest, then this set is empty. Otherwise,
8140 all the basic blocks dominated by E->dest are removed.
8141
8142 Also, to DF_IDOM we store the immediate dominators of the blocks in
8143 the dominance frontier of E (i.e., of the successors of the
8144 removed blocks, if there are any, and of E->dest otherwise). */
8145 FOR_EACH_EDGE (f, ei, e->dest->preds)
8146 {
8147 if (f == e)
8148 continue;
8149
8150 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8151 {
8152 none_removed = true;
8153 break;
8154 }
8155 }
8156
8157 df = BITMAP_ALLOC (NULL);
8158 df_idom = BITMAP_ALLOC (NULL);
8159
8160 if (none_removed)
8161 bitmap_set_bit (df_idom,
8162 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8163 else
8164 {
8165 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8166 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8167 {
8168 FOR_EACH_EDGE (f, ei, bb->succs)
8169 {
8170 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8171 bitmap_set_bit (df, f->dest->index);
8172 }
8173 }
8174 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8175 bitmap_clear_bit (df, bb->index);
8176
8177 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8178 {
8179 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8180 bitmap_set_bit (df_idom,
8181 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8182 }
8183 }
8184
8185 if (cfgcleanup_altered_bbs)
8186 {
8187 /* Record the set of the altered basic blocks. */
8188 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8189 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8190 }
8191
8192 /* Remove E and the cancelled blocks. */
8193 if (none_removed)
8194 remove_edge (e);
8195 else
8196 {
8197 /* Walk backwards so as to get a chance to substitute all
8198 released DEFs into debug stmts. See
8199 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8200 details. */
8201 for (i = bbs_to_remove.length (); i-- > 0; )
8202 delete_basic_block (bbs_to_remove[i]);
8203 }
8204
8205 /* Update the dominance information. The immediate dominator may change only
8206 for blocks whose immediate dominator belongs to DF_IDOM:
8207
8208 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8209 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8210 Z dominates X after the removal. Before removal, there exists a path P
8211 from Y to X that avoids Z. Let F be the last edge on P that is
8212 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8213 dominates W, and because of P, Z does not dominate W), and W belongs to
8214 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
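/* As a small illustrative instance of the argument above: with edges
   Y->A, Y->Z, A->X and Z->X, removing the edge Y->A deletes A (all
   paths to A go through the removed edge), X lies in the dominance
   frontier of the removed region, and idom(X) changes from Y to Z;
   Y = idom(X) is exactly what was recorded in DF_IDOM.  */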
8215 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8216 {
8217 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8218 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8219 dbb;
8220 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8221 bbs_to_fix_dom.safe_push (dbb);
8222 }
8223
8224 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8225
8226 BITMAP_FREE (df);
8227 BITMAP_FREE (df_idom);
8228 bbs_to_remove.release ();
8229 bbs_to_fix_dom.release ();
8230 }
8231
8232 /* Purge dead EH edges from basic block BB. */
8233
8234 bool
8235 gimple_purge_dead_eh_edges (basic_block bb)
8236 {
8237 bool changed = false;
8238 edge e;
8239 edge_iterator ei;
8240 gimple *stmt = last_stmt (bb);
8241
8242 if (stmt && stmt_can_throw_internal (stmt))
8243 return false;
8244
8245 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8246 {
8247 if (e->flags & EDGE_EH)
8248 {
8249 remove_edge_and_dominated_blocks (e);
8250 changed = true;
8251 }
8252 else
8253 ei_next (&ei);
8254 }
8255
8256 return changed;
8257 }
8258
8259 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8260
8261 bool
8262 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8263 {
8264 bool changed = false;
8265 unsigned i;
8266 bitmap_iterator bi;
8267
8268 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8269 {
8270 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8271
8272 /* Earlier gimple_purge_dead_eh_edges could have removed
8273 this basic block already. */
8274 gcc_assert (bb || changed);
8275 if (bb != NULL)
8276 changed |= gimple_purge_dead_eh_edges (bb);
8277 }
8278
8279 return changed;
8280 }
8281
8282 /* Purge dead abnormal call edges from basic block BB. */
8283
8284 bool
8285 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8286 {
8287 bool changed = false;
8288 edge e;
8289 edge_iterator ei;
8290 gimple *stmt = last_stmt (bb);
8291
8292 if (!cfun->has_nonlocal_label
8293 && !cfun->calls_setjmp)
8294 return false;
8295
8296 if (stmt && stmt_can_make_abnormal_goto (stmt))
8297 return false;
8298
8299 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8300 {
8301 if (e->flags & EDGE_ABNORMAL)
8302 {
8303 if (e->flags & EDGE_FALLTHRU)
8304 e->flags &= ~EDGE_ABNORMAL;
8305 else
8306 remove_edge_and_dominated_blocks (e);
8307 changed = true;
8308 }
8309 else
8310 ei_next (&ei);
8311 }
8312
8313 return changed;
8314 }
8315
8316 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8317
8318 bool
8319 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8320 {
8321 bool changed = false;
8322 unsigned i;
8323 bitmap_iterator bi;
8324
8325 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8326 {
8327 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8328
8329 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8330 this basic block already. */
8331 gcc_assert (bb || changed);
8332 if (bb != NULL)
8333 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8334 }
8335
8336 return changed;
8337 }
8338
8339 /* This function is called whenever a new edge is created or
8340 redirected. */
8341
8342 static void
8343 gimple_execute_on_growing_pred (edge e)
8344 {
8345 basic_block bb = e->dest;
8346
8347 if (!gimple_seq_empty_p (phi_nodes (bb)))
8348 reserve_phi_args_for_new_edge (bb);
8349 }
8350
8351 /* This function is called immediately before edge E is removed from
8352 the edge vector E->dest->preds. */
8353
8354 static void
8355 gimple_execute_on_shrinking_pred (edge e)
8356 {
8357 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8358 remove_phi_args (e);
8359 }
8360
8361 /*---------------------------------------------------------------------------
8362 Helper functions for Loop versioning
8363 ---------------------------------------------------------------------------*/
8364
8365 /* Adjust phi nodes for the 'first' basic block. The 'second' basic
8366 block is a copy of 'first'. Both of them are dominated by the
8367 'new_head' basic block. When 'new_head' was created by splitting
8368 'second's incoming edge, it received phi arguments on that edge from
8369 split_edge(). Later, an additional edge 'e' was created to connect
8370 'new_head' and 'first'. This routine now adds to edge 'e' the phi
8371 args that the new_head-to-second edge received from the splitting. */
8372
8373 static void
8374 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8375 basic_block new_head, edge e)
8376 {
8377 gphi *phi1, *phi2;
8378 gphi_iterator psi1, psi2;
8379 tree def;
8380 edge e2 = find_edge (new_head, second);
8381
8382 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8383 edge, we should always have an edge from NEW_HEAD to SECOND. */
8384 gcc_assert (e2 != NULL);
8385
8386 /* Walk all phi nodes of the 'second' basic block and add phi args to
8387 edge 'e' for the 'first' head. PHI args are always in the correct order. */
8388
8389 for (psi2 = gsi_start_phis (second),
8390 psi1 = gsi_start_phis (first);
8391 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8392 gsi_next (&psi2), gsi_next (&psi1))
8393 {
8394 phi1 = psi1.phi ();
8395 phi2 = psi2.phi ();
8396 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8397 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8398 }
8399 }
8400
8401
8402 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8403 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8404 the destination of the ELSE part. */
8405
8406 static void
8407 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8408 basic_block second_head ATTRIBUTE_UNUSED,
8409 basic_block cond_bb, void *cond_e)
8410 {
8411 gimple_stmt_iterator gsi;
8412 gimple *new_cond_expr;
8413 tree cond_expr = (tree) cond_e;
8414 edge e0;
8415
8416 /* Build the new conditional expression. */
8417 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8418 NULL_TREE, NULL_TREE);
8419
8420 /* Add new cond in cond_bb. */
8421 gsi = gsi_last_bb (cond_bb);
8422 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8423
8424 /* Adjust edges appropriately to connect new head with first head
8425 as well as second head. */
8426 e0 = single_succ_edge (cond_bb);
8427 e0->flags &= ~EDGE_FALLTHRU;
8428 e0->flags |= EDGE_FALSE_VALUE;
8429 }
8430
8431
8432 /* Do book-keeping of basic block BB for the profile consistency
8433 checker. If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS
8434 is 1, do post-pass accounting. Store the counts in RECORD. */
8435 static void
8436 gimple_account_profile_record (basic_block bb, int after_pass,
8437 struct profile_record *record)
8438 {
8439 gimple_stmt_iterator i;
8440 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8441 {
8442 record->size[after_pass]
8443 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8444 if (profile_status_for_fn (cfun) == PROFILE_READ)
8445 record->time[after_pass]
8446 += estimate_num_insns (gsi_stmt (i),
8447 &eni_time_weights) * bb->count;
8448 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8449 record->time[after_pass]
8450 += estimate_num_insns (gsi_stmt (i),
8451 &eni_time_weights) * bb->frequency;
8452 }
8453 }
8454
8455 struct cfg_hooks gimple_cfg_hooks = {
8456 "gimple",
8457 gimple_verify_flow_info,
8458 gimple_dump_bb, /* dump_bb */
8459 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8460 create_bb, /* create_basic_block */
8461 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8462 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8463 gimple_can_remove_branch_p, /* can_remove_branch_p */
8464 remove_bb, /* delete_basic_block */
8465 gimple_split_block, /* split_block */
8466 gimple_move_block_after, /* move_block_after */
8467 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8468 gimple_merge_blocks, /* merge_blocks */
8469 gimple_predict_edge, /* predict_edge */
8470 gimple_predicted_by_p, /* predicted_by_p */
8471 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8472 gimple_duplicate_bb, /* duplicate_block */
8473 gimple_split_edge, /* split_edge */
8474 gimple_make_forwarder_block, /* make_forwarder_block */
8475 NULL, /* tidy_fallthru_edge */
8476 NULL, /* force_nonfallthru */
8477 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8478 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8479 gimple_flow_call_edges_add, /* flow_call_edges_add */
8480 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8481 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8482 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8483 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8484 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8485 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8486 flush_pending_stmts, /* flush_pending_stmts */
8487 gimple_empty_block_p, /* block_empty_p */
8488 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8489 gimple_account_profile_record,
8490 };
8491
8492
8493 /* Split all critical edges. An edge is critical if its source block has multiple successors and its destination block has multiple predecessors. */
8494
8495 unsigned int
8496 split_critical_edges (void)
8497 {
8498 basic_block bb;
8499 edge e;
8500 edge_iterator ei;
8501
8502 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8503 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8504 mappings around the calls to split_edge. */
8505 start_recording_case_labels ();
8506 FOR_ALL_BB_FN (bb, cfun)
8507 {
8508 FOR_EACH_EDGE (e, ei, bb->succs)
8509 {
8510 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8511 split_edge (e);
8512 /* PRE inserts statements on edges and expects that, since
8513 split_critical_edges was done beforehand, committing edge
8514 insertions will not split more edges. In addition to critical
8515 edges we must split edges whose source block ends in a control
8516 flow statement, such as RESX, and whose destination has multiple
8517 predecessors, PHI nodes, or is the exit block. Split them too.
8518 This matches the logic in gimple_find_edge_insert_loc. */
8519 else if ((!single_pred_p (e->dest)
8520 || !gimple_seq_empty_p (phi_nodes (e->dest))
8521 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8522 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8523 && !(e->flags & EDGE_ABNORMAL))
8524 {
8525 gimple_stmt_iterator gsi;
8526
8527 gsi = gsi_last_bb (e->src);
8528 if (!gsi_end_p (gsi)
8529 && stmt_ends_bb_p (gsi_stmt (gsi))
8530 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8531 && !gimple_call_builtin_p (gsi_stmt (gsi),
8532 BUILT_IN_RETURN)))
8533 split_edge (e);
8534 }
8535 }
8536 }
8537 end_recording_case_labels ();
8538 return 0;
8539 }
8540
8541 namespace {
8542
8543 const pass_data pass_data_split_crit_edges =
8544 {
8545 GIMPLE_PASS, /* type */
8546 "crited", /* name */
8547 OPTGROUP_NONE, /* optinfo_flags */
8548 TV_TREE_SPLIT_EDGES, /* tv_id */
8549 PROP_cfg, /* properties_required */
8550 PROP_no_crit_edges, /* properties_provided */
8551 0, /* properties_destroyed */
8552 0, /* todo_flags_start */
8553 0, /* todo_flags_finish */
8554 };
8555
8556 class pass_split_crit_edges : public gimple_opt_pass
8557 {
8558 public:
8559 pass_split_crit_edges (gcc::context *ctxt)
8560 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8561 {}
8562
8563 /* opt_pass methods: */
8564 virtual unsigned int execute (function *) { return split_critical_edges (); }
8565
8566 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8567 }; // class pass_split_crit_edges
8568
8569 } // anon namespace
8570
8571 gimple_opt_pass *
8572 make_pass_split_crit_edges (gcc::context *ctxt)
8573 {
8574 return new pass_split_crit_edges (ctxt);
8575 }
8576
8577
8578 /* Insert COND expression, which must be a GIMPLE_COND, after STMT
8579 in basic block BB, splitting the block as needed and creating a
8580 new conditionally executed basic block. Return the created
8581 basic block. */
8582 basic_block
8583 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8584 {
8585 edge fall = split_block (bb, stmt);
8586 gimple_stmt_iterator iter = gsi_last_bb (bb);
8587 basic_block new_bb;
8588
8589 /* Insert cond statement. */
8590 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8591 if (gsi_end_p (iter))
8592 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8593 else
8594 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8595
8596 /* Create conditionally executed block. */
8597 new_bb = create_empty_bb (bb);
8598 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8599 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8600
8601 /* Fix edge for split bb. */
8602 fall->flags = EDGE_FALSE_VALUE;
8603
8604 /* Update dominance info. */
8605 if (dom_info_available_p (CDI_DOMINATORS))
8606 {
8607 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8608 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8609 }
8610
8611 /* Update loop info. */
8612 if (current_loops)
8613 add_bb_to_loop (new_bb, bb->loop_father);
8614
8615 return new_bb;
8616 }
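/* A minimal usage sketch for insert_cond_bb (illustrative only; BB,
   STMT and FLAG are assumed to exist in the caller):

     gcond *cond = gimple_build_cond (NE_EXPR, flag, integer_zero_node,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (bb, stmt, cond);

   Afterwards THEN_BB executes only when FLAG != 0, and control rejoins
   the code that originally followed STMT.  */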
8617
8618 /* Build a ternary operation and gimplify it. Emit code before GSI.
8619 Return the gimple_val holding the result. */
8620
8621 tree
8622 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8623 tree type, tree a, tree b, tree c)
8624 {
8625 tree ret;
8626 location_t loc = gimple_location (gsi_stmt (*gsi));
8627
8628 ret = fold_build3_loc (loc, code, type, a, b, c);
8629 STRIP_NOPS (ret);
8630
8631 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8632 GSI_SAME_STMT);
8633 }
8634
8635 /* Build a binary operation and gimplify it. Emit code before GSI.
8636 Return the gimple_val holding the result. */
8637
8638 tree
8639 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8640 tree type, tree a, tree b)
8641 {
8642 tree ret;
8643
8644 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8645 STRIP_NOPS (ret);
8646
8647 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8648 GSI_SAME_STMT);
8649 }
8650
8651 /* Build a unary operation and gimplify it. Emit code before GSI.
8652 Return the gimple_val holding the result. */
8653
8654 tree
8655 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8656 tree a)
8657 {
8658 tree ret;
8659
8660 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8661 STRIP_NOPS (ret);
8662
8663 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8664 GSI_SAME_STMT);
8665 }
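/* As an illustrative sketch (A, B, C and TYPE assumed from context),
   computing a + b * c before GSI with these helpers:

     tree t = gimplify_build2 (gsi, MULT_EXPR, type, b, c);
     tree r = gimplify_build2 (gsi, PLUS_EXPR, type, a, t);

   Each call folds the expression and emits any needed statements, so
   the returned tree is a valid gimple value.  */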
8666
8667
8668 \f
8669 /* Given a basic block B which ends with a conditional and has
8670 precisely two successors, determine which of the edges is taken if
8671 the conditional is true and which is taken if the conditional is
8672 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8673
8674 void
8675 extract_true_false_edges_from_block (basic_block b,
8676 edge *true_edge,
8677 edge *false_edge)
8678 {
8679 edge e = EDGE_SUCC (b, 0);
8680
8681 if (e->flags & EDGE_TRUE_VALUE)
8682 {
8683 *true_edge = e;
8684 *false_edge = EDGE_SUCC (b, 1);
8685 }
8686 else
8687 {
8688 *false_edge = e;
8689 *true_edge = EDGE_SUCC (b, 1);
8690 }
8691 }
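/* A hedged usage sketch (COND_BB is assumed to end in a GIMPLE_COND):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge,
                                          &false_edge);

   after which TRUE_EDGE->dest is the block reached when the condition
   holds and FALSE_EDGE->dest the block reached when it does not.  */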
8692
8693
8694 /* From a controlling predicate in the immediate dominator DOM of
8695 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8696 predicate evaluates to true and false and store them to
8697 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8698 they are non-NULL. Returns true if the edges can be determined,
8699 else returns false. */
8700
8701 bool
8702 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8703 edge *true_controlled_edge,
8704 edge *false_controlled_edge)
8705 {
8706 basic_block bb = phiblock;
8707 edge true_edge, false_edge, tem;
8708 edge e0 = NULL, e1 = NULL;
8709
8710 /* We have to verify that one edge into the PHI node is dominated
8711 by the true edge of the predicate block and the other edge
8712 dominated by the false edge. This ensures that the PHI argument
8713 we are going to take is completely determined by the path we
8714 take from the predicate block.
8715 We can only use BB dominance checks below if the destination of
8716 the true/false edges are dominated by their edge, thus only
8717 have a single predecessor. */
8718 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8719 tem = EDGE_PRED (bb, 0);
8720 if (tem == true_edge
8721 || (single_pred_p (true_edge->dest)
8722 && (tem->src == true_edge->dest
8723 || dominated_by_p (CDI_DOMINATORS,
8724 tem->src, true_edge->dest))))
8725 e0 = tem;
8726 else if (tem == false_edge
8727 || (single_pred_p (false_edge->dest)
8728 && (tem->src == false_edge->dest
8729 || dominated_by_p (CDI_DOMINATORS,
8730 tem->src, false_edge->dest))))
8731 e1 = tem;
8732 else
8733 return false;
8734 tem = EDGE_PRED (bb, 1);
8735 if (tem == true_edge
8736 || (single_pred_p (true_edge->dest)
8737 && (tem->src == true_edge->dest
8738 || dominated_by_p (CDI_DOMINATORS,
8739 tem->src, true_edge->dest))))
8740 e0 = tem;
8741 else if (tem == false_edge
8742 || (single_pred_p (false_edge->dest)
8743 && (tem->src == false_edge->dest
8744 || dominated_by_p (CDI_DOMINATORS,
8745 tem->src, false_edge->dest))))
8746 e1 = tem;
8747 else
8748 return false;
8749 if (!e0 || !e1)
8750 return false;
8751
8752 if (true_controlled_edge)
8753 *true_controlled_edge = e0;
8754 if (false_controlled_edge)
8755 *false_controlled_edge = e1;
8756
8757 return true;
8758 }
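/* An illustrative sketch (PHI in PHIBLOCK and its immediate dominator
   DOM are assumed): a pass can use this to pick the PHI argument that
   is selected when the controlling predicate is true:

     edge te, fe;
     if (extract_true_false_controlled_edges (dom, phiblock, &te, &fe))
       {
         tree true_arg = PHI_ARG_DEF (phi, te->dest_idx);
       }

   TRUE_ARG is then the value PHI takes on the true-controlled path.  */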
8759
8760
8761
8762 /* Emit return warnings. */
8763
8764 namespace {
8765
8766 const pass_data pass_data_warn_function_return =
8767 {
8768 GIMPLE_PASS, /* type */
8769 "*warn_function_return", /* name */
8770 OPTGROUP_NONE, /* optinfo_flags */
8771 TV_NONE, /* tv_id */
8772 PROP_cfg, /* properties_required */
8773 0, /* properties_provided */
8774 0, /* properties_destroyed */
8775 0, /* todo_flags_start */
8776 0, /* todo_flags_finish */
8777 };
8778
8779 class pass_warn_function_return : public gimple_opt_pass
8780 {
8781 public:
8782 pass_warn_function_return (gcc::context *ctxt)
8783 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8784 {}
8785
8786 /* opt_pass methods: */
8787 virtual unsigned int execute (function *);
8788
8789 }; // class pass_warn_function_return
8790
8791 unsigned int
8792 pass_warn_function_return::execute (function *fun)
8793 {
8794 source_location location;
8795 gimple *last;
8796 edge e;
8797 edge_iterator ei;
8798
8799 if (!targetm.warn_func_return (fun->decl))
8800 return 0;
8801
8802 /* If we have a path to EXIT, then we do return. */
8803 if (TREE_THIS_VOLATILE (fun->decl)
8804 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8805 {
8806 location = UNKNOWN_LOCATION;
8807 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8808 {
8809 last = last_stmt (e->src);
8810 if ((gimple_code (last) == GIMPLE_RETURN
8811 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8812 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8813 break;
8814 }
8815 if (location == UNKNOWN_LOCATION)
8816 location = cfun->function_end_locus;
8817 warning_at (location, 0, "%<noreturn%> function does return");
8818 }
8819
8820 /* If we see "return;" in some basic block, then we do reach the end
8821 without returning a value. */
8822 else if (warn_return_type
8823 && !TREE_NO_WARNING (fun->decl)
8824 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8825 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8826 {
8827 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8828 {
8829 gimple *last = last_stmt (e->src);
8830 greturn *return_stmt = dyn_cast <greturn *> (last);
8831 if (return_stmt
8832 && gimple_return_retval (return_stmt) == NULL
8833 && !gimple_no_warning_p (last))
8834 {
8835 location = gimple_location (last);
8836 if (location == UNKNOWN_LOCATION)
8837 location = fun->function_end_locus;
8838 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8839 TREE_NO_WARNING (fun->decl) = 1;
8840 break;
8841 }
8842 }
8843 }
8844 return 0;
8845 }
8846
8847 } // anon namespace
8848
8849 gimple_opt_pass *
8850 make_pass_warn_function_return (gcc::context *ctxt)
8851 {
8852 return new pass_warn_function_return (ctxt);
8853 }
8854
8855 /* Walk a gimplified function and warn about calls whose return value
8856 is ignored and whose callee has attribute((warn_unused_result)) set.
8857 This is done before inlining, so we don't have to worry about that. */
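/* For instance (an illustrative source fragment, not part of GCC), with

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   the call to f in g is gimplified to a GIMPLE_CALL without an LHS, and
   the walk below reports it with -Wunused-result.  */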
8858
8859 static void
8860 do_warn_unused_result (gimple_seq seq)
8861 {
8862 tree fdecl, ftype;
8863 gimple_stmt_iterator i;
8864
8865 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8866 {
8867 gimple *g = gsi_stmt (i);
8868
8869 switch (gimple_code (g))
8870 {
8871 case GIMPLE_BIND:
8872 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8873 break;
8874 case GIMPLE_TRY:
8875 do_warn_unused_result (gimple_try_eval (g));
8876 do_warn_unused_result (gimple_try_cleanup (g));
8877 break;
8878 case GIMPLE_CATCH:
8879 do_warn_unused_result (gimple_catch_handler (
8880 as_a <gcatch *> (g)));
8881 break;
8882 case GIMPLE_EH_FILTER:
8883 do_warn_unused_result (gimple_eh_filter_failure (g));
8884 break;
8885
8886 case GIMPLE_CALL:
8887 if (gimple_call_lhs (g))
8888 break;
8889 if (gimple_call_internal_p (g))
8890 break;
8891
8892 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8893 LHS. All calls whose value is ignored should be
8894 represented like this. Look for the attribute. */
8895 fdecl = gimple_call_fndecl (g);
8896 ftype = gimple_call_fntype (g);
8897
8898 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8899 {
8900 location_t loc = gimple_location (g);
8901
8902 if (fdecl)
8903 warning_at (loc, OPT_Wunused_result,
8904 "ignoring return value of %qD, "
8905 "declared with attribute warn_unused_result",
8906 fdecl);
8907 else
8908 warning_at (loc, OPT_Wunused_result,
8909 "ignoring return value of function "
8910 "declared with attribute warn_unused_result");
8911 }
8912 break;
8913
8914 default:
8915 /* Not a container, not a call, or a call whose value is used. */
8916 break;
8917 }
8918 }
8919 }
8920
8921 namespace {
8922
8923 const pass_data pass_data_warn_unused_result =
8924 {
8925 GIMPLE_PASS, /* type */
8926 "*warn_unused_result", /* name */
8927 OPTGROUP_NONE, /* optinfo_flags */
8928 TV_NONE, /* tv_id */
8929 PROP_gimple_any, /* properties_required */
8930 0, /* properties_provided */
8931 0, /* properties_destroyed */
8932 0, /* todo_flags_start */
8933 0, /* todo_flags_finish */
8934 };
8935
8936 class pass_warn_unused_result : public gimple_opt_pass
8937 {
8938 public:
8939 pass_warn_unused_result (gcc::context *ctxt)
8940 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8941 {}
8942
8943 /* opt_pass methods: */
8944 virtual bool gate (function *) { return flag_warn_unused_result; }
8945 virtual unsigned int execute (function *)
8946 {
8947 do_warn_unused_result (gimple_body (current_function_decl));
8948 return 0;
8949 }
8950
8951 }; // class pass_warn_unused_result
8952
8953 } // anon namespace
8954
8955 gimple_opt_pass *
8956 make_pass_warn_unused_result (gcc::context *ctxt)
8957 {
8958 return new pass_warn_unused_result (ctxt);
8959 }
8960
8961 /* IPA passes, compilation of earlier functions or inlining
8962 might have changed some properties, such as marking functions
8963 nothrow, pure, const or noreturn.
8964 Remove redundant edges and basic blocks, and create new ones if necessary.
8965 
8966 This pass can't be executed as a standalone pass from the pass manager,
8967 because between inlining and this fixup verify_flow_info would fail. */
8968
8969 unsigned int
8970 execute_fixup_cfg (void)
8971 {
8972 basic_block bb;
8973 gimple_stmt_iterator gsi;
8974 int todo = 0;
8975 gcov_type count_scale;
8976 edge e;
8977 edge_iterator ei;
8978
8979 count_scale
8980 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8981 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8982
8983 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8984 cgraph_node::get (current_function_decl)->count;
8985 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8986 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8987 count_scale);
8988
8989 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8990 e->count = apply_scale (e->count, count_scale);
8991
8992 FOR_EACH_BB_FN (bb, cfun)
8993 {
8994 bb->count = apply_scale (bb->count, count_scale);
8995 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8996 {
8997 gimple *stmt = gsi_stmt (gsi);
8998 tree decl = is_gimple_call (stmt)
8999 ? gimple_call_fndecl (stmt)
9000 : NULL;
9001 if (decl)
9002 {
9003 int flags = gimple_call_flags (stmt);
9004 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9005 {
9006 if (gimple_purge_dead_abnormal_call_edges (bb))
9007 todo |= TODO_cleanup_cfg;
9008
9009 if (gimple_in_ssa_p (cfun))
9010 {
9011 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9012 update_stmt (stmt);
9013 }
9014 }
9015
9016 if (flags & ECF_NORETURN
9017 && fixup_noreturn_call (stmt))
9018 todo |= TODO_cleanup_cfg;
9019 }
9020
9021 /* Remove stores to variables we marked write-only.
9022 Keep the access when the store has side effects, i.e. when the
9023 source is volatile. */
9024 if (gimple_store_p (stmt)
9025 && !gimple_has_side_effects (stmt))
9026 {
9027 tree lhs = get_base_address (gimple_get_lhs (stmt));
9028
9029 if (TREE_CODE (lhs) == VAR_DECL
9030 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9031 && varpool_node::get (lhs)->writeonly)
9032 {
9033 unlink_stmt_vdef (stmt);
9034 gsi_remove (&gsi, true);
9035 release_defs (stmt);
9036 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9037 continue;
9038 }
9039 }
9040 /* For calls we can simply remove the LHS when it is known
9041 to be write-only. */
9042 if (is_gimple_call (stmt)
9043 && gimple_get_lhs (stmt))
9044 {
9045 tree lhs = get_base_address (gimple_get_lhs (stmt));
9046
9047 if (TREE_CODE (lhs) == VAR_DECL
9048 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9049 && varpool_node::get (lhs)->writeonly)
9050 {
9051 gimple_call_set_lhs (stmt, NULL);
9052 update_stmt (stmt);
9053 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9054 }
9055 }
9056
9057 if (maybe_clean_eh_stmt (stmt)
9058 && gimple_purge_dead_eh_edges (bb))
9059 todo |= TODO_cleanup_cfg;
9060 gsi_next (&gsi);
9061 }
9062
9063 FOR_EACH_EDGE (e, ei, bb->succs)
9064 e->count = apply_scale (e->count, count_scale);
9065
9066 /* If we have a basic block with no successors that does not
9067 end with a control statement or a noreturn call, end it with
9068 a call to __builtin_unreachable. This situation can occur
9069 when inlining a noreturn call that does in fact return. */
9070 if (EDGE_COUNT (bb->succs) == 0)
9071 {
9072 gimple *stmt = last_stmt (bb);
9073 if (!stmt
9074 || (!is_ctrl_stmt (stmt)
9075 && (!is_gimple_call (stmt)
9076 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
9077 {
9078 if (stmt && is_gimple_call (stmt))
9079 gimple_call_set_ctrl_altering (stmt, false);
9080 stmt = gimple_build_call
9081 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
9082 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9083 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9084 }
9085 }
9086 }
9087 if (count_scale != REG_BR_PROB_BASE)
9088 compute_function_frequency ();
9089
9090 if (current_loops
9091 && (todo & TODO_cleanup_cfg))
9092 loops_state_set (LOOPS_NEED_FIXUP);
9093
9094 return todo;
9095 }
9096
9097 namespace {
9098
9099 const pass_data pass_data_fixup_cfg =
9100 {
9101 GIMPLE_PASS, /* type */
9102 "fixup_cfg", /* name */
9103 OPTGROUP_NONE, /* optinfo_flags */
9104 TV_NONE, /* tv_id */
9105 PROP_cfg, /* properties_required */
9106 0, /* properties_provided */
9107 0, /* properties_destroyed */
9108 0, /* todo_flags_start */
9109 0, /* todo_flags_finish */
9110 };
9111
9112 class pass_fixup_cfg : public gimple_opt_pass
9113 {
9114 public:
9115 pass_fixup_cfg (gcc::context *ctxt)
9116 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9117 {}
9118
9119 /* opt_pass methods: */
9120 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9121 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9122
9123 }; // class pass_fixup_cfg
9124
9125 } // anon namespace
9126
9127 gimple_opt_pass *
9128 make_pass_fixup_cfg (gcc::context *ctxt)
9129 {
9130 return new pass_fixup_cfg (ctxt);
9131 }
9132
9133 /* Garbage collection support for edge_def. */
9134
9135 extern void gt_ggc_mx (tree&);
9136 extern void gt_ggc_mx (gimple *&);
9137 extern void gt_ggc_mx (rtx&);
9138 extern void gt_ggc_mx (basic_block&);
9139
9140 static void
9141 gt_ggc_mx (rtx_insn *& x)
9142 {
9143 if (x)
9144 gt_ggc_mx_rtx_def ((void *) x);
9145 }
9146
9147 void
9148 gt_ggc_mx (edge_def *e)
9149 {
9150 tree block = LOCATION_BLOCK (e->goto_locus);
9151 gt_ggc_mx (e->src);
9152 gt_ggc_mx (e->dest);
9153 if (current_ir_type () == IR_GIMPLE)
9154 gt_ggc_mx (e->insns.g);
9155 else
9156 gt_ggc_mx (e->insns.r);
9157 gt_ggc_mx (block);
9158 }
9159
9160 /* PCH support for edge_def. */
9161
9162 extern void gt_pch_nx (tree&);
9163 extern void gt_pch_nx (gimple *&);
9164 extern void gt_pch_nx (rtx&);
9165 extern void gt_pch_nx (basic_block&);
9166
9167 static void
9168 gt_pch_nx (rtx_insn *& x)
9169 {
9170 if (x)
9171 gt_pch_nx_rtx_def ((void *) x);
9172 }
9173
9174 void
9175 gt_pch_nx (edge_def *e)
9176 {
9177 tree block = LOCATION_BLOCK (e->goto_locus);
9178 gt_pch_nx (e->src);
9179 gt_pch_nx (e->dest);
9180 if (current_ir_type () == IR_GIMPLE)
9181 gt_pch_nx (e->insns.g);
9182 else
9183 gt_pch_nx (e->insns.r);
9184 gt_pch_nx (block);
9185 }
9186
9187 void
9188 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9189 {
9190 tree block = LOCATION_BLOCK (e->goto_locus);
9191 op (&(e->src), cookie);
9192 op (&(e->dest), cookie);
9193 if (current_ir_type () == IR_GIMPLE)
9194 op (&(e->insns.g), cookie);
9195 else
9196 op (&(e->insns.r), cookie);
9197 op (&(block), cookie);
9198 }
9199
9200 #if CHECKING_P
9201
9202 namespace selftest {
9203
9204 /* Helper function for CFG selftests: create a dummy function decl
9205 and push it as cfun. */
9206
9207 static tree
9208 push_fndecl (const char *name)
9209 {
9210 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9211 /* FIXME: this uses input_location: */
9212 tree fndecl = build_fn_decl (name, fn_type);
9213 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9214 NULL_TREE, integer_type_node);
9215 DECL_RESULT (fndecl) = retval;
9216 push_struct_function (fndecl);
9217 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9218 ASSERT_TRUE (fun != NULL);
9219 init_empty_tree_cfg_for_function (fun);
9220 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9221 ASSERT_EQ (0, n_edges_for_fn (fun));
9222 return fndecl;
9223 }
9224
9225 /* These tests directly create CFGs.
9226 Compare with the static fns within tree-cfg.c:
9227 - build_gimple_cfg
9228 - make_blocks: calls create_basic_block (seq, bb);
9229 - make_edges. */
9230
9231 /* Verify a simple cfg of the form:
9232 ENTRY -> A -> B -> C -> EXIT. */
9233
9234 static void
9235 test_linear_chain ()
9236 {
9237 gimple_register_cfg_hooks ();
9238
9239 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9240 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9241
9242 /* Create some empty blocks. */
9243 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9244 basic_block bb_b = create_empty_bb (bb_a);
9245 basic_block bb_c = create_empty_bb (bb_b);
9246
9247 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9248 ASSERT_EQ (0, n_edges_for_fn (fun));
9249
9250 /* Create some edges: a simple linear chain of BBs. */
9251 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9252 make_edge (bb_a, bb_b, 0);
9253 make_edge (bb_b, bb_c, 0);
9254 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9255
9256 /* Verify the edges. */
9257 ASSERT_EQ (4, n_edges_for_fn (fun));
9258 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9259 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9260 ASSERT_EQ (1, bb_a->preds->length ());
9261 ASSERT_EQ (1, bb_a->succs->length ());
9262 ASSERT_EQ (1, bb_b->preds->length ());
9263 ASSERT_EQ (1, bb_b->succs->length ());
9264 ASSERT_EQ (1, bb_c->preds->length ());
9265 ASSERT_EQ (1, bb_c->succs->length ());
9266 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9267 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9268
9269 /* Verify the dominance information
9270 Each BB in our simple chain should be dominated by the one before
9271 it. */
9272 calculate_dominance_info (CDI_DOMINATORS);
9273 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9274 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9275 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9276 ASSERT_EQ (1, dom_by_b.length ());
9277 ASSERT_EQ (bb_c, dom_by_b[0]);
9278 free_dominance_info (CDI_DOMINATORS);
9279 dom_by_b.release ();
9280
9281 /* Similarly for post-dominance: each BB in our chain is post-dominated
9282 by the one after it. */
9283 calculate_dominance_info (CDI_POST_DOMINATORS);
9284 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9285 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9286 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9287 ASSERT_EQ (1, postdom_by_b.length ());
9288 ASSERT_EQ (bb_a, postdom_by_b[0]);
9289 free_dominance_info (CDI_POST_DOMINATORS);
9290 postdom_by_b.release ();
9291
9292 pop_cfun ();
9293 }
9294
9295 /* Verify a simple CFG of the form:
9296 ENTRY
9297 |
9298 A
9299 / \
9300 /t \f
9301 B C
9302 \ /
9303 \ /
9304 D
9305 |
9306 EXIT. */
9307
9308 static void
9309 test_diamond ()
9310 {
9311 gimple_register_cfg_hooks ();
9312
9313 tree fndecl = push_fndecl ("cfg_test_diamond");
9314 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9315
9316 /* Create some empty blocks. */
9317 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9318 basic_block bb_b = create_empty_bb (bb_a);
9319 basic_block bb_c = create_empty_bb (bb_a);
9320 basic_block bb_d = create_empty_bb (bb_b);
9321
9322 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9323 ASSERT_EQ (0, n_edges_for_fn (fun));
9324
9325 /* Create the edges. */
9326 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9327 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9328 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9329 make_edge (bb_b, bb_d, 0);
9330 make_edge (bb_c, bb_d, 0);
9331 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9332
9333 /* Verify the edges. */
9334 ASSERT_EQ (6, n_edges_for_fn (fun));
9335 ASSERT_EQ (1, bb_a->preds->length ());
9336 ASSERT_EQ (2, bb_a->succs->length ());
9337 ASSERT_EQ (1, bb_b->preds->length ());
9338 ASSERT_EQ (1, bb_b->succs->length ());
9339 ASSERT_EQ (1, bb_c->preds->length ());
9340 ASSERT_EQ (1, bb_c->succs->length ());
9341 ASSERT_EQ (2, bb_d->preds->length ());
9342 ASSERT_EQ (1, bb_d->succs->length ());
9343
9344 /* Verify the dominance information. */
9345 calculate_dominance_info (CDI_DOMINATORS);
9346 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9347 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9348 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9349 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
9350 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
9351 dom_by_a.release ();
9352 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9353 ASSERT_EQ (0, dom_by_b.length ());
9354 dom_by_b.release ();
9355 free_dominance_info (CDI_DOMINATORS);
9356
9357 /* Similarly for post-dominance. */
9358 calculate_dominance_info (CDI_POST_DOMINATORS);
9359 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9360 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9361 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
9362 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
9363 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
9364 postdom_by_d.release ();
9365 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9366 ASSERT_EQ (0, postdom_by_b.length ());
9367 postdom_by_b.release ();
9368 free_dominance_info (CDI_POST_DOMINATORS);
9369
9370 pop_cfun ();
9371 }
9372
9373 /* Verify that we can handle a CFG containing a "complete" aka
9374 fully-connected subgraph (where each of A, B, C and D below has an
9375 edge to every other node, and to itself).
9376 e.g.:
9377 ENTRY EXIT
9378 | ^
9379 | /
9380 | /
9381 | /
9382 V/
9383 A<--->B
9384 ^^ ^^
9385 | \ / |
9386 | X |
9387 | / \ |
9388 VV VV
9389 C<--->D
9390 */
9391
9392 static void
9393 test_fully_connected ()
9394 {
9395 gimple_register_cfg_hooks ();
9396
9397 tree fndecl = push_fndecl ("cfg_fully_connected");
9398 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9399
9400 const int n = 4;
9401
9402 /* Create some empty blocks. */
9403 auto_vec <basic_block> subgraph_nodes;
9404 for (int i = 0; i < n; i++)
9405 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
9406
9407 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
9408 ASSERT_EQ (0, n_edges_for_fn (fun));
9409
9410 /* Create the edges. */
9411 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
9412 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9413 for (int i = 0; i < n; i++)
9414 for (int j = 0; j < n; j++)
9415 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
9416
9417 /* Verify the edges. */
9418 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
9419 /* The first one is linked to ENTRY/EXIT as well as itself and
9420 everything else. */
9421 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
9422 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
9423 /* The other ones in the subgraph are linked to everything in
9424 the subgraph (including themselves). */
9425 for (int i = 1; i < n; i++)
9426 {
9427 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
9428 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
9429 }
9430
9431 /* Verify the dominance information. */
9432 calculate_dominance_info (CDI_DOMINATORS);
9433 /* The initial block in the subgraph should be dominated by ENTRY. */
9434 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
9435 get_immediate_dominator (CDI_DOMINATORS,
9436 subgraph_nodes[0]));
9437 /* Every other block in the subgraph should be dominated by the
9438 initial block. */
9439 for (int i = 1; i < n; i++)
9440 ASSERT_EQ (subgraph_nodes[0],
9441 get_immediate_dominator (CDI_DOMINATORS,
9442 subgraph_nodes[i]));
9443 free_dominance_info (CDI_DOMINATORS);
9444
9445 /* Similarly for post-dominance. */
9446 calculate_dominance_info (CDI_POST_DOMINATORS);
9447 /* The initial block in the subgraph should be postdominated by EXIT. */
9448 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
9449 get_immediate_dominator (CDI_POST_DOMINATORS,
9450 subgraph_nodes[0]));
9451 /* Every other block in the subgraph should be postdominated by the
9452 initial block, since that leads to EXIT. */
9453 for (int i = 1; i < n; i++)
9454 ASSERT_EQ (subgraph_nodes[0],
9455 get_immediate_dominator (CDI_POST_DOMINATORS,
9456 subgraph_nodes[i]));
9457 free_dominance_info (CDI_POST_DOMINATORS);
9458
9459 pop_cfun ();
9460 }
9461
9462 /* Run all of the selftests within this file. */
9463
9464 void
9465 tree_cfg_c_tests ()
9466 {
9467 test_linear_chain ();
9468 test_diamond ();
9469 test_fully_connected ();
9470 }
9471
9472 } // namespace selftest
9473
9474 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
9475 - loop
9476 - nested loops
9477 - switch statement (a block with many out-edges)
9478 - something that jumps to itself
9479 - etc. */
9480
9481 #endif /* CHECKING_P */