1 /* Control flow functions for trees.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "hash-map.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "trans-mem.h"
29 #include "stor-layout.h"
30 #include "print-tree.h"
31 #include "tm_p.h"
32 #include "predict.h"
33 #include "vec.h"
34 #include "hashtab.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "hard-reg-set.h"
38 #include "input.h"
39 #include "function.h"
40 #include "dominance.h"
41 #include "cfg.h"
42 #include "cfganal.h"
43 #include "basic-block.h"
44 #include "flags.h"
45 #include "gimple-pretty-print.h"
46 #include "tree-ssa-alias.h"
47 #include "internal-fn.h"
48 #include "gimple-fold.h"
49 #include "tree-eh.h"
50 #include "gimple-expr.h"
51 #include "is-a.h"
52 #include "gimple.h"
53 #include "gimple-iterator.h"
54 #include "gimplify-me.h"
55 #include "gimple-walk.h"
56 #include "gimple-ssa.h"
57 #include "cgraph.h"
58 #include "tree-cfg.h"
59 #include "tree-phinodes.h"
60 #include "ssa-iterators.h"
61 #include "stringpool.h"
62 #include "tree-ssanames.h"
63 #include "tree-ssa-loop-manip.h"
64 #include "tree-ssa-loop-niter.h"
65 #include "tree-into-ssa.h"
66 #include "expr.h"
67 #include "tree-dfa.h"
68 #include "tree-ssa.h"
69 #include "tree-dump.h"
70 #include "tree-pass.h"
71 #include "diagnostic-core.h"
72 #include "except.h"
73 #include "cfgloop.h"
74 #include "tree-ssa-propagate.h"
75 #include "value-prof.h"
76 #include "tree-inline.h"
77 #include "target.h"
78 #include "tree-ssa-live.h"
79 #include "omp-low.h"
80 #include "tree-cfgcleanup.h"
81 #include "wide-int.h"
82 #include "wide-int-print.h"
83
84 /* This file contains functions for building the Control Flow Graph (CFG)
85 for a function tree. */
86
87 /* Local declarations. */
88
89 /* Initial capacity for the basic block array. */
90 static const int initial_cfg_capacity = 20;
91
/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
104
105 static hash_map<edge, tree> *edge_to_cases;
106
107 /* If we record edge_to_cases, this bitmap will hold indexes
108 of basic blocks that end in a GIMPLE_SWITCH which we touched
109 due to edge manipulations. */
110
111 static bitmap touched_switch_bbs;
112
113 /* CFG statistics. */
114 struct cfg_stats_d
115 {
116 long num_merged_labels;
117 };
118
119 static struct cfg_stats_d cfg_stats;
120
121 /* Hash table to store last discriminator assigned for each locus. */
122 struct locus_discrim_map
123 {
124 location_t locus;
125 int discriminator;
126 };
127
128 /* Hashtable helpers. */
129
130 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
131 {
132 typedef locus_discrim_map value_type;
133 typedef locus_discrim_map compare_type;
134 static inline hashval_t hash (const value_type *);
135 static inline bool equal (const value_type *, const compare_type *);
136 };
137
138 /* Trivial hash function for a location_t. ITEM is a pointer to
139 a hash table entry that maps a location_t to a discriminator. */
140
141 inline hashval_t
142 locus_discrim_hasher::hash (const value_type *item)
143 {
144 return LOCATION_LINE (item->locus);
145 }
146
147 /* Equality function for the locus-to-discriminator map. A and B
148 point to the two hash table entries to compare. */
149
150 inline bool
151 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
152 {
153 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
154 }
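
/* Note that both the hash and the equality function above look only at
   the line number, so two locations on the same line, even in different
   files, share one discriminator entry.  That coarseness appears to be
   acceptable for discriminator assignment (compare same_line_p below,
   which does also check the file name).  */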
155
156 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
157
158 /* Basic blocks and flowgraphs. */
159 static void make_blocks (gimple_seq);
160
161 /* Edges. */
162 static void make_edges (void);
163 static void assign_discriminators (void);
164 static void make_cond_expr_edges (basic_block);
165 static void make_gimple_switch_edges (basic_block);
166 static bool make_goto_expr_edges (basic_block);
167 static void make_gimple_asm_edges (basic_block);
168 static edge gimple_redirect_edge_and_branch (edge, basic_block);
169 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
170
171 /* Various helpers. */
172 static inline bool stmt_starts_bb_p (gimple, gimple);
173 static int gimple_verify_flow_info (void);
174 static void gimple_make_forwarder_block (edge);
175 static gimple first_non_label_stmt (basic_block);
176 static bool verify_gimple_transaction (gimple);
177 static bool call_can_make_abnormal_goto (gimple);
178
179 /* Flowgraph optimization and cleanup. */
180 static void gimple_merge_blocks (basic_block, basic_block);
181 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
182 static void remove_bb (basic_block);
183 static edge find_taken_edge_computed_goto (basic_block, tree);
184 static edge find_taken_edge_cond_expr (basic_block, tree);
185 static edge find_taken_edge_switch_expr (basic_block, tree);
186 static tree find_case_label_for_value (gimple, tree);
187
188 void
189 init_empty_tree_cfg_for_function (struct function *fn)
190 {
191 /* Initialize the basic block array. */
192 init_flow (fn);
193 profile_status_for_fn (fn) = PROFILE_ABSENT;
194 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
195 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
196 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
197 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
198 initial_cfg_capacity);
199
200 /* Build a mapping of labels to their associated blocks. */
201 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
202 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
203 initial_cfg_capacity);
204
205 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
206 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
207
208 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
209 = EXIT_BLOCK_PTR_FOR_FN (fn);
210 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
211 = ENTRY_BLOCK_PTR_FOR_FN (fn);
212 }
213
214 void
215 init_empty_tree_cfg (void)
216 {
217 init_empty_tree_cfg_for_function (cfun);
218 }
219
220 /*---------------------------------------------------------------------------
221 Create basic blocks
222 ---------------------------------------------------------------------------*/
223
224 /* Entry point to the CFG builder for trees. SEQ is the sequence of
225 statements to be added to the flowgraph. */
226
227 static void
228 build_gimple_cfg (gimple_seq seq)
229 {
230 /* Register specific gimple functions. */
231 gimple_register_cfg_hooks ();
232
233 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
234
235 init_empty_tree_cfg ();
236
237 make_blocks (seq);
238
239 /* Make sure there is always at least one block, even if it's empty. */
240 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
241 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
242
243 /* Adjust the size of the array. */
244 if (basic_block_info_for_fn (cfun)->length ()
245 < (size_t) n_basic_blocks_for_fn (cfun))
246 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
247 n_basic_blocks_for_fn (cfun));
248
249 /* To speed up statement iterator walks, we first purge dead labels. */
250 cleanup_dead_labels ();
251
252 /* Group case nodes to reduce the number of edges.
253 We do this after cleaning up dead labels because otherwise we miss
254 a lot of obvious case merging opportunities. */
255 group_case_labels ();
256
257 /* Create the edges of the flowgraph. */
258 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
259 make_edges ();
260 assign_discriminators ();
261 cleanup_dead_labels ();
262 delete discriminator_per_locus;
263 discriminator_per_locus = NULL;
264 }
265
266
267 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
268 them and propagate the information to the loop. We assume that the
269 annotations come immediately before the condition of the loop. */
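
/* For example (an illustration, not tied to a particular testcase), a loop
   preceded by #pragma GCC ivdep reaches this point as an IFN_ANNOTATE call
   with annot_expr_ivdep_kind immediately before the loop condition; the
   code below turns that annotation into loop->safelen = INT_MAX.  */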
270
271 static void
272 replace_loop_annotate ()
273 {
274 struct loop *loop;
275 basic_block bb;
276 gimple_stmt_iterator gsi;
277 gimple stmt;
278
279 FOR_EACH_LOOP (loop, 0)
280 {
281 gsi = gsi_last_bb (loop->header);
282 stmt = gsi_stmt (gsi);
283 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
284 continue;
285 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
286 {
287 stmt = gsi_stmt (gsi);
288 if (gimple_code (stmt) != GIMPLE_CALL)
289 break;
290 if (!gimple_call_internal_p (stmt)
291 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
292 break;
293 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
294 {
295 case annot_expr_ivdep_kind:
296 loop->safelen = INT_MAX;
297 break;
298 case annot_expr_no_vector_kind:
299 loop->dont_vectorize = true;
300 break;
301 case annot_expr_vector_kind:
302 loop->force_vectorize = true;
303 cfun->has_force_vectorize_loops = true;
304 break;
305 default:
306 gcc_unreachable ();
307 }
308 stmt = gimple_build_assign (gimple_call_lhs (stmt),
309 gimple_call_arg (stmt, 0));
310 gsi_replace (&gsi, stmt, true);
311 }
312 }
313
  /* Remove any remaining IFN_ANNOTATE calls and warn about them.  This is
     also a safeguard for the case loop->latch == NULL.  */
315 FOR_EACH_BB_FN (bb, cfun)
316 {
317 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
318 {
319 stmt = gsi_stmt (gsi);
320 if (gimple_code (stmt) != GIMPLE_CALL)
321 break;
322 if (!gimple_call_internal_p (stmt)
323 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
324 break;
325 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
326 {
327 case annot_expr_ivdep_kind:
328 case annot_expr_no_vector_kind:
329 case annot_expr_vector_kind:
330 break;
331 default:
332 gcc_unreachable ();
333 }
334 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
335 stmt = gimple_build_assign (gimple_call_lhs (stmt),
336 gimple_call_arg (stmt, 0));
337 gsi_replace (&gsi, stmt, true);
338 }
339 }
340 }
341
342
343 static unsigned int
344 execute_build_cfg (void)
345 {
346 gimple_seq body = gimple_body (current_function_decl);
347
348 build_gimple_cfg (body);
349 gimple_set_body (current_function_decl, NULL);
350 if (dump_file && (dump_flags & TDF_DETAILS))
351 {
352 fprintf (dump_file, "Scope blocks:\n");
353 dump_scope_blocks (dump_file, dump_flags);
354 }
355 cleanup_tree_cfg ();
356 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
357 replace_loop_annotate ();
358 return 0;
359 }
360
361 namespace {
362
363 const pass_data pass_data_build_cfg =
364 {
365 GIMPLE_PASS, /* type */
366 "cfg", /* name */
367 OPTGROUP_NONE, /* optinfo_flags */
368 TV_TREE_CFG, /* tv_id */
369 PROP_gimple_leh, /* properties_required */
370 ( PROP_cfg | PROP_loops ), /* properties_provided */
371 0, /* properties_destroyed */
372 0, /* todo_flags_start */
373 0, /* todo_flags_finish */
374 };
375
376 class pass_build_cfg : public gimple_opt_pass
377 {
378 public:
379 pass_build_cfg (gcc::context *ctxt)
380 : gimple_opt_pass (pass_data_build_cfg, ctxt)
381 {}
382
383 /* opt_pass methods: */
384 virtual unsigned int execute (function *) { return execute_build_cfg (); }
385
386 }; // class pass_build_cfg
387
388 } // anon namespace
389
390 gimple_opt_pass *
391 make_pass_build_cfg (gcc::context *ctxt)
392 {
393 return new pass_build_cfg (ctxt);
394 }
395
396
397 /* Return true if T is a computed goto. */
398
399 bool
400 computed_goto_p (gimple t)
401 {
402 return (gimple_code (t) == GIMPLE_GOTO
403 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
404 }
405
406 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
407 the other edge points to a bb with just __builtin_unreachable ().
408 I.e. return true for C->M edge in:
409 <bb C>:
410 ...
411 if (something)
412 goto <bb N>;
413 else
414 goto <bb M>;
415 <bb N>:
416 __builtin_unreachable ();
417 <bb M>: */
418
419 bool
420 assert_unreachable_fallthru_edge_p (edge e)
421 {
422 basic_block pred_bb = e->src;
423 gimple last = last_stmt (pred_bb);
424 if (last && gimple_code (last) == GIMPLE_COND)
425 {
426 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
427 if (other_bb == e->dest)
428 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
429 if (EDGE_COUNT (other_bb->succs) == 0)
430 {
431 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
432 gimple stmt;
433
434 if (gsi_end_p (gsi))
435 return false;
436 stmt = gsi_stmt (gsi);
437 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
438 {
439 gsi_next (&gsi);
440 if (gsi_end_p (gsi))
441 return false;
442 stmt = gsi_stmt (gsi);
443 }
444 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
445 }
446 }
447 return false;
448 }
449
450
451 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
452 could alter control flow except via eh. We initialize the flag at
453 CFG build time and only ever clear it later. */
454
455 static void
456 gimple_call_initialize_ctrl_altering (gimple stmt)
457 {
458 int flags = gimple_call_flags (stmt);
459
460 /* A call alters control flow if it can make an abnormal goto. */
461 if (call_can_make_abnormal_goto (stmt)
462 /* A call also alters control flow if it does not return. */
463 || flags & ECF_NORETURN
464 /* TM ending statements have backedges out of the transaction.
465 Return true so we split the basic block containing them.
466 Note that the TM_BUILTIN test is merely an optimization. */
467 || ((flags & ECF_TM_BUILTIN)
468 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
470 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
471 gimple_call_set_ctrl_altering (stmt, true);
472 else
473 gimple_call_set_ctrl_altering (stmt, false);
474 }
475
476
477 /* Build a flowgraph for the sequence of stmts SEQ. */
478
479 static void
480 make_blocks (gimple_seq seq)
481 {
482 gimple_stmt_iterator i = gsi_start (seq);
483 gimple stmt = NULL;
484 bool start_new_block = true;
485 bool first_stmt_of_seq = true;
486 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
487
488 while (!gsi_end_p (i))
489 {
490 gimple prev_stmt;
491
492 prev_stmt = stmt;
493 stmt = gsi_stmt (i);
494
495 if (stmt && is_gimple_call (stmt))
496 gimple_call_initialize_ctrl_altering (stmt);
497
498 /* If the statement starts a new basic block or if we have determined
499 in a previous pass that we need to create a new block for STMT, do
500 so now. */
501 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
502 {
503 if (!first_stmt_of_seq)
504 gsi_split_seq_before (&i, &seq);
505 bb = create_basic_block (seq, NULL, bb);
506 start_new_block = false;
507 }
508
509 /* Now add STMT to BB and create the subgraphs for special statement
510 codes. */
511 gimple_set_bb (stmt, bb);
512
513 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
514 next iteration. */
515 if (stmt_ends_bb_p (stmt))
516 {
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
522 if (gimple_has_lhs (stmt)
523 && stmt_can_make_abnormal_goto (stmt)
524 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
525 {
526 tree lhs = gimple_get_lhs (stmt);
527 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
528 gimple s = gimple_build_assign (lhs, tmp);
529 gimple_set_location (s, gimple_location (stmt));
530 gimple_set_block (s, gimple_block (stmt));
531 gimple_set_lhs (stmt, tmp);
532 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
533 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
534 DECL_GIMPLE_REG_P (tmp) = 1;
535 gsi_insert_after (&i, s, GSI_SAME_STMT);
536 }
537 start_new_block = true;
538 }
539
540 gsi_next (&i);
541 first_stmt_of_seq = false;
542 }
543 }
544
545
546 /* Create and return a new empty basic block after bb AFTER. */
547
548 static basic_block
549 create_bb (void *h, void *e, basic_block after)
550 {
551 basic_block bb;
552
553 gcc_assert (!e);
554
555 /* Create and initialize a new basic block. Since alloc_block uses
556 GC allocation that clears memory to allocate a basic block, we do
557 not have to clear the newly allocated basic block here. */
558 bb = alloc_block ();
559
560 bb->index = last_basic_block_for_fn (cfun);
561 bb->flags = BB_NEW;
562 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
563
564 /* Add the new block to the linked list of blocks. */
565 link_block (bb, after);
566
567 /* Grow the basic block array if needed. */
568 if ((size_t) last_basic_block_for_fn (cfun)
569 == basic_block_info_for_fn (cfun)->length ())
570 {
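      /* Grow by roughly a quarter of the current size, so that repeated
	 block creation costs amortized constant time per block.  */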
571 size_t new_size =
572 (last_basic_block_for_fn (cfun)
573 + (last_basic_block_for_fn (cfun) + 3) / 4);
574 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
575 }
576
577 /* Add the newly created block to the array. */
578 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
579
580 n_basic_blocks_for_fn (cfun)++;
581 last_basic_block_for_fn (cfun)++;
582
583 return bb;
584 }
585
586
587 /*---------------------------------------------------------------------------
588 Edge creation
589 ---------------------------------------------------------------------------*/
590
591 /* Fold COND_EXPR_COND of each COND_EXPR. */
592
593 void
594 fold_cond_expr_cond (void)
595 {
596 basic_block bb;
597
598 FOR_EACH_BB_FN (bb, cfun)
599 {
600 gimple stmt = last_stmt (bb);
601
602 if (stmt && gimple_code (stmt) == GIMPLE_COND)
603 {
604 location_t loc = gimple_location (stmt);
605 tree cond;
606 bool zerop, onep;
607
608 fold_defer_overflow_warnings ();
609 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
610 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
611 if (cond)
612 {
613 zerop = integer_zerop (cond);
614 onep = integer_onep (cond);
615 }
616 else
617 zerop = onep = false;
618
619 fold_undefer_overflow_warnings (zerop || onep,
620 stmt,
621 WARN_STRICT_OVERFLOW_CONDITIONAL);
622 if (zerop)
623 gimple_cond_make_false (stmt);
624 else if (onep)
625 gimple_cond_make_true (stmt);
626 }
627 }
628 }
629
/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   that dispatcher's basic block, otherwise return NULL.  */
633
634 basic_block
635 get_abnormal_succ_dispatcher (basic_block bb)
636 {
637 edge e;
638 edge_iterator ei;
639
640 FOR_EACH_EDGE (e, ei, bb->succs)
641 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
642 {
643 gimple_stmt_iterator gsi
644 = gsi_start_nondebug_after_labels_bb (e->dest);
645 gimple g = gsi_stmt (gsi);
646 if (g
647 && is_gimple_call (g)
648 && gimple_call_internal_p (g)
649 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
650 return e->dest;
651 }
652 return NULL;
653 }
654
/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */
659
660 static void
661 handle_abnormal_edges (basic_block *dispatcher_bbs,
662 basic_block for_bb, int *bb_to_omp_idx,
663 auto_vec<basic_block> *bbs, bool computed_goto)
664 {
665 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
666 unsigned int idx = 0;
667 basic_block bb;
668 bool inner = false;
669
670 if (bb_to_omp_idx)
671 {
672 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
673 if (bb_to_omp_idx[for_bb->index] != 0)
674 inner = true;
675 }
676
  /* If the dispatcher has already been created, then there are basic
     blocks with abnormal edges to it, so all that is left to do below
     is make a new edge from it to FOR_BB.  Otherwise create the
     dispatcher first.  */
680 if (*dispatcher == NULL)
681 {
682 /* Check if there are any basic blocks that need to have
683 abnormal edges to this dispatcher. If there are none, return
684 early. */
685 if (bb_to_omp_idx == NULL)
686 {
687 if (bbs->is_empty ())
688 return;
689 }
690 else
691 {
692 FOR_EACH_VEC_ELT (*bbs, idx, bb)
693 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
694 break;
695 if (bb == NULL)
696 return;
697 }
698
699 /* Create the dispatcher bb. */
700 *dispatcher = create_basic_block (NULL, NULL, for_bb);
701 if (computed_goto)
702 {
703 /* Factor computed gotos into a common computed goto site. Also
704 record the location of that site so that we can un-factor the
705 gotos after we have converted back to normal form. */
706 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
707
708 /* Create the destination of the factored goto. Each original
709 computed goto will put its desired destination into this
710 variable and jump to the label we create immediately below. */
711 tree var = create_tmp_var (ptr_type_node, "gotovar");
712
713 /* Build a label for the new block which will contain the
714 factored computed goto. */
715 tree factored_label_decl
716 = create_artificial_label (UNKNOWN_LOCATION);
717 gimple factored_computed_goto_label
718 = gimple_build_label (factored_label_decl);
719 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
720
721 /* Build our new computed goto. */
722 gimple factored_computed_goto = gimple_build_goto (var);
723 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
724
725 FOR_EACH_VEC_ELT (*bbs, idx, bb)
726 {
727 if (bb_to_omp_idx
728 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
729 continue;
730
731 gsi = gsi_last_bb (bb);
732 gimple last = gsi_stmt (gsi);
733
734 gcc_assert (computed_goto_p (last));
735
736 /* Copy the original computed goto's destination into VAR. */
737 gimple assignment
738 = gimple_build_assign (var, gimple_goto_dest (last));
739 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
740
741 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
742 e->goto_locus = gimple_location (last);
743 gsi_remove (&gsi, true);
744 }
745 }
746 else
747 {
748 tree arg = inner ? boolean_true_node : boolean_false_node;
749 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
750 1, arg);
751 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
752 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
753
754 /* Create predecessor edges of the dispatcher. */
755 FOR_EACH_VEC_ELT (*bbs, idx, bb)
756 {
757 if (bb_to_omp_idx
758 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
759 continue;
760 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
761 }
762 }
763 }
764
765 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
766 }
767
768 /* Join all the blocks in the flowgraph. */
769
770 static void
771 make_edges (void)
772 {
773 basic_block bb;
774 struct omp_region *cur_region = NULL;
775 auto_vec<basic_block> ab_edge_goto;
776 auto_vec<basic_block> ab_edge_call;
777 int *bb_to_omp_idx = NULL;
778 int cur_omp_region_idx = 0;
779
780 /* Create an edge from entry to the first block with executable
781 statements in it. */
782 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
783 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
784 EDGE_FALLTHRU);
785
786 /* Traverse the basic block array placing edges. */
787 FOR_EACH_BB_FN (bb, cfun)
788 {
789 gimple last = last_stmt (bb);
790 bool fallthru;
791
792 if (bb_to_omp_idx)
793 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
794
795 if (last)
796 {
797 enum gimple_code code = gimple_code (last);
798 switch (code)
799 {
800 case GIMPLE_GOTO:
801 if (make_goto_expr_edges (bb))
802 ab_edge_goto.safe_push (bb);
803 fallthru = false;
804 break;
805 case GIMPLE_RETURN:
806 {
807 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
808 e->goto_locus = gimple_location (last);
809 fallthru = false;
810 }
811 break;
812 case GIMPLE_COND:
813 make_cond_expr_edges (bb);
814 fallthru = false;
815 break;
816 case GIMPLE_SWITCH:
817 make_gimple_switch_edges (bb);
818 fallthru = false;
819 break;
820 case GIMPLE_RESX:
821 make_eh_edges (last);
822 fallthru = false;
823 break;
824 case GIMPLE_EH_DISPATCH:
825 fallthru = make_eh_dispatch_edges (last);
826 break;
827
828 case GIMPLE_CALL:
829 /* If this function receives a nonlocal goto, then we need to
830 make edges from this call site to all the nonlocal goto
831 handlers. */
832 if (stmt_can_make_abnormal_goto (last))
833 ab_edge_call.safe_push (bb);
834
835 /* If this statement has reachable exception handlers, then
836 create abnormal edges to them. */
837 make_eh_edges (last);
838
	    /* A BUILT_IN_RETURN call is really a return statement.  */
840 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
841 {
842 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
843 fallthru = false;
844 }
845 /* Some calls are known not to return. */
846 else
847 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
848 break;
849
850 case GIMPLE_ASSIGN:
851 /* A GIMPLE_ASSIGN may throw internally and thus be considered
852 control-altering. */
853 if (is_ctrl_altering_stmt (last))
854 make_eh_edges (last);
855 fallthru = true;
856 break;
857
858 case GIMPLE_ASM:
859 make_gimple_asm_edges (bb);
860 fallthru = true;
861 break;
862
863 CASE_GIMPLE_OMP:
864 fallthru = make_gimple_omp_edges (bb, &cur_region,
865 &cur_omp_region_idx);
866 if (cur_region && bb_to_omp_idx == NULL)
867 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
868 break;
869
870 case GIMPLE_TRANSACTION:
871 {
872 tree abort_label = gimple_transaction_label (last);
873 if (abort_label)
874 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
875 fallthru = true;
876 }
877 break;
878
879 default:
880 gcc_assert (!stmt_ends_bb_p (last));
881 fallthru = true;
882 }
883 }
884 else
885 fallthru = true;
886
887 if (fallthru)
888 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
889 }
890
  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos, and for abnormal edges from calls to calls
     that return twice or to forced labels, factor the abnormal edges
     too: all abnormal edges from the calls go to a common artificial
     basic block containing an ABNORMAL_DISPATCHER internal call, and
     abnormal edges go from that basic block to all forced labels and
     calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
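
  /* As a purely illustrative sketch (hypothetical GIMPLE, not taken from
     any testcase), factoring turns several computed gotos

	   goto *p_1;	   ...	   goto *p_2;

     into plain assignments that fall through to one shared dispatcher

	   gotovar = p_1;  ...	   gotovar = p_2;

	   <factored_label>:
	   goto *gotovar;

     so that only the dispatcher block needs abnormal outgoing edges.  */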
906 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
907 {
908 gimple_stmt_iterator gsi;
909 basic_block dispatcher_bb_array[2] = { NULL, NULL };
910 basic_block *dispatcher_bbs = dispatcher_bb_array;
911 int count = n_basic_blocks_for_fn (cfun);
912
913 if (bb_to_omp_idx)
914 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
915
916 FOR_EACH_BB_FN (bb, cfun)
917 {
918 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
919 {
920 gimple label_stmt = gsi_stmt (gsi);
921 tree target;
922
923 if (gimple_code (label_stmt) != GIMPLE_LABEL)
924 break;
925
926 target = gimple_label_label (label_stmt);
927
928 /* Make an edge to every label block that has been marked as a
929 potential target for a computed goto or a non-local goto. */
930 if (FORCED_LABEL (target))
931 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
932 &ab_edge_goto, true);
933 if (DECL_NONLOCAL (target))
934 {
935 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
936 &ab_edge_call, false);
937 break;
938 }
939 }
940
941 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
942 gsi_next_nondebug (&gsi);
943 if (!gsi_end_p (gsi))
944 {
945 /* Make an edge to every setjmp-like call. */
946 gimple call_stmt = gsi_stmt (gsi);
947 if (is_gimple_call (call_stmt)
948 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
949 || gimple_call_builtin_p (call_stmt,
950 BUILT_IN_SETJMP_RECEIVER)))
951 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
952 &ab_edge_call, false);
953 }
954 }
955
956 if (bb_to_omp_idx)
957 XDELETE (dispatcher_bbs);
958 }
959
960 XDELETE (bb_to_omp_idx);
961
962 free_omp_regions ();
963
964 /* Fold COND_EXPR_COND of each COND_EXPR. */
965 fold_cond_expr_cond ();
966 }
967
968 /* Find the next available discriminator value for LOCUS. The
969 discriminator distinguishes among several basic blocks that
970 share a common locus, allowing for more accurate sample-based
971 profiling. */
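
/* As an illustrative example, in

     if (p) x = 1; else x = 2;

   written on one line, the condition and both arms land in different
   basic blocks that share a single line number; giving each block its
   own discriminator lets a sample-based profiler attribute counts to
   each block separately.  */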
972
973 static int
974 next_discriminator_for_locus (location_t locus)
975 {
976 struct locus_discrim_map item;
977 struct locus_discrim_map **slot;
978
979 item.locus = locus;
980 item.discriminator = 0;
981 slot = discriminator_per_locus->find_slot_with_hash (
982 &item, LOCATION_LINE (locus), INSERT);
983 gcc_assert (slot);
984 if (*slot == HTAB_EMPTY_ENTRY)
985 {
986 *slot = XNEW (struct locus_discrim_map);
987 gcc_assert (*slot);
988 (*slot)->locus = locus;
989 (*slot)->discriminator = 0;
990 }
991 (*slot)->discriminator++;
992 return (*slot)->discriminator;
993 }
994
995 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
996
997 static bool
998 same_line_p (location_t locus1, location_t locus2)
999 {
1000 expanded_location from, to;
1001
1002 if (locus1 == locus2)
1003 return true;
1004
1005 from = expand_location (locus1);
1006 to = expand_location (locus2);
1007
1008 if (from.line != to.line)
1009 return false;
1010 if (from.file == to.file)
1011 return true;
1012 return (from.file != NULL
1013 && to.file != NULL
1014 && filename_cmp (from.file, to.file) == 0);
1015 }
1016
1017 /* Assign discriminators to each basic block. */
1018
1019 static void
1020 assign_discriminators (void)
1021 {
1022 basic_block bb;
1023
1024 FOR_EACH_BB_FN (bb, cfun)
1025 {
1026 edge e;
1027 edge_iterator ei;
1028 gimple last = last_stmt (bb);
1029 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1030
1031 if (locus == UNKNOWN_LOCATION)
1032 continue;
1033
1034 FOR_EACH_EDGE (e, ei, bb->succs)
1035 {
1036 gimple first = first_non_label_stmt (e->dest);
1037 gimple last = last_stmt (e->dest);
1038 if ((first && same_line_p (locus, gimple_location (first)))
1039 || (last && same_line_p (locus, gimple_location (last))))
1040 {
1041 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1042 bb->discriminator = next_discriminator_for_locus (locus);
1043 else
1044 e->dest->discriminator = next_discriminator_for_locus (locus);
1045 }
1046 }
1047 }
1048 }
1049
1050 /* Create the edges for a GIMPLE_COND starting at block BB. */
1051
1052 static void
1053 make_cond_expr_edges (basic_block bb)
1054 {
1055 gimple entry = last_stmt (bb);
1056 gimple then_stmt, else_stmt;
1057 basic_block then_bb, else_bb;
1058 tree then_label, else_label;
1059 edge e;
1060
1061 gcc_assert (entry);
1062 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1063
1064 /* Entry basic blocks for each component. */
1065 then_label = gimple_cond_true_label (entry);
1066 else_label = gimple_cond_false_label (entry);
1067 then_bb = label_to_block (then_label);
1068 else_bb = label_to_block (else_label);
1069 then_stmt = first_stmt (then_bb);
1070 else_stmt = first_stmt (else_bb);
1071
1072 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1073 e->goto_locus = gimple_location (then_stmt);
1074 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1075 if (e)
1076 e->goto_locus = gimple_location (else_stmt);
1077
1078 /* We do not need the labels anymore. */
1079 gimple_cond_set_true_label (entry, NULL_TREE);
1080 gimple_cond_set_false_label (entry, NULL_TREE);
1081 }
1082
1083
/* Called for each element in the hash table as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */
1090
1091 bool
1092 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1093 {
1094 tree t, next;
1095
1096 for (t = value; t; t = next)
1097 {
1098 next = CASE_CHAIN (t);
1099 CASE_CHAIN (t) = NULL;
1100 }
1101
1102 return true;
1103 }
1104
1105 /* Start recording information mapping edges to case labels. */
1106
1107 void
1108 start_recording_case_labels (void)
1109 {
1110 gcc_assert (edge_to_cases == NULL);
1111 edge_to_cases = new hash_map<edge, tree>;
1112 touched_switch_bbs = BITMAP_ALLOC (NULL);
1113 }
1114
1115 /* Return nonzero if we are recording information for case labels. */
1116
1117 static bool
1118 recording_case_labels_p (void)
1119 {
1120 return (edge_to_cases != NULL);
1121 }
1122
1123 /* Stop recording information mapping edges to case labels and
1124 remove any information we have recorded. */
1125 void
1126 end_recording_case_labels (void)
1127 {
1128 bitmap_iterator bi;
1129 unsigned i;
1130 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1131 delete edge_to_cases;
1132 edge_to_cases = NULL;
1133 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1134 {
1135 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1136 if (bb)
1137 {
1138 gimple stmt = last_stmt (bb);
1139 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1140 group_case_labels_stmt (stmt);
1141 }
1142 }
1143 BITMAP_FREE (touched_switch_bbs);
1144 }
1145
1146 /* If we are inside a {start,end}_recording_cases block, then return
1147 a chain of CASE_LABEL_EXPRs from T which reference E.
1148
1149 Otherwise return NULL. */
1150
1151 static tree
1152 get_cases_for_edge (edge e, gimple t)
1153 {
1154 tree *slot;
1155 size_t i, n;
1156
1157 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1158 chains available. Return NULL so the caller can detect this case. */
1159 if (!recording_case_labels_p ())
1160 return NULL;
1161
1162 slot = edge_to_cases->get (e);
1163 if (slot)
1164 return *slot;
1165
1166 /* If we did not find E in the hash table, then this must be the first
1167 time we have been queried for information about E & T. Add all the
1168 elements from T to the hash table then perform the query again. */
1169
1170 n = gimple_switch_num_labels (t);
1171 for (i = 0; i < n; i++)
1172 {
1173 tree elt = gimple_switch_label (t, i);
1174 tree lab = CASE_LABEL (elt);
1175 basic_block label_bb = label_to_block (lab);
1176 edge this_edge = find_edge (e->src, label_bb);
1177
1178 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1179 a new chain. */
1180 tree &s = edge_to_cases->get_or_insert (this_edge);
1181 CASE_CHAIN (elt) = s;
1182 s = elt;
1183 }
1184
1185 return *edge_to_cases->get (e);
1186 }
1187
1188 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1189
1190 static void
1191 make_gimple_switch_edges (basic_block bb)
1192 {
1193 gimple entry = last_stmt (bb);
1194 size_t i, n;
1195
1196 n = gimple_switch_num_labels (entry);
1197
1198 for (i = 0; i < n; ++i)
1199 {
1200 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1201 basic_block label_bb = label_to_block (lab);
1202 make_edge (bb, label_bb, 0);
1203 }
1204 }
1205
1206
1207 /* Return the basic block holding label DEST. */
1208
1209 basic_block
1210 label_to_block_fn (struct function *ifun, tree dest)
1211 {
1212 int uid = LABEL_DECL_UID (dest);
1213
  /* We would die hard if faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
1217 if (seen_error () && uid < 0)
1218 {
1219 gimple_stmt_iterator gsi =
1220 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1221 gimple stmt;
1222
1223 stmt = gimple_build_label (dest);
1224 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1225 uid = LABEL_DECL_UID (dest);
1226 }
1227 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1228 return NULL;
1229 return (*ifun->cfg->x_label_to_block_map)[uid];
1230 }
1231
1232 /* Create edges for a goto statement at block BB. Returns true
1233 if abnormal edges should be created. */
1234
1235 static bool
1236 make_goto_expr_edges (basic_block bb)
1237 {
1238 gimple_stmt_iterator last = gsi_last_bb (bb);
1239 gimple goto_t = gsi_stmt (last);
1240
1241 /* A simple GOTO creates normal edges. */
1242 if (simple_goto_p (goto_t))
1243 {
1244 tree dest = gimple_goto_dest (goto_t);
1245 basic_block label_bb = label_to_block (dest);
1246 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1247 e->goto_locus = gimple_location (goto_t);
1248 gsi_remove (&last, true);
1249 return false;
1250 }
1251
1252 /* A computed GOTO creates abnormal edges. */
1253 return true;
1254 }
1255
1256 /* Create edges for an asm statement with labels at block BB. */
1257
1258 static void
1259 make_gimple_asm_edges (basic_block bb)
1260 {
1261 gimple stmt = last_stmt (bb);
1262 int i, n = gimple_asm_nlabels (stmt);
1263
1264 for (i = 0; i < n; ++i)
1265 {
1266 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1267 basic_block label_bb = label_to_block (label);
1268 make_edge (bb, label_bb, 0);
1269 }
1270 }
1271
1272 /*---------------------------------------------------------------------------
1273 Flowgraph analysis
1274 ---------------------------------------------------------------------------*/
1275
/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */
1283
1284 /* A map from basic block index to the leading label of that block. */
1285 static struct label_record
1286 {
1287 /* The label. */
1288 tree label;
1289
1290 /* True if the label is referenced from somewhere. */
1291 bool used;
1292 } *label_for_bb;
1293
/* Given LABEL, return the first label in the same basic block.  */
1295
1296 static tree
1297 main_block_label (tree label)
1298 {
1299 basic_block bb = label_to_block (label);
1300 tree main_label = label_for_bb[bb->index].label;
1301
  /* label_to_block may have inserted an undefined label into the chain.  */
1303 if (!main_label)
1304 {
1305 label_for_bb[bb->index].label = label;
1306 main_label = label;
1307 }
1308
1309 label_for_bb[bb->index].used = true;
1310 return main_label;
1311 }
1312
1313 /* Clean up redundant labels within the exception tree. */
1314
1315 static void
1316 cleanup_dead_labels_eh (void)
1317 {
1318 eh_landing_pad lp;
1319 eh_region r;
1320 tree lab;
1321 int i;
1322
1323 if (cfun->eh == NULL)
1324 return;
1325
1326 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1327 if (lp && lp->post_landing_pad)
1328 {
1329 lab = main_block_label (lp->post_landing_pad);
1330 if (lab != lp->post_landing_pad)
1331 {
1332 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1333 EH_LANDING_PAD_NR (lab) = lp->index;
1334 }
1335 }
1336
1337 FOR_ALL_EH_REGION (r)
1338 switch (r->type)
1339 {
1340 case ERT_CLEANUP:
1341 case ERT_MUST_NOT_THROW:
1342 break;
1343
1344 case ERT_TRY:
1345 {
1346 eh_catch c;
1347 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1348 {
1349 lab = c->label;
1350 if (lab)
1351 c->label = main_block_label (lab);
1352 }
1353 }
1354 break;
1355
1356 case ERT_ALLOWED_EXCEPTIONS:
1357 lab = r->u.allowed.label;
1358 if (lab)
1359 r->u.allowed.label = main_block_label (lab);
1360 break;
1361 }
1362 }
1363
1364
1365 /* Cleanup redundant labels. This is a three-step process:
1366 1) Find the leading label for each block.
1367 2) Redirect all references to labels to the leading labels.
1368 3) Cleanup all useless labels. */
1369
1370 void
1371 cleanup_dead_labels (void)
1372 {
1373 basic_block bb;
1374 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1375
1376 /* Find a suitable label for each block. We use the first user-defined
1377 label if there is one, or otherwise just the first label we see. */
1378 FOR_EACH_BB_FN (bb, cfun)
1379 {
1380 gimple_stmt_iterator i;
1381
1382 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1383 {
1384 tree label;
1385 gimple stmt = gsi_stmt (i);
1386
1387 if (gimple_code (stmt) != GIMPLE_LABEL)
1388 break;
1389
1390 label = gimple_label_label (stmt);
1391
1392 /* If we have not yet seen a label for the current block,
1393 remember this one and see if there are more labels. */
1394 if (!label_for_bb[bb->index].label)
1395 {
1396 label_for_bb[bb->index].label = label;
1397 continue;
1398 }
1399
1400 /* If we did see a label for the current block already, but it
1401 is an artificially created label, replace it if the current
1402 label is a user defined label. */
1403 if (!DECL_ARTIFICIAL (label)
1404 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1405 {
1406 label_for_bb[bb->index].label = label;
1407 break;
1408 }
1409 }
1410 }
1411
1412 /* Now redirect all jumps/branches to the selected label.
1413 First do so for each block ending in a control statement. */
1414 FOR_EACH_BB_FN (bb, cfun)
1415 {
1416 gimple stmt = last_stmt (bb);
1417 tree label, new_label;
1418
1419 if (!stmt)
1420 continue;
1421
1422 switch (gimple_code (stmt))
1423 {
1424 case GIMPLE_COND:
1425 label = gimple_cond_true_label (stmt);
1426 if (label)
1427 {
1428 new_label = main_block_label (label);
1429 if (new_label != label)
1430 gimple_cond_set_true_label (stmt, new_label);
1431 }
1432
1433 label = gimple_cond_false_label (stmt);
1434 if (label)
1435 {
1436 new_label = main_block_label (label);
1437 if (new_label != label)
1438 gimple_cond_set_false_label (stmt, new_label);
1439 }
1440 break;
1441
1442 case GIMPLE_SWITCH:
1443 {
1444 size_t i, n = gimple_switch_num_labels (stmt);
1445
1446 /* Replace all destination labels. */
1447 for (i = 0; i < n; ++i)
1448 {
1449 tree case_label = gimple_switch_label (stmt, i);
1450 label = CASE_LABEL (case_label);
1451 new_label = main_block_label (label);
1452 if (new_label != label)
1453 CASE_LABEL (case_label) = new_label;
1454 }
1455 break;
1456 }
1457
1458 case GIMPLE_ASM:
1459 {
1460 int i, n = gimple_asm_nlabels (stmt);
1461
1462 for (i = 0; i < n; ++i)
1463 {
1464 tree cons = gimple_asm_label_op (stmt, i);
1465 tree label = main_block_label (TREE_VALUE (cons));
1466 TREE_VALUE (cons) = label;
1467 }
1468 break;
1469 }
1470
1471 /* We have to handle gotos until they're removed, and we don't
1472 remove them until after we've created the CFG edges. */
1473 case GIMPLE_GOTO:
1474 if (!computed_goto_p (stmt))
1475 {
1476 label = gimple_goto_dest (stmt);
1477 new_label = main_block_label (label);
1478 if (new_label != label)
1479 gimple_goto_set_dest (stmt, new_label);
1480 }
1481 break;
1482
1483 case GIMPLE_TRANSACTION:
1484 {
1485 tree label = gimple_transaction_label (stmt);
1486 if (label)
1487 {
1488 tree new_label = main_block_label (label);
1489 if (new_label != label)
1490 gimple_transaction_set_label (stmt, new_label);
1491 }
1492 }
1493 break;
1494
1495 default:
1496 break;
1497 }
1498 }
1499
1500 /* Do the same for the exception region tree labels. */
1501 cleanup_dead_labels_eh ();
1502
1503 /* Finally, purge dead labels. All user-defined labels and labels that
1504 can be the target of non-local gotos and labels which have their
1505 address taken are preserved. */
1506 FOR_EACH_BB_FN (bb, cfun)
1507 {
1508 gimple_stmt_iterator i;
1509 tree label_for_this_bb = label_for_bb[bb->index].label;
1510
1511 if (!label_for_this_bb)
1512 continue;
1513
1514 /* If the main label of the block is unused, we may still remove it. */
1515 if (!label_for_bb[bb->index].used)
1516 label_for_this_bb = NULL;
1517
1518 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1519 {
1520 tree label;
1521 gimple stmt = gsi_stmt (i);
1522
1523 if (gimple_code (stmt) != GIMPLE_LABEL)
1524 break;
1525
1526 label = gimple_label_label (stmt);
1527
1528 if (label == label_for_this_bb
1529 || !DECL_ARTIFICIAL (label)
1530 || DECL_NONLOCAL (label)
1531 || FORCED_LABEL (label))
1532 gsi_next (&i);
1533 else
1534 gsi_remove (&i, true);
1535 }
1536 }
1537
1538 free (label_for_bb);
1539 }
1540
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
1544
1545 void
1546 group_case_labels_stmt (gimple stmt)
1547 {
1548 int old_size = gimple_switch_num_labels (stmt);
1549 int i, j, new_size = old_size;
1550 basic_block default_bb = NULL;
1551
1552 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1553
1554 /* Look for possible opportunities to merge cases. */
1555 i = 1;
1556 while (i < old_size)
1557 {
1558 tree base_case, base_high;
1559 basic_block base_bb;
1560
1561 base_case = gimple_switch_label (stmt, i);
1562
1563 gcc_assert (base_case);
1564 base_bb = label_to_block (CASE_LABEL (base_case));
1565
1566 /* Discard cases that have the same destination as the
1567 default case. */
1568 if (base_bb == default_bb)
1569 {
1570 gimple_switch_set_label (stmt, i, NULL_TREE);
1571 i++;
1572 new_size--;
1573 continue;
1574 }
1575
1576 base_high = CASE_HIGH (base_case)
1577 ? CASE_HIGH (base_case)
1578 : CASE_LOW (base_case);
1579 i++;
1580
1581 /* Try to merge case labels. Break out when we reach the end
1582 of the label vector or when we cannot merge the next case
1583 label with the current one. */
1584 while (i < old_size)
1585 {
1586 tree merge_case = gimple_switch_label (stmt, i);
1587 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1588 wide_int bhp1 = wi::add (base_high, 1);
1589
1590 /* Merge the cases if they jump to the same place,
1591 and their ranges are consecutive. */
1592 if (merge_bb == base_bb
1593 && wi::eq_p (CASE_LOW (merge_case), bhp1))
1594 {
1595 base_high = CASE_HIGH (merge_case) ?
1596 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1597 CASE_HIGH (base_case) = base_high;
1598 gimple_switch_set_label (stmt, i, NULL_TREE);
1599 new_size--;
1600 i++;
1601 }
1602 else
1603 break;
1604 }
1605 }
1606
1607 /* Compress the case labels in the label vector, and adjust the
1608 length of the vector. */
1609 for (i = 0, j = 0; i < new_size; i++)
1610 {
1611 while (! gimple_switch_label (stmt, j))
1612 j++;
1613 gimple_switch_set_label (stmt, i,
1614 gimple_switch_label (stmt, j++));
1615 }
1616
1617 gcc_assert (new_size <= old_size);
1618 gimple_switch_set_num_labels (stmt, new_size);
1619 }
1620
1621 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1622 and scan the sorted vector of cases. Combine the ones jumping to the
1623 same label. */
1624
1625 void
1626 group_case_labels (void)
1627 {
1628 basic_block bb;
1629
1630 FOR_EACH_BB_FN (bb, cfun)
1631 {
1632 gimple stmt = last_stmt (bb);
1633 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1634 group_case_labels_stmt (stmt);
1635 }
1636 }
1637
1638 /* Checks whether we can merge block B into block A. */
1639
1640 static bool
1641 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1642 {
1643 gimple stmt;
1644 gimple_stmt_iterator gsi;
1645
1646 if (!single_succ_p (a))
1647 return false;
1648
1649 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1650 return false;
1651
1652 if (single_succ (a) != b)
1653 return false;
1654
1655 if (!single_pred_p (b))
1656 return false;
1657
1658 if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1659 return false;
1660
1661 /* If A ends by a statement causing exceptions or something similar, we
1662 cannot merge the blocks. */
1663 stmt = last_stmt (a);
1664 if (stmt && stmt_ends_bb_p (stmt))
1665 return false;
1666
1667 /* Do not allow a block with only a non-local label to be merged. */
1668 if (stmt
1669 && gimple_code (stmt) == GIMPLE_LABEL
1670 && DECL_NONLOCAL (gimple_label_label (stmt)))
1671 return false;
1672
1673 /* Examine the labels at the beginning of B. */
1674 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1675 {
1676 tree lab;
1677 stmt = gsi_stmt (gsi);
1678 if (gimple_code (stmt) != GIMPLE_LABEL)
1679 break;
1680 lab = gimple_label_label (stmt);
1681
      /* Do not remove user-forced labels, nor, at -O0, any user labels.  */
1683 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1684 return false;
1685 }
1686
1687 /* Protect simple loop latches. We only want to avoid merging
1688 the latch with the loop header in this case. */
1689 if (current_loops
1690 && b->loop_father->latch == b
1691 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1692 && b->loop_father->header == a)
1693 return false;
1694
1695 /* It must be possible to eliminate all phi nodes in B. If ssa form
1696 is not up-to-date and a name-mapping is registered, we cannot eliminate
1697 any phis. Symbols marked for renaming are never a problem though. */
1698 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
1699 {
1700 gimple phi = gsi_stmt (gsi);
1701 /* Technically only new names matter. */
1702 if (name_registered_for_update_p (PHI_RESULT (phi)))
1703 return false;
1704 }
1705
1706 /* When not optimizing, don't merge if we'd lose goto_locus. */
1707 if (!optimize
1708 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1709 {
1710 location_t goto_locus = single_succ_edge (a)->goto_locus;
1711 gimple_stmt_iterator prev, next;
1712 prev = gsi_last_nondebug_bb (a);
1713 next = gsi_after_labels (b);
1714 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1715 gsi_next_nondebug (&next);
1716 if ((gsi_end_p (prev)
1717 || gimple_location (gsi_stmt (prev)) != goto_locus)
1718 && (gsi_end_p (next)
1719 || gimple_location (gsi_stmt (next)) != goto_locus))
1720 return false;
1721 }
1722
1723 return true;
1724 }
1725
1726 /* Replaces all uses of NAME by VAL. */
1727
1728 void
1729 replace_uses_by (tree name, tree val)
1730 {
1731 imm_use_iterator imm_iter;
1732 use_operand_p use;
1733 gimple stmt;
1734 edge e;
1735
1736 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1737 {
1738 /* Mark the block if we change the last stmt in it. */
1739 if (cfgcleanup_altered_bbs
1740 && stmt_ends_bb_p (stmt))
1741 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1742
1743 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1744 {
1745 replace_exp (use, val);
1746
1747 if (gimple_code (stmt) == GIMPLE_PHI)
1748 {
1749 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
1750 if (e->flags & EDGE_ABNORMAL)
1751 {
1752 /* This can only occur for virtual operands, since
1753 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1754 would prevent replacement. */
1755 gcc_checking_assert (virtual_operand_p (name));
1756 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1757 }
1758 }
1759 }
1760
1761 if (gimple_code (stmt) != GIMPLE_PHI)
1762 {
1763 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1764 gimple orig_stmt = stmt;
1765 size_t i;
1766
	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change something from non-invariant to invariant, and
	     only when propagating constants.  */
1771 if (is_gimple_min_invariant (val))
1772 for (i = 0; i < gimple_num_ops (stmt); i++)
1773 {
1774 tree op = gimple_op (stmt, i);
1775 /* Operands may be empty here. For example, the labels
1776 of a GIMPLE_COND are nulled out following the creation
1777 of the corresponding CFG edges. */
1778 if (op && TREE_CODE (op) == ADDR_EXPR)
1779 recompute_tree_invariant_for_addr_expr (op);
1780 }
1781
1782 if (fold_stmt (&gsi))
1783 stmt = gsi_stmt (gsi);
1784
1785 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1786 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1787
1788 update_stmt (stmt);
1789 }
1790 }
1791
1792 gcc_checking_assert (has_zero_uses (name));
1793
1794 /* Also update the trees stored in loop structures. */
1795 if (current_loops)
1796 {
1797 struct loop *loop;
1798
1799 FOR_EACH_LOOP (loop, 0)
1800 {
1801 substitute_in_loop_info (loop, name, val);
1802 }
1803 }
1804 }
1805
1806 /* Merge block B into block A. */
1807
1808 static void
1809 gimple_merge_blocks (basic_block a, basic_block b)
1810 {
1811 gimple_stmt_iterator last, gsi, psi;
1812
1813 if (dump_file)
1814 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1815
1816 /* Remove all single-valued PHI nodes from block B of the form
1817 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1818 gsi = gsi_last_bb (a);
1819 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1820 {
1821 gimple phi = gsi_stmt (psi);
1822 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1823 gimple copy;
1824 bool may_replace_uses = (virtual_operand_p (def)
1825 || may_propagate_copy (def, use));
1826
1827 /* In case we maintain loop closed ssa form, do not propagate arguments
1828 of loop exit phi nodes. */
1829 if (current_loops
1830 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1831 && !virtual_operand_p (def)
1832 && TREE_CODE (use) == SSA_NAME
1833 && a->loop_father != b->loop_father)
1834 may_replace_uses = false;
1835
1836 if (!may_replace_uses)
1837 {
1838 gcc_assert (!virtual_operand_p (def));
1839
1840 /* Note that just emitting the copies is fine -- there is no problem
1841 with ordering of phi nodes. This is because A is the single
1842 predecessor of B, therefore results of the phi nodes cannot
1843 appear as arguments of the phi nodes. */
1844 copy = gimple_build_assign (def, use);
1845 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1846 remove_phi_node (&psi, false);
1847 }
1848 else
1849 {
1850 /* If we deal with a PHI for virtual operands, we can simply
1851 propagate these without fussing with folding or updating
1852 the stmt. */
1853 if (virtual_operand_p (def))
1854 {
1855 imm_use_iterator iter;
1856 use_operand_p use_p;
1857 gimple stmt;
1858
1859 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1860 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1861 SET_USE (use_p, use);
1862
1863 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1864 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1865 }
1866 else
1867 replace_uses_by (def, use);
1868
1869 remove_phi_node (&psi, true);
1870 }
1871 }
1872
1873 /* Ensure that B follows A. */
1874 move_block_after (b, a);
1875
1876 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1877 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1878
1879 /* Remove labels from B and set gimple_bb to A for other statements. */
1880 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1881 {
1882 gimple stmt = gsi_stmt (gsi);
1883 if (gimple_code (stmt) == GIMPLE_LABEL)
1884 {
1885 tree label = gimple_label_label (stmt);
1886 int lp_nr;
1887
1888 gsi_remove (&gsi, false);
1889
	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
1896 if (FORCED_LABEL (label))
1897 {
1898 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1899 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1900 }
	  /* Other user labels are kept around in the form of a debug stmt.  */
1902 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1903 {
1904 gimple dbg = gimple_build_debug_bind (label,
1905 integer_zero_node,
1906 stmt);
1907 gimple_debug_bind_reset_value (dbg);
1908 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1909 }
1910
1911 lp_nr = EH_LANDING_PAD_NR (label);
1912 if (lp_nr)
1913 {
1914 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1915 lp->post_landing_pad = NULL;
1916 }
1917 }
1918 else
1919 {
1920 gimple_set_bb (stmt, a);
1921 gsi_next (&gsi);
1922 }
1923 }
1924
1925 /* When merging two BBs, if their counts are different, the larger count
1926 is selected as the new bb count. This is to handle inconsistent
1927 profiles. */
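/* For example, if A reaches here with count 90 while its sole successor B
   has count 100 (an inconsistency, since A is B's only predecessor), the
   merged block gets count 100.  */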
1928 if (a->loop_father == b->loop_father)
1929 {
1930 a->count = MAX (a->count, b->count);
1931 a->frequency = MAX (a->frequency, b->frequency);
1932 }
1933
1934 /* Merge the sequences. */
1935 last = gsi_last_bb (a);
1936 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1937 set_bb_seq (b, NULL);
1938
1939 if (cfgcleanup_altered_bbs)
1940 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1941 }
1942
1943
1944 /* Return the one of BB's two successors that is not reachable by a
1945 complex edge, if there is one; else return BB. We use
1946 this in optimizations that use post-dominators for their heuristics,
1947 to catch the cases in C++ where function calls are involved. */
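/* For example, a call statement that may throw typically ends a block with
   a fallthru successor and an EH successor; the EH edge is EDGE_COMPLEX,
   so this function returns the fallthru destination.  */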
1948
1949 basic_block
1950 single_noncomplex_succ (basic_block bb)
1951 {
1952 edge e0, e1;
1953 if (EDGE_COUNT (bb->succs) != 2)
1954 return bb;
1955
1956 e0 = EDGE_SUCC (bb, 0);
1957 e1 = EDGE_SUCC (bb, 1);
1958 if (e0->flags & EDGE_COMPLEX)
1959 return e1->dest;
1960 if (e1->flags & EDGE_COMPLEX)
1961 return e0->dest;
1962
1963 return bb;
1964 }
1965
1966 /* CALL is a GIMPLE_CALL. Set the cfun->calls_* flags accordingly. */
1967
1968 void
1969 notice_special_calls (gimple call)
1970 {
1971 int flags = gimple_call_flags (call);
1972
1973 if (flags & ECF_MAY_BE_ALLOCA)
1974 cfun->calls_alloca = true;
1975 if (flags & ECF_RETURNS_TWICE)
1976 cfun->calls_setjmp = true;
1977 }
1978
1979
1980 /* Clear flags set by notice_special_calls. Used by dead code removal
1981 to update the flags. */
1982
1983 void
1984 clear_special_calls (void)
1985 {
1986 cfun->calls_alloca = false;
1987 cfun->calls_setjmp = false;
1988 }
1989
1990 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1991
1992 static void
1993 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1994 {
1995 /* Since this block is no longer reachable, we can just delete all
1996 of its PHI nodes. */
1997 remove_phi_nodes (bb);
1998
1999 /* Remove edges to BB's successors. */
2000 while (EDGE_COUNT (bb->succs) > 0)
2001 remove_edge (EDGE_SUCC (bb, 0));
2002 }
2003
2004
2005 /* Remove statements of basic block BB. */
2006
2007 static void
2008 remove_bb (basic_block bb)
2009 {
2010 gimple_stmt_iterator i;
2011
2012 if (dump_file)
2013 {
2014 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2015 if (dump_flags & TDF_DETAILS)
2016 {
2017 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2018 fprintf (dump_file, "\n");
2019 }
2020 }
2021
2022 if (current_loops)
2023 {
2024 struct loop *loop = bb->loop_father;
2025
2026 /* If a loop gets removed, clean up the information associated
2027 with it. */
2028 if (loop->latch == bb
2029 || loop->header == bb)
2030 free_numbers_of_iterations_estimates_loop (loop);
2031 }
2032
2033 /* Remove all the instructions in the block. */
2034 if (bb_seq (bb) != NULL)
2035 {
2036 /* Walk backwards so as to get a chance to substitute all
2037 released DEFs into debug stmts. See
2038 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2039 details. */
2040 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2041 {
2042 gimple stmt = gsi_stmt (i);
2043 if (gimple_code (stmt) == GIMPLE_LABEL
2044 && (FORCED_LABEL (gimple_label_label (stmt))
2045 || DECL_NONLOCAL (gimple_label_label (stmt))))
2046 {
2047 basic_block new_bb;
2048 gimple_stmt_iterator new_gsi;
2049
2050 /* A non-reachable non-local label may still be referenced.
2051 But it no longer needs to carry the extra semantics of
2052 non-locality. */
2053 if (DECL_NONLOCAL (gimple_label_label (stmt)))
2054 {
2055 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2056 FORCED_LABEL (gimple_label_label (stmt)) = 1;
2057 }
2058
2059 new_bb = bb->prev_bb;
2060 new_gsi = gsi_start_bb (new_bb);
2061 gsi_remove (&i, false);
2062 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2063 }
2064 else
2065 {
2066 /* Release SSA definitions if we are in SSA. Note that we
2067 may be called when not in SSA. For example,
2068 final_cleanup calls this function via
2069 cleanup_tree_cfg. */
2070 if (gimple_in_ssa_p (cfun))
2071 release_defs (stmt);
2072
2073 gsi_remove (&i, true);
2074 }
2075
2076 if (gsi_end_p (i))
2077 i = gsi_last_bb (bb);
2078 else
2079 gsi_prev (&i);
2080 }
2081 }
2082
2083 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2084 bb->il.gimple.seq = NULL;
2085 bb->il.gimple.phi_nodes = NULL;
2086 }
2087
2088
2089 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2090 predicate VAL, return the edge that will be taken out of the block.
2091 If VAL does not match a unique edge, NULL is returned. */
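/* For example, if BB ends in if (x_1 != 0) and VAL has been determined to
   be the constant zero, the false edge is returned.  */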
2092
2093 edge
2094 find_taken_edge (basic_block bb, tree val)
2095 {
2096 gimple stmt;
2097
2098 stmt = last_stmt (bb);
2099
2100 gcc_assert (stmt);
2101 gcc_assert (is_ctrl_stmt (stmt));
2102
2103 if (val == NULL)
2104 return NULL;
2105
2106 if (!is_gimple_min_invariant (val))
2107 return NULL;
2108
2109 if (gimple_code (stmt) == GIMPLE_COND)
2110 return find_taken_edge_cond_expr (bb, val);
2111
2112 if (gimple_code (stmt) == GIMPLE_SWITCH)
2113 return find_taken_edge_switch_expr (bb, val);
2114
2115 if (computed_goto_p (stmt))
2116 {
2117 /* Only optimize if the argument is a label; if the argument is
2118 not a label then we cannot construct a proper CFG.
2119
2120 It may be the case that we only need to allow the LABEL_REF to
2121 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2122 appear inside a LABEL_EXPR just to be safe. */
2123 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2124 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2125 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2126 return NULL;
2127 }
2128
2129 gcc_unreachable ();
2130 }
2131
2132 /* Given a constant value VAL and the basic block BB ending with a
2133 computed GOTO statement, determine which outgoing edge will be taken
2134 out of the block. Return NULL if the taken edge cannot be determined. */
2135
2136 static edge
2137 find_taken_edge_computed_goto (basic_block bb, tree val)
2138 {
2139 basic_block dest;
2140 edge e = NULL;
2141
2142 dest = label_to_block (val);
2143 if (dest)
2144 {
2145 e = find_edge (bb, dest);
2146 gcc_assert (e != NULL);
2147 }
2148
2149 return e;
2150 }
2151
2152 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2153 statement, determine which of the two edges will be taken out of the
2154 block. Return NULL if either edge may be taken. */
2155
2156 static edge
2157 find_taken_edge_cond_expr (basic_block bb, tree val)
2158 {
2159 edge true_edge, false_edge;
2160
2161 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2162
2163 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2164 return (integer_zerop (val) ? false_edge : true_edge);
2165 }
2166
2167 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2168 statement, determine which edge will be taken out of the block. Return
2169 NULL if any edge may be taken. */
2170
2171 static edge
2172 find_taken_edge_switch_expr (basic_block bb, tree val)
2173 {
2174 basic_block dest_bb;
2175 edge e;
2176 gimple switch_stmt;
2177 tree taken_case;
2178
2179 switch_stmt = last_stmt (bb);
2180 taken_case = find_case_label_for_value (switch_stmt, val);
2181 dest_bb = label_to_block (CASE_LABEL (taken_case));
2182
2183 e = find_edge (bb, dest_bb);
2184 gcc_assert (e);
2185 return e;
2186 }
2187
2188
2189 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2190 We can make optimal use here of the fact that the case labels are
2191 sorted: We can do a binary search for a case matching VAL. */
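/* Sketch: for switch (i) { default: ...; case 1: ...; case 3 ... 7: ...; }
   the label vector holds the default label at index 0 and the remaining
   labels sorted by CASE_LOW.  Looking up VAL = 5 keeps halving the
   interval until it lands on the range label 3 ... 7, which is returned
   since 3 <= 5 <= 7.  The loop below never examines index 0, so the
   default label is only returned by the final fall-back.  */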
2192
2193 static tree
2194 find_case_label_for_value (gimple switch_stmt, tree val)
2195 {
2196 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2197 tree default_case = gimple_switch_default_label (switch_stmt);
2198
2199 for (low = 0, high = n; high - low > 1; )
2200 {
2201 size_t i = (high + low) / 2;
2202 tree t = gimple_switch_label (switch_stmt, i);
2203 int cmp;
2204
2205 /* Cache the result of comparing CASE_LOW and val. */
2206 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2207
2208 if (cmp > 0)
2209 high = i;
2210 else
2211 low = i;
2212
2213 if (CASE_HIGH (t) == NULL)
2214 {
2215 /* A single-valued case label. */
2216 if (cmp == 0)
2217 return t;
2218 }
2219 else
2220 {
2221 /* A case range. We can only handle integer ranges. */
2222 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2223 return t;
2224 }
2225 }
2226
2227 return default_case;
2228 }
2229
2230
2231 /* Dump a basic block on stderr. */
2232
2233 void
2234 gimple_debug_bb (basic_block bb)
2235 {
2236 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2237 }
2238
2239
2240 /* Dump basic block with index N on stderr. */
2241
2242 basic_block
2243 gimple_debug_bb_n (int n)
2244 {
2245 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2246 return BASIC_BLOCK_FOR_FN (cfun, n);
2247 }
2248
2249
2250 /* Dump the CFG on stderr.
2251
2252 FLAGS are the same used by the tree dumping functions
2253 (see TDF_* in dumpfile.h). */
2254
2255 void
2256 gimple_debug_cfg (int flags)
2257 {
2258 gimple_dump_cfg (stderr, flags);
2259 }
2260
2261
2262 /* Dump the program showing basic block boundaries on the given FILE.
2263
2264 FLAGS are the same used by the tree dumping functions (see TDF_* in
2265 tree.h). */
2266
2267 void
2268 gimple_dump_cfg (FILE *file, int flags)
2269 {
2270 if (flags & TDF_DETAILS)
2271 {
2272 dump_function_header (file, current_function_decl, flags);
2273 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2274 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2275 last_basic_block_for_fn (cfun));
2276
2277 brief_dump_cfg (file, flags | TDF_COMMENT);
2278 fprintf (file, "\n");
2279 }
2280
2281 if (flags & TDF_STATS)
2282 dump_cfg_stats (file);
2283
2284 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2285 }
2286
2287
2288 /* Dump CFG statistics on FILE. */
2289
2290 void
2291 dump_cfg_stats (FILE *file)
2292 {
2293 static long max_num_merged_labels = 0;
2294 unsigned long size, total = 0;
2295 long num_edges;
2296 basic_block bb;
2297 const char * const fmt_str = "%-30s%-13s%12s\n";
2298 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2299 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2300 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2301 const char *funcname = current_function_name ();
2302
2303 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2304
2305 fprintf (file, "---------------------------------------------------------\n");
2306 fprintf (file, fmt_str, "", " Number of ", "Memory");
2307 fprintf (file, fmt_str, "", " instances ", "used ");
2308 fprintf (file, "---------------------------------------------------------\n");
2309
2310 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2311 total += size;
2312 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2313 SCALE (size), LABEL (size));
2314
2315 num_edges = 0;
2316 FOR_EACH_BB_FN (bb, cfun)
2317 num_edges += EDGE_COUNT (bb->succs);
2318 size = num_edges * sizeof (struct edge_def);
2319 total += size;
2320 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2321
2322 fprintf (file, "---------------------------------------------------------\n");
2323 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2324 LABEL (total));
2325 fprintf (file, "---------------------------------------------------------\n");
2326 fprintf (file, "\n");
2327
2328 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2329 max_num_merged_labels = cfg_stats.num_merged_labels;
2330
2331 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2332 cfg_stats.num_merged_labels, max_num_merged_labels);
2333
2334 fprintf (file, "\n");
2335 }
2336
2337
2338 /* Dump CFG statistics on stderr. Keep extern so that it's always
2339 linked in the final executable. */
2340
2341 DEBUG_FUNCTION void
2342 debug_cfg_stats (void)
2343 {
2344 dump_cfg_stats (stderr);
2345 }
2346
2347 /*---------------------------------------------------------------------------
2348 Miscellaneous helpers
2349 ---------------------------------------------------------------------------*/
2350
2351 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2352 flow. Transfers of control flow associated with EH are excluded. */
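/* The classic example is a call to a function that may call longjmp: when
   this function has called setjmp, control may return abnormally to the
   setjmp receiver, so such a call needs an abnormal outgoing edge.  */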
2353
2354 static bool
2355 call_can_make_abnormal_goto (gimple t)
2356 {
2357 /* If the function has no non-local labels, then a call cannot make an
2358 abnormal transfer of control. */
2359 if (!cfun->has_nonlocal_label
2360 && !cfun->calls_setjmp)
2361 return false;
2362
2363 /* Likewise if the call has no side effects. */
2364 if (!gimple_has_side_effects (t))
2365 return false;
2366
2367 /* Likewise if the called function is leaf. */
2368 if (gimple_call_flags (t) & ECF_LEAF)
2369 return false;
2370
2371 return true;
2372 }
2373
2374
2375 /* Return true if T can make an abnormal transfer of control flow.
2376 Transfers of control flow associated with EH are excluded. */
2377
2378 bool
2379 stmt_can_make_abnormal_goto (gimple t)
2380 {
2381 if (computed_goto_p (t))
2382 return true;
2383 if (is_gimple_call (t))
2384 return call_can_make_abnormal_goto (t);
2385 return false;
2386 }
2387
2388
2389 /* Return true if T represents a stmt that always transfers control. */
2390
2391 bool
2392 is_ctrl_stmt (gimple t)
2393 {
2394 switch (gimple_code (t))
2395 {
2396 case GIMPLE_COND:
2397 case GIMPLE_SWITCH:
2398 case GIMPLE_GOTO:
2399 case GIMPLE_RETURN:
2400 case GIMPLE_RESX:
2401 return true;
2402 default:
2403 return false;
2404 }
2405 }
2406
2407
2408 /* Return true if T is a statement that may alter the flow of control
2409 (e.g., a call to a non-returning function). */
2410
2411 bool
2412 is_ctrl_altering_stmt (gimple t)
2413 {
2414 gcc_assert (t);
2415
2416 switch (gimple_code (t))
2417 {
2418 case GIMPLE_CALL:
2419 /* The per-stmt call flag indicates whether the call could alter
2420 control flow. */
2421 if (gimple_call_ctrl_altering_p (t))
2422 return true;
2423 break;
2424
2425 case GIMPLE_EH_DISPATCH:
2426 /* EH_DISPATCH branches to the individual catch handlers at
2427 this level of a try or allowed-exceptions region. It can
2428 fallthru to the next statement as well. */
2429 return true;
2430
2431 case GIMPLE_ASM:
2432 if (gimple_asm_nlabels (t) > 0)
2433 return true;
2434 break;
2435
2436 CASE_GIMPLE_OMP:
2437 /* OpenMP directives alter control flow. */
2438 return true;
2439
2440 case GIMPLE_TRANSACTION:
2441 /* A transaction start alters control flow. */
2442 return true;
2443
2444 default:
2445 break;
2446 }
2447
2448 /* If a statement can throw, it alters control flow. */
2449 return stmt_can_throw_internal (t);
2450 }
2451
2452
2453 /* Return true if T is a simple local goto. */
2454
2455 bool
2456 simple_goto_p (gimple t)
2457 {
2458 return (gimple_code (t) == GIMPLE_GOTO
2459 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2460 }
2461
2462
2463 /* Return true if STMT should start a new basic block. PREV_STMT is
2464 the statement preceding STMT. It is used when STMT is a label or a
2465 case label. Labels should only start a new basic block if their
2466 previous statement wasn't a label. Otherwise, a sequence of labels
2467 would generate unnecessary basic blocks that only contain a single
2468 label. */
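/* For example, given

     a: b: c: x = 1;

   only label "a" starts a new basic block; "b" and "c" are merged into it
   (and counted in cfg_stats.num_merged_labels), provided none of the
   labels is nonlocal or forced.  */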
2469
2470 static inline bool
2471 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2472 {
2473 if (stmt == NULL)
2474 return false;
2475
2476 /* Labels start a new basic block only if the preceding statement
2477 wasn't a label of the same type. This prevents the creation of
2478 consecutive blocks that have nothing but a single label. */
2479 if (gimple_code (stmt) == GIMPLE_LABEL)
2480 {
2481 /* Nonlocal and computed GOTO targets always start a new block. */
2482 if (DECL_NONLOCAL (gimple_label_label (stmt))
2483 || FORCED_LABEL (gimple_label_label (stmt)))
2484 return true;
2485
2486 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2487 {
2488 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2489 return true;
2490
2491 cfg_stats.num_merged_labels++;
2492 return false;
2493 }
2494 else
2495 return true;
2496 }
2497 else if (gimple_code (stmt) == GIMPLE_CALL
2498 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2499 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2500 start a new block. */
2501 return true;
2502
2503 return false;
2504 }
2505
2506
2507 /* Return true if T should end a basic block. */
2508
2509 bool
2510 stmt_ends_bb_p (gimple t)
2511 {
2512 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2513 }
2514
2515 /* Remove block annotations and other data structures. */
2516
2517 void
2518 delete_tree_cfg_annotations (void)
2519 {
2520 vec_free (label_to_block_map_for_fn (cfun));
2521 }
2522
2523
2524 /* Return the first statement in basic block BB. */
2525
2526 gimple
2527 first_stmt (basic_block bb)
2528 {
2529 gimple_stmt_iterator i = gsi_start_bb (bb);
2530 gimple stmt = NULL;
2531
2532 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2533 {
2534 gsi_next (&i);
2535 stmt = NULL;
2536 }
2537 return stmt;
2538 }
2539
2540 /* Return the first non-label statement in basic block BB. */
2541
2542 static gimple
2543 first_non_label_stmt (basic_block bb)
2544 {
2545 gimple_stmt_iterator i = gsi_start_bb (bb);
2546 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2547 gsi_next (&i);
2548 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2549 }
2550
2551 /* Return the last statement in basic block BB. */
2552
2553 gimple
2554 last_stmt (basic_block bb)
2555 {
2556 gimple_stmt_iterator i = gsi_last_bb (bb);
2557 gimple stmt = NULL;
2558
2559 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2560 {
2561 gsi_prev (&i);
2562 stmt = NULL;
2563 }
2564 return stmt;
2565 }
2566
2567 /* Return the last statement of an otherwise empty block. Return NULL
2568 if the block is totally empty, or if it contains more than one
2569 statement. */
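/* For example, a block containing just "L1: return x_1;" yields the
   return statement, and debug statements do not count against the
   single-statement limit.  */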
2570
2571 gimple
2572 last_and_only_stmt (basic_block bb)
2573 {
2574 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2575 gimple last, prev;
2576
2577 if (gsi_end_p (i))
2578 return NULL;
2579
2580 last = gsi_stmt (i);
2581 gsi_prev_nondebug (&i);
2582 if (gsi_end_p (i))
2583 return last;
2584
2585 /* Empty statements should no longer appear in the instruction stream.
2586 Everything that might have appeared before should be deleted by
2587 remove_useless_stmts, and the optimizers should just gsi_remove
2588 instead of smashing with build_empty_stmt.
2589
2590 Thus the only thing that should appear here in a block containing
2591 one executable statement is a label. */
2592 prev = gsi_stmt (i);
2593 if (gimple_code (prev) == GIMPLE_LABEL)
2594 return last;
2595 else
2596 return NULL;
2597 }
2598
2599 /* Reinstall the PHI arguments queued for OLD_EDGE onto NEW_EDGE. */
2600
2601 static void
2602 reinstall_phi_args (edge new_edge, edge old_edge)
2603 {
2604 edge_var_map *vm;
2605 int i;
2606 gimple_stmt_iterator phis;
2607
2608 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2609 if (!v)
2610 return;
2611
2612 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2613 v->iterate (i, &vm) && !gsi_end_p (phis);
2614 i++, gsi_next (&phis))
2615 {
2616 gimple phi = gsi_stmt (phis);
2617 tree result = redirect_edge_var_map_result (vm);
2618 tree arg = redirect_edge_var_map_def (vm);
2619
2620 gcc_assert (result == gimple_phi_result (phi));
2621
2622 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2623 }
2624
2625 redirect_edge_var_map_clear (old_edge);
2626 }
2627
2628 /* Returns the basic block after which the new basic block created
2629 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2630 near its "logical" location. This is of most help to humans looking
2631 at debugging dumps. */
2632
2633 static basic_block
2634 split_edge_bb_loc (edge edge_in)
2635 {
2636 basic_block dest = edge_in->dest;
2637 basic_block dest_prev = dest->prev_bb;
2638
2639 if (dest_prev)
2640 {
2641 edge e = find_edge (dest_prev, dest);
2642 if (e && !(e->flags & EDGE_COMPLEX))
2643 return edge_in->src;
2644 }
2645 return dest_prev;
2646 }
2647
2648 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2649 Abort on abnormal edges. */
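/* Sketch of the transformation for an edge E from A to B:

     A --E--> B    becomes    A --E--> N --fallthru--> B

   where N is the new empty block; PHI arguments on B that were queued for
   E are reinstalled on the new fallthru edge.  */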
2650
2651 static basic_block
2652 gimple_split_edge (edge edge_in)
2653 {
2654 basic_block new_bb, after_bb, dest;
2655 edge new_edge, e;
2656
2657 /* Abnormal edges cannot be split. */
2658 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2659
2660 dest = edge_in->dest;
2661
2662 after_bb = split_edge_bb_loc (edge_in);
2663
2664 new_bb = create_empty_bb (after_bb);
2665 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2666 new_bb->count = edge_in->count;
2667 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2668 new_edge->probability = REG_BR_PROB_BASE;
2669 new_edge->count = edge_in->count;
2670
2671 e = redirect_edge_and_branch (edge_in, new_bb);
2672 gcc_assert (e == edge_in);
2673 reinstall_phi_args (new_edge, e);
2674
2675 return new_bb;
2676 }
2677
2678
2679 /* Verify properties of the address expression T with base object BASE. */
2680
2681 static tree
2682 verify_address (tree t, tree base)
2683 {
2684 bool old_constant;
2685 bool old_side_effects;
2686 bool new_constant;
2687 bool new_side_effects;
2688
2689 old_constant = TREE_CONSTANT (t);
2690 old_side_effects = TREE_SIDE_EFFECTS (t);
2691
2692 recompute_tree_invariant_for_addr_expr (t);
2693 new_side_effects = TREE_SIDE_EFFECTS (t);
2694 new_constant = TREE_CONSTANT (t);
2695
2696 if (old_constant != new_constant)
2697 {
2698 error ("constant not recomputed when ADDR_EXPR changed");
2699 return t;
2700 }
2701 if (old_side_effects != new_side_effects)
2702 {
2703 error ("side effects not recomputed when ADDR_EXPR changed");
2704 return t;
2705 }
2706
2707 if (!(TREE_CODE (base) == VAR_DECL
2708 || TREE_CODE (base) == PARM_DECL
2709 || TREE_CODE (base) == RESULT_DECL))
2710 return NULL_TREE;
2711
2712 if (DECL_GIMPLE_REG_P (base))
2713 {
2714 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2715 return base;
2716 }
2717
2718 return NULL_TREE;
2719 }
2720
2721 /* Callback for walk_tree; check that all elements with address taken are
2722 properly noticed as such. The DATA argument is unused by this
2723 callback. */
2724
2725 static tree
2726 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2727 {
2728 tree t = *tp, x;
2729
2730 if (TYPE_P (t))
2731 *walk_subtrees = 0;
2732
2733 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2734 #define CHECK_OP(N, MSG) \
2735 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2736 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2737
2738 switch (TREE_CODE (t))
2739 {
2740 case SSA_NAME:
2741 if (SSA_NAME_IN_FREE_LIST (t))
2742 {
2743 error ("SSA name in freelist but still referenced");
2744 return *tp;
2745 }
2746 break;
2747
2748 case INDIRECT_REF:
2749 error ("INDIRECT_REF in gimple IL");
2750 return t;
2751
2752 case MEM_REF:
2753 x = TREE_OPERAND (t, 0);
2754 if (!POINTER_TYPE_P (TREE_TYPE (x))
2755 || !is_gimple_mem_ref_addr (x))
2756 {
2757 error ("invalid first operand of MEM_REF");
2758 return x;
2759 }
2760 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2761 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2762 {
2763 error ("invalid offset operand of MEM_REF");
2764 return TREE_OPERAND (t, 1);
2765 }
2766 if (TREE_CODE (x) == ADDR_EXPR
2767 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2768 return x;
2769 *walk_subtrees = 0;
2770 break;
2771
2772 case ASSERT_EXPR:
2773 x = fold (ASSERT_EXPR_COND (t));
2774 if (x == boolean_false_node)
2775 {
2776 error ("ASSERT_EXPR with an always-false condition");
2777 return *tp;
2778 }
2779 break;
2780
2781 case MODIFY_EXPR:
2782 error ("MODIFY_EXPR not expected while having tuples");
2783 return *tp;
2784
2785 case ADDR_EXPR:
2786 {
2787 tree tem;
2788
2789 gcc_assert (is_gimple_address (t));
2790
2791 /* Skip any references (they will be checked when we recurse down the
2792 tree) and ensure that any variable used as a prefix is marked
2793 addressable. */
2794 for (x = TREE_OPERAND (t, 0);
2795 handled_component_p (x);
2796 x = TREE_OPERAND (x, 0))
2797 ;
2798
2799 if ((tem = verify_address (t, x)))
2800 return tem;
2801
2802 if (!(TREE_CODE (x) == VAR_DECL
2803 || TREE_CODE (x) == PARM_DECL
2804 || TREE_CODE (x) == RESULT_DECL))
2805 return NULL;
2806
2807 if (!TREE_ADDRESSABLE (x))
2808 {
2809 error ("address taken, but ADDRESSABLE bit not set");
2810 return x;
2811 }
2812
2813 break;
2814 }
2815
2816 case COND_EXPR:
2817 x = COND_EXPR_COND (t);
2818 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2819 {
2820 error ("non-integral used in condition");
2821 return x;
2822 }
2823 if (!is_gimple_condexpr (x))
2824 {
2825 error ("invalid conditional operand");
2826 return x;
2827 }
2828 break;
2829
2830 case NON_LVALUE_EXPR:
2831 case TRUTH_NOT_EXPR:
2832 gcc_unreachable ();
2833
2834 CASE_CONVERT:
2835 case FIX_TRUNC_EXPR:
2836 case FLOAT_EXPR:
2837 case NEGATE_EXPR:
2838 case ABS_EXPR:
2839 case BIT_NOT_EXPR:
2840 CHECK_OP (0, "invalid operand to unary operator");
2841 break;
2842
2843 case REALPART_EXPR:
2844 case IMAGPART_EXPR:
2845 case BIT_FIELD_REF:
2846 if (!is_gimple_reg_type (TREE_TYPE (t)))
2847 {
2848 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2849 return t;
2850 }
2851
2852 if (TREE_CODE (t) == BIT_FIELD_REF)
2853 {
2854 tree t0 = TREE_OPERAND (t, 0);
2855 tree t1 = TREE_OPERAND (t, 1);
2856 tree t2 = TREE_OPERAND (t, 2);
2857 if (!tree_fits_uhwi_p (t1)
2858 || !tree_fits_uhwi_p (t2))
2859 {
2860 error ("invalid position or size operand to BIT_FIELD_REF");
2861 return t;
2862 }
2863 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2864 && (TYPE_PRECISION (TREE_TYPE (t))
2865 != tree_to_uhwi (t1)))
2866 {
2867 error ("integral result type precision does not match "
2868 "field size of BIT_FIELD_REF");
2869 return t;
2870 }
2871 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2872 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2873 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2874 != tree_to_uhwi (t1)))
2875 {
2876 error ("mode precision of non-integral result does not "
2877 "match field size of BIT_FIELD_REF");
2878 return t;
2879 }
2880 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2881 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2882 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2883 {
2884 error ("position plus size exceeds size of referenced object in "
2885 "BIT_FIELD_REF");
2886 return t;
2887 }
2888 }
2889 t = TREE_OPERAND (t, 0);
2890
2891 /* Fall-through. */
2892 case COMPONENT_REF:
2893 case ARRAY_REF:
2894 case ARRAY_RANGE_REF:
2895 case VIEW_CONVERT_EXPR:
2896 /* We have a nest of references. Verify that each of the operands
2897 that determine where to reference is either a constant or a variable,
2898 verify that the base is valid, and then show we've already checked
2899 the subtrees. */
2900 while (handled_component_p (t))
2901 {
2902 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2903 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2904 else if (TREE_CODE (t) == ARRAY_REF
2905 || TREE_CODE (t) == ARRAY_RANGE_REF)
2906 {
2907 CHECK_OP (1, "invalid array index");
2908 if (TREE_OPERAND (t, 2))
2909 CHECK_OP (2, "invalid array lower bound");
2910 if (TREE_OPERAND (t, 3))
2911 CHECK_OP (3, "invalid array stride");
2912 }
2913 else if (TREE_CODE (t) == BIT_FIELD_REF
2914 || TREE_CODE (t) == REALPART_EXPR
2915 || TREE_CODE (t) == IMAGPART_EXPR)
2916 {
2917 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2918 "REALPART_EXPR");
2919 return t;
2920 }
2921
2922 t = TREE_OPERAND (t, 0);
2923 }
2924
2925 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2926 {
2927 error ("invalid reference prefix");
2928 return t;
2929 }
2930 *walk_subtrees = 0;
2931 break;
2932 case PLUS_EXPR:
2933 case MINUS_EXPR:
2934 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2935 should be done using POINTER_PLUS_EXPR. */
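/* For example, the C expression p + 1 for int *p must be represented as a
   POINTER_PLUS_EXPR with a sizetype offset (4 where int is 4 bytes),
   never as a PLUS_EXPR on the pointer itself.  */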
2936 if (POINTER_TYPE_P (TREE_TYPE (t)))
2937 {
2938 error ("invalid operand to plus/minus, type is a pointer");
2939 return t;
2940 }
2941 CHECK_OP (0, "invalid operand to binary operator");
2942 CHECK_OP (1, "invalid operand to binary operator");
2943 break;
2944
2945 case POINTER_PLUS_EXPR:
2946 /* Check to make sure the first operand is a pointer or reference type. */
2947 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2948 {
2949 error ("invalid operand to pointer plus, first operand is not a pointer");
2950 return t;
2951 }
2952 /* Check to make sure the second operand is a ptrofftype. */
2953 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2954 {
2955 error ("invalid operand to pointer plus, second operand is not an "
2956 "integer type of appropriate width");
2957 return t;
2958 }
2959 /* FALLTHROUGH */
2960 case LT_EXPR:
2961 case LE_EXPR:
2962 case GT_EXPR:
2963 case GE_EXPR:
2964 case EQ_EXPR:
2965 case NE_EXPR:
2966 case UNORDERED_EXPR:
2967 case ORDERED_EXPR:
2968 case UNLT_EXPR:
2969 case UNLE_EXPR:
2970 case UNGT_EXPR:
2971 case UNGE_EXPR:
2972 case UNEQ_EXPR:
2973 case LTGT_EXPR:
2974 case MULT_EXPR:
2975 case TRUNC_DIV_EXPR:
2976 case CEIL_DIV_EXPR:
2977 case FLOOR_DIV_EXPR:
2978 case ROUND_DIV_EXPR:
2979 case TRUNC_MOD_EXPR:
2980 case CEIL_MOD_EXPR:
2981 case FLOOR_MOD_EXPR:
2982 case ROUND_MOD_EXPR:
2983 case RDIV_EXPR:
2984 case EXACT_DIV_EXPR:
2985 case MIN_EXPR:
2986 case MAX_EXPR:
2987 case LSHIFT_EXPR:
2988 case RSHIFT_EXPR:
2989 case LROTATE_EXPR:
2990 case RROTATE_EXPR:
2991 case BIT_IOR_EXPR:
2992 case BIT_XOR_EXPR:
2993 case BIT_AND_EXPR:
2994 CHECK_OP (0, "invalid operand to binary operator");
2995 CHECK_OP (1, "invalid operand to binary operator");
2996 break;
2997
2998 case CONSTRUCTOR:
2999 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3000 *walk_subtrees = 0;
3001 break;
3002
3003 case CASE_LABEL_EXPR:
3004 if (CASE_CHAIN (t))
3005 {
3006 error ("invalid CASE_CHAIN");
3007 return t;
3008 }
3009 break;
3010
3011 default:
3012 break;
3013 }
3014 return NULL;
3015
3016 #undef CHECK_OP
3017 }
3018
3019
3020 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3021 Returns true if there is an error, otherwise false. */
3022
3023 static bool
3024 verify_types_in_gimple_min_lval (tree expr)
3025 {
3026 tree op;
3027
3028 if (is_gimple_id (expr))
3029 return false;
3030
3031 if (TREE_CODE (expr) != TARGET_MEM_REF
3032 && TREE_CODE (expr) != MEM_REF)
3033 {
3034 error ("invalid expression for min lvalue");
3035 return true;
3036 }
3037
3038 /* TARGET_MEM_REFs are strange beasts. */
3039 if (TREE_CODE (expr) == TARGET_MEM_REF)
3040 return false;
3041
3042 op = TREE_OPERAND (expr, 0);
3043 if (!is_gimple_val (op))
3044 {
3045 error ("invalid operand in indirect reference");
3046 debug_generic_stmt (op);
3047 return true;
3048 }
3049 /* Memory references now generally can involve a value conversion. */
3050
3051 return false;
3052 }
3053
3054 /* Verify if EXPR is a valid GIMPLE reference expression. If
3055 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3056 if there is an error, otherwise false. */
3057
3058 static bool
3059 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3060 {
3061 while (handled_component_p (expr))
3062 {
3063 tree op = TREE_OPERAND (expr, 0);
3064
3065 if (TREE_CODE (expr) == ARRAY_REF
3066 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3067 {
3068 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3069 || (TREE_OPERAND (expr, 2)
3070 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3071 || (TREE_OPERAND (expr, 3)
3072 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3073 {
3074 error ("invalid operands to array reference");
3075 debug_generic_stmt (expr);
3076 return true;
3077 }
3078 }
3079
3080 /* Verify if the reference array element types are compatible. */
3081 if (TREE_CODE (expr) == ARRAY_REF
3082 && !useless_type_conversion_p (TREE_TYPE (expr),
3083 TREE_TYPE (TREE_TYPE (op))))
3084 {
3085 error ("type mismatch in array reference");
3086 debug_generic_stmt (TREE_TYPE (expr));
3087 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3088 return true;
3089 }
3090 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3091 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3092 TREE_TYPE (TREE_TYPE (op))))
3093 {
3094 error ("type mismatch in array range reference");
3095 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3096 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3097 return true;
3098 }
3099
3100 if ((TREE_CODE (expr) == REALPART_EXPR
3101 || TREE_CODE (expr) == IMAGPART_EXPR)
3102 && !useless_type_conversion_p (TREE_TYPE (expr),
3103 TREE_TYPE (TREE_TYPE (op))))
3104 {
3105 error ("type mismatch in real/imagpart reference");
3106 debug_generic_stmt (TREE_TYPE (expr));
3107 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3108 return true;
3109 }
3110
3111 if (TREE_CODE (expr) == COMPONENT_REF
3112 && !useless_type_conversion_p (TREE_TYPE (expr),
3113 TREE_TYPE (TREE_OPERAND (expr, 1))))
3114 {
3115 error ("type mismatch in component reference");
3116 debug_generic_stmt (TREE_TYPE (expr));
3117 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3118 return true;
3119 }
3120
3121 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3122 {
3123 /* For VIEW_CONVERT_EXPRs, which are allowed here too, we only check
3124 that their operand is not an SSA name or an invariant when
3125 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3126 bug). Otherwise there is nothing to verify, gross mismatches at
3127 most invoke undefined behavior. */
3128 if (require_lvalue
3129 && (TREE_CODE (op) == SSA_NAME
3130 || is_gimple_min_invariant (op)))
3131 {
3132 error ("conversion of an SSA_NAME on the left hand side");
3133 debug_generic_stmt (expr);
3134 return true;
3135 }
3136 else if (TREE_CODE (op) == SSA_NAME
3137 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3138 {
3139 error ("conversion of register to a different size");
3140 debug_generic_stmt (expr);
3141 return true;
3142 }
3143 else if (!handled_component_p (op))
3144 return false;
3145 }
3146
3147 expr = op;
3148 }
3149
3150 if (TREE_CODE (expr) == MEM_REF)
3151 {
3152 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3153 {
3154 error ("invalid address operand in MEM_REF");
3155 debug_generic_stmt (expr);
3156 return true;
3157 }
3158 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3159 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3160 {
3161 error ("invalid offset operand in MEM_REF");
3162 debug_generic_stmt (expr);
3163 return true;
3164 }
3165 }
3166 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3167 {
3168 if (!TMR_BASE (expr)
3169 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3170 {
3171 error ("invalid address operand in TARGET_MEM_REF");
3172 return true;
3173 }
3174 if (!TMR_OFFSET (expr)
3175 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3176 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3177 {
3178 error ("invalid offset operand in TARGET_MEM_REF");
3179 debug_generic_stmt (expr);
3180 return true;
3181 }
3182 }
3183
3184 return ((require_lvalue || !is_gimple_min_invariant (expr))
3185 && verify_types_in_gimple_min_lval (expr));
3186 }
3187
3188 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3189 list of pointer-to types that is trivially convertible to DEST. */
3190
3191 static bool
3192 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3193 {
3194 tree src;
3195
3196 if (!TYPE_POINTER_TO (src_obj))
3197 return true;
3198
3199 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3200 if (useless_type_conversion_p (dest, src))
3201 return true;
3202
3203 return false;
3204 }
3205
3206 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3207 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3208
3209 static bool
3210 valid_fixed_convert_types_p (tree type1, tree type2)
3211 {
3212 return (FIXED_POINT_TYPE_P (type1)
3213 && (INTEGRAL_TYPE_P (type2)
3214 || SCALAR_FLOAT_TYPE_P (type2)
3215 || FIXED_POINT_TYPE_P (type2)));
3216 }
3217
3218 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3219 is a problem, otherwise false. */
3220
3221 static bool
3222 verify_gimple_call (gimple stmt)
3223 {
3224 tree fn = gimple_call_fn (stmt);
3225 tree fntype, fndecl;
3226 unsigned i;
3227
3228 if (gimple_call_internal_p (stmt))
3229 {
3230 if (fn)
3231 {
3232 error ("gimple call has two targets");
3233 debug_generic_stmt (fn);
3234 return true;
3235 }
3236 }
3237 else
3238 {
3239 if (!fn)
3240 {
3241 error ("gimple call has no target");
3242 return true;
3243 }
3244 }
3245
3246 if (fn && !is_gimple_call_addr (fn))
3247 {
3248 error ("invalid function in gimple call");
3249 debug_generic_stmt (fn);
3250 return true;
3251 }
3252
3253 if (fn
3254 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3255 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3256 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3257 {
3258 error ("non-function in gimple call");
3259 return true;
3260 }
3261
3262 fndecl = gimple_call_fndecl (stmt);
3263 if (fndecl
3264 && TREE_CODE (fndecl) == FUNCTION_DECL
3265 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3266 && !DECL_PURE_P (fndecl)
3267 && !TREE_READONLY (fndecl))
3268 {
3269 error ("invalid pure const state for function");
3270 return true;
3271 }
3272
3273 if (gimple_call_lhs (stmt)
3274 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3275 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3276 {
3277 error ("invalid LHS in gimple call");
3278 return true;
3279 }
3280
3281 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3282 {
3283 error ("LHS in noreturn call");
3284 return true;
3285 }
3286
3287 fntype = gimple_call_fntype (stmt);
3288 if (fntype
3289 && gimple_call_lhs (stmt)
3290 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3291 TREE_TYPE (fntype))
3292 /* ??? At least C++ misses conversions at assignments from
3293 void * call results.
3294 ??? Java is completely off. Especially with functions
3295 returning java.lang.Object.
3296 For now simply allow arbitrary pointer type conversions. */
3297 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3298 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3299 {
3300 error ("invalid conversion in gimple call");
3301 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3302 debug_generic_stmt (TREE_TYPE (fntype));
3303 return true;
3304 }
3305
3306 if (gimple_call_chain (stmt)
3307 && !is_gimple_val (gimple_call_chain (stmt)))
3308 {
3309 error ("invalid static chain in gimple call");
3310 debug_generic_stmt (gimple_call_chain (stmt));
3311 return true;
3312 }
3313
3314 /* If there is a static chain argument, this should not be an indirect
3315 call, and the decl should have DECL_STATIC_CHAIN set. */
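/* For example, with the GNU C nested-function extension

     int outer (int x) { int inner (void) { return x; } return inner (); }

   the call to inner carries outer's frame as its static chain, and
   inner's FUNCTION_DECL has DECL_STATIC_CHAIN set.  */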
3316 if (gimple_call_chain (stmt))
3317 {
3318 if (!gimple_call_fndecl (stmt))
3319 {
3320 error ("static chain in indirect gimple call");
3321 return true;
3322 }
3323 fn = TREE_OPERAND (fn, 0);
3324
3325 if (!DECL_STATIC_CHAIN (fn))
3326 {
3327 error ("static chain with function that doesn%'t use one");
3328 return true;
3329 }
3330 }
3331
3332 /* ??? The C frontend passes unpromoted arguments in case it
3333 didn't see a function declaration before the call. So for now
3334 leave the call arguments mostly unverified. Once we gimplify
3335 unit-at-a-time we have a chance to fix this. */
3336
3337 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3338 {
3339 tree arg = gimple_call_arg (stmt, i);
3340 if ((is_gimple_reg_type (TREE_TYPE (arg))
3341 && !is_gimple_val (arg))
3342 || (!is_gimple_reg_type (TREE_TYPE (arg))
3343 && !is_gimple_lvalue (arg)))
3344 {
3345 error ("invalid argument to gimple call");
3346 debug_generic_expr (arg);
3347 return true;
3348 }
3349 }
3350
3351 return false;
3352 }
3353
3354 /* Verifies the gimple comparison with the result type TYPE and
3355 the operands OP0 and OP1. */
3356
3357 static bool
3358 verify_gimple_comparison (tree type, tree op0, tree op1)
3359 {
3360 tree op0_type = TREE_TYPE (op0);
3361 tree op1_type = TREE_TYPE (op1);
3362
3363 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3364 {
3365 error ("invalid operands in gimple comparison");
3366 return true;
3367 }
3368
3369 /* For comparisons there is no separate operation type that gives the
3370 effective type the comparison is carried out in. Instead
3371 we require that either the first operand is trivially
3372 convertible into the second, or the other way around.
3373 Because we special-case pointers to void we allow
3374 comparisons of pointers with the same mode as well. */
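/* For example, two pointer operands with distinct pointed-to types may be
   compared as long as both pointers have the same mode, regardless of
   whether one operand type is trivially convertible to the other.  */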
3375 if (!useless_type_conversion_p (op0_type, op1_type)
3376 && !useless_type_conversion_p (op1_type, op0_type)
3377 && (!POINTER_TYPE_P (op0_type)
3378 || !POINTER_TYPE_P (op1_type)
3379 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3380 {
3381 error ("mismatching comparison operand types");
3382 debug_generic_expr (op0_type);
3383 debug_generic_expr (op1_type);
3384 return true;
3385 }
3386
3387 /* The resulting type of a comparison may be an effective boolean type. */
3388 if (INTEGRAL_TYPE_P (type)
3389 && (TREE_CODE (type) == BOOLEAN_TYPE
3390 || TYPE_PRECISION (type) == 1))
3391 {
3392 if (TREE_CODE (op0_type) == VECTOR_TYPE
3393 || TREE_CODE (op1_type) == VECTOR_TYPE)
3394 {
3395 error ("vector comparison returning a boolean");
3396 debug_generic_expr (op0_type);
3397 debug_generic_expr (op1_type);
3398 return true;
3399 }
3400 }
3401 /* Or an integer vector type with the same size and element count
3402 as the comparison operand types. */
3403 else if (TREE_CODE (type) == VECTOR_TYPE
3404 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3405 {
3406 if (TREE_CODE (op0_type) != VECTOR_TYPE
3407 || TREE_CODE (op1_type) != VECTOR_TYPE)
3408 {
3409 error ("non-vector operands in vector comparison");
3410 debug_generic_expr (op0_type);
3411 debug_generic_expr (op1_type);
3412 return true;
3413 }
3414
3415 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3416 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3417 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3418 /* The result of a vector comparison is of signed
3419 integral type. */
3420 || TYPE_UNSIGNED (TREE_TYPE (type)))
3421 {
3422 error ("invalid vector comparison resulting type");
3423 debug_generic_expr (type);
3424 return true;
3425 }
3426 }
3427 else
3428 {
3429 error ("bogus comparison result type");
3430 debug_generic_expr (type);
3431 return true;
3432 }
3433
3434 return false;
3435 }
3436
3437 /* Verify a gimple assignment statement STMT with an unary rhs.
3438 Returns true if anything is wrong. */
3439
3440 static bool
3441 verify_gimple_assign_unary (gimple stmt)
3442 {
3443 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3444 tree lhs = gimple_assign_lhs (stmt);
3445 tree lhs_type = TREE_TYPE (lhs);
3446 tree rhs1 = gimple_assign_rhs1 (stmt);
3447 tree rhs1_type = TREE_TYPE (rhs1);
3448
3449 if (!is_gimple_reg (lhs))
3450 {
3451 error ("non-register as LHS of unary operation");
3452 return true;
3453 }
3454
3455 if (!is_gimple_val (rhs1))
3456 {
3457 error ("invalid operand in unary operation");
3458 return true;
3459 }
3460
3461 /* First handle conversions. */
3462 switch (rhs_code)
3463 {
3464 CASE_CONVERT:
3465 {
3466 /* Allow conversions from pointer type to integral type only if
3467 there is no sign or zero extension involved.
3468 For targets where the precision of ptrofftype doesn't match that
3469 of pointers we need to allow arbitrary conversions to ptrofftype. */
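/* For example, on a target with 64-bit pointers, converting a pointer to
   a 64-bit or a 32-bit integer involves no extension and is accepted,
   while widening it directly to a wider integer type is rejected unless
   the ptrofftype exception above applies.  */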
3470 if ((POINTER_TYPE_P (lhs_type)
3471 && INTEGRAL_TYPE_P (rhs1_type))
3472 || (POINTER_TYPE_P (rhs1_type)
3473 && INTEGRAL_TYPE_P (lhs_type)
3474 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3475 || ptrofftype_p (sizetype))))
3476 return false;
3477
3478 /* Allow conversion from integral to offset type and vice versa. */
3479 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3480 && INTEGRAL_TYPE_P (rhs1_type))
3481 || (INTEGRAL_TYPE_P (lhs_type)
3482 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3483 return false;
3484
3485 /* Otherwise assert we are converting between types of the
3486 same kind. */
3487 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3488 {
3489 error ("invalid types in nop conversion");
3490 debug_generic_expr (lhs_type);
3491 debug_generic_expr (rhs1_type);
3492 return true;
3493 }
3494
3495 return false;
3496 }
3497
3498 case ADDR_SPACE_CONVERT_EXPR:
3499 {
3500 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3501 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3502 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3503 {
3504 error ("invalid types in address space conversion");
3505 debug_generic_expr (lhs_type);
3506 debug_generic_expr (rhs1_type);
3507 return true;
3508 }
3509
3510 return false;
3511 }
3512
3513 case FIXED_CONVERT_EXPR:
3514 {
3515 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3516 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3517 {
3518 error ("invalid types in fixed-point conversion");
3519 debug_generic_expr (lhs_type);
3520 debug_generic_expr (rhs1_type);
3521 return true;
3522 }
3523
3524 return false;
3525 }
3526
3527 case FLOAT_EXPR:
3528 {
3529 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3530 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3531 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3532 {
3533 error ("invalid types in conversion to floating point");
3534 debug_generic_expr (lhs_type);
3535 debug_generic_expr (rhs1_type);
3536 return true;
3537 }
3538
3539 return false;
3540 }
3541
3542 case FIX_TRUNC_EXPR:
3543 {
3544 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3545 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3546 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3547 {
3548 error ("invalid types in conversion to integer");
3549 debug_generic_expr (lhs_type);
3550 debug_generic_expr (rhs1_type);
3551 return true;
3552 }
3553
3554 return false;
3555 }
3556
3557 case VEC_UNPACK_HI_EXPR:
3558 case VEC_UNPACK_LO_EXPR:
3559 case REDUC_MAX_EXPR:
3560 case REDUC_MIN_EXPR:
3561 case REDUC_PLUS_EXPR:
3562 case VEC_UNPACK_FLOAT_HI_EXPR:
3563 case VEC_UNPACK_FLOAT_LO_EXPR:
3564 /* FIXME. */
3565 return false;
3566
3567 case NEGATE_EXPR:
3568 case ABS_EXPR:
3569 case BIT_NOT_EXPR:
3570 case PAREN_EXPR:
3571 case CONJ_EXPR:
3572 break;
3573
3574 default:
3575 gcc_unreachable ();
3576 }
3577
3578 /* For the remaining codes assert there is no conversion involved. */
3579 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3580 {
3581 error ("non-trivial conversion in unary operation");
3582 debug_generic_expr (lhs_type);
3583 debug_generic_expr (rhs1_type);
3584 return true;
3585 }
3586
3587 return false;
3588 }
3589
3590 /* Verify a gimple assignment statement STMT with a binary rhs.
3591 Returns true if anything is wrong. */
3592
3593 static bool
3594 verify_gimple_assign_binary (gimple stmt)
3595 {
3596 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3597 tree lhs = gimple_assign_lhs (stmt);
3598 tree lhs_type = TREE_TYPE (lhs);
3599 tree rhs1 = gimple_assign_rhs1 (stmt);
3600 tree rhs1_type = TREE_TYPE (rhs1);
3601 tree rhs2 = gimple_assign_rhs2 (stmt);
3602 tree rhs2_type = TREE_TYPE (rhs2);
3603
3604 if (!is_gimple_reg (lhs))
3605 {
3606 error ("non-register as LHS of binary operation");
3607 return true;
3608 }
3609
3610 if (!is_gimple_val (rhs1)
3611 || !is_gimple_val (rhs2))
3612 {
3613 error ("invalid operands in binary operation");
3614 return true;
3615 }
3616
3617 /* First handle operations that involve different types. */
3618 switch (rhs_code)
3619 {
3620 case COMPLEX_EXPR:
3621 {
3622 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3623 || !(INTEGRAL_TYPE_P (rhs1_type)
3624 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3625 || !(INTEGRAL_TYPE_P (rhs2_type)
3626 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3627 {
3628 error ("type mismatch in complex expression");
3629 debug_generic_expr (lhs_type);
3630 debug_generic_expr (rhs1_type);
3631 debug_generic_expr (rhs2_type);
3632 return true;
3633 }
3634
3635 return false;
3636 }
3637
3638 case LSHIFT_EXPR:
3639 case RSHIFT_EXPR:
3640 case LROTATE_EXPR:
3641 case RROTATE_EXPR:
3642 {
3643 /* Shifts and rotates are ok on integral types, fixed point
3644 types and integer vector types. */
3645 if ((!INTEGRAL_TYPE_P (rhs1_type)
3646 && !FIXED_POINT_TYPE_P (rhs1_type)
3647 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3648 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3649 || (!INTEGRAL_TYPE_P (rhs2_type)
3650 /* Vector shifts of vectors are also ok. */
3651 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3652 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3653 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3654 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3655 || !useless_type_conversion_p (lhs_type, rhs1_type))
3656 {
3657 error ("type mismatch in shift expression");
3658 debug_generic_expr (lhs_type);
3659 debug_generic_expr (rhs1_type);
3660 debug_generic_expr (rhs2_type);
3661 return true;
3662 }
3663
3664 return false;
3665 }
3666
3667 case VEC_LSHIFT_EXPR:
3668 case VEC_RSHIFT_EXPR:
3669 {
3670 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3671 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3672 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3673 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3674 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3675 || (!INTEGRAL_TYPE_P (rhs2_type)
3676 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3677 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3678 || !useless_type_conversion_p (lhs_type, rhs1_type))
3679 {
3680 error ("type mismatch in vector shift expression");
3681 debug_generic_expr (lhs_type);
3682 debug_generic_expr (rhs1_type);
3683 debug_generic_expr (rhs2_type);
3684 return true;
3685 }
3686 /* For shifting a vector of non-integral components we
3687 only allow shifting by a constant multiple of the element size. */
3688 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3689 && (TREE_CODE (rhs2) != INTEGER_CST
3690 || !div_if_zero_remainder (rhs2,
3691 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3692 {
3693 error ("non-element sized vector shift of floating point vector");
3694 return true;
3695 }
3696
3697 return false;
3698 }
3699
3700 case WIDEN_LSHIFT_EXPR:
3701 {
3702 if (!INTEGRAL_TYPE_P (lhs_type)
3703 || !INTEGRAL_TYPE_P (rhs1_type)
3704 || TREE_CODE (rhs2) != INTEGER_CST
3705 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3706 {
3707 error ("type mismatch in widening vector shift expression");
3708 debug_generic_expr (lhs_type);
3709 debug_generic_expr (rhs1_type);
3710 debug_generic_expr (rhs2_type);
3711 return true;
3712 }
3713
3714 return false;
3715 }
3716
3717 case VEC_WIDEN_LSHIFT_HI_EXPR:
3718 case VEC_WIDEN_LSHIFT_LO_EXPR:
3719 {
3720 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3721 || TREE_CODE (lhs_type) != VECTOR_TYPE
3722 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3723 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3724 || TREE_CODE (rhs2) != INTEGER_CST
3725 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3726 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3727 {
3728 error ("type mismatch in widening vector shift expression");
3729 debug_generic_expr (lhs_type);
3730 debug_generic_expr (rhs1_type);
3731 debug_generic_expr (rhs2_type);
3732 return true;
3733 }
3734
3735 return false;
3736 }
3737
3738 case PLUS_EXPR:
3739 case MINUS_EXPR:
3740 {
3741 tree lhs_etype = lhs_type;
3742 tree rhs1_etype = rhs1_type;
3743 tree rhs2_etype = rhs2_type;
3744 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3745 {
3746 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3747 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3748 {
3749 error ("invalid non-vector operands to vector valued plus");
3750 return true;
3751 }
3752 lhs_etype = TREE_TYPE (lhs_type);
3753 rhs1_etype = TREE_TYPE (rhs1_type);
3754 rhs2_etype = TREE_TYPE (rhs2_type);
3755 }
3756 if (POINTER_TYPE_P (lhs_etype)
3757 || POINTER_TYPE_P (rhs1_etype)
3758 || POINTER_TYPE_P (rhs2_etype))
3759 {
3760 error ("invalid (pointer) operands to plus/minus");
3761 return true;
3762 }
3763
3764 /* Continue with generic binary expression handling. */
3765 break;
3766 }
3767
3768 case POINTER_PLUS_EXPR:
3769 {
3770 if (!POINTER_TYPE_P (rhs1_type)
3771 || !useless_type_conversion_p (lhs_type, rhs1_type)
3772 || !ptrofftype_p (rhs2_type))
3773 {
3774 error ("type mismatch in pointer plus expression");
3775 debug_generic_stmt (lhs_type);
3776 debug_generic_stmt (rhs1_type);
3777 debug_generic_stmt (rhs2_type);
3778 return true;
3779 }
3780
3781 return false;
3782 }
3783
3784 case TRUTH_ANDIF_EXPR:
3785 case TRUTH_ORIF_EXPR:
3786 case TRUTH_AND_EXPR:
3787 case TRUTH_OR_EXPR:
3788 case TRUTH_XOR_EXPR:
3789
3790 gcc_unreachable ();
3791
3792 case LT_EXPR:
3793 case LE_EXPR:
3794 case GT_EXPR:
3795 case GE_EXPR:
3796 case EQ_EXPR:
3797 case NE_EXPR:
3798 case UNORDERED_EXPR:
3799 case ORDERED_EXPR:
3800 case UNLT_EXPR:
3801 case UNLE_EXPR:
3802 case UNGT_EXPR:
3803 case UNGE_EXPR:
3804 case UNEQ_EXPR:
3805 case LTGT_EXPR:
3806 /* Comparisons are also binary, but the result type is not
3807 connected to the operand types. */
3808 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3809
3810 case WIDEN_MULT_EXPR:
3811 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3812 return true;
3813 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3814 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3815
3816 case WIDEN_SUM_EXPR:
3817 case VEC_WIDEN_MULT_HI_EXPR:
3818 case VEC_WIDEN_MULT_LO_EXPR:
3819 case VEC_WIDEN_MULT_EVEN_EXPR:
3820 case VEC_WIDEN_MULT_ODD_EXPR:
3821 case VEC_PACK_TRUNC_EXPR:
3822 case VEC_PACK_SAT_EXPR:
3823 case VEC_PACK_FIX_TRUNC_EXPR:
3824 /* FIXME. */
3825 return false;
3826
3827 case MULT_EXPR:
3828 case MULT_HIGHPART_EXPR:
3829 case TRUNC_DIV_EXPR:
3830 case CEIL_DIV_EXPR:
3831 case FLOOR_DIV_EXPR:
3832 case ROUND_DIV_EXPR:
3833 case TRUNC_MOD_EXPR:
3834 case CEIL_MOD_EXPR:
3835 case FLOOR_MOD_EXPR:
3836 case ROUND_MOD_EXPR:
3837 case RDIV_EXPR:
3838 case EXACT_DIV_EXPR:
3839 case MIN_EXPR:
3840 case MAX_EXPR:
3841 case BIT_IOR_EXPR:
3842 case BIT_XOR_EXPR:
3843 case BIT_AND_EXPR:
3844 /* Continue with generic binary expression handling. */
3845 break;
3846
3847 default:
3848 gcc_unreachable ();
3849 }
3850
3851 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3852 || !useless_type_conversion_p (lhs_type, rhs2_type))
3853 {
3854 error ("type mismatch in binary expression");
3855 debug_generic_stmt (lhs_type);
3856 debug_generic_stmt (rhs1_type);
3857 debug_generic_stmt (rhs2_type);
3858 return true;
3859 }
3860
3861 return false;
3862 }
3863
3864 /* Verify a gimple assignment statement STMT with a ternary rhs.
3865 Returns true if anything is wrong. */
3866
3867 static bool
3868 verify_gimple_assign_ternary (gimple stmt)
3869 {
3870 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3871 tree lhs = gimple_assign_lhs (stmt);
3872 tree lhs_type = TREE_TYPE (lhs);
3873 tree rhs1 = gimple_assign_rhs1 (stmt);
3874 tree rhs1_type = TREE_TYPE (rhs1);
3875 tree rhs2 = gimple_assign_rhs2 (stmt);
3876 tree rhs2_type = TREE_TYPE (rhs2);
3877 tree rhs3 = gimple_assign_rhs3 (stmt);
3878 tree rhs3_type = TREE_TYPE (rhs3);
3879
3880 if (!is_gimple_reg (lhs))
3881 {
3882 error ("non-register as LHS of ternary operation");
3883 return true;
3884 }
3885
3886 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3887 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3888 || !is_gimple_val (rhs2)
3889 || !is_gimple_val (rhs3))
3890 {
3891 error ("invalid operands in ternary operation");
3892 return true;
3893 }
3894
3895 /* First handle operations that involve different types. */
3896 switch (rhs_code)
3897 {
3898 case WIDEN_MULT_PLUS_EXPR:
3899 case WIDEN_MULT_MINUS_EXPR:
3900 if ((!INTEGRAL_TYPE_P (rhs1_type)
3901 && !FIXED_POINT_TYPE_P (rhs1_type))
3902 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3903 || !useless_type_conversion_p (lhs_type, rhs3_type)
3904 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3905 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3906 {
3907 error ("type mismatch in widening multiply-accumulate expression");
3908 debug_generic_expr (lhs_type);
3909 debug_generic_expr (rhs1_type);
3910 debug_generic_expr (rhs2_type);
3911 debug_generic_expr (rhs3_type);
3912 return true;
3913 }
3914 break;
3915
3916 case FMA_EXPR:
3917 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3918 || !useless_type_conversion_p (lhs_type, rhs2_type)
3919 || !useless_type_conversion_p (lhs_type, rhs3_type))
3920 {
3921 error ("type mismatch in fused multiply-add expression");
3922 debug_generic_expr (lhs_type);
3923 debug_generic_expr (rhs1_type);
3924 debug_generic_expr (rhs2_type);
3925 debug_generic_expr (rhs3_type);
3926 return true;
3927 }
3928 break;
3929
3930 case COND_EXPR:
3931 case VEC_COND_EXPR:
3932 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3933 || !useless_type_conversion_p (lhs_type, rhs3_type))
3934 {
3935 error ("type mismatch in conditional expression");
3936 debug_generic_expr (lhs_type);
3937 debug_generic_expr (rhs2_type);
3938 debug_generic_expr (rhs3_type);
3939 return true;
3940 }
3941 break;
3942
3943 case VEC_PERM_EXPR:
3944 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3945 || !useless_type_conversion_p (lhs_type, rhs2_type))
3946 {
3947 error ("type mismatch in vector permute expression");
3948 debug_generic_expr (lhs_type);
3949 debug_generic_expr (rhs1_type);
3950 debug_generic_expr (rhs2_type);
3951 debug_generic_expr (rhs3_type);
3952 return true;
3953 }
3954
3955 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3956 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3957 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3958 {
3959 error ("vector types expected in vector permute expression");
3960 debug_generic_expr (lhs_type);
3961 debug_generic_expr (rhs1_type);
3962 debug_generic_expr (rhs2_type);
3963 debug_generic_expr (rhs3_type);
3964 return true;
3965 }
3966
3967 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3968 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3969 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3970 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3971 != TYPE_VECTOR_SUBPARTS (lhs_type))
3972 {
3973 error ("vectors with different element number found "
3974 "in vector permute expression");
3975 debug_generic_expr (lhs_type);
3976 debug_generic_expr (rhs1_type);
3977 debug_generic_expr (rhs2_type);
3978 debug_generic_expr (rhs3_type);
3979 return true;
3980 }
3981
3982 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3983 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3984 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3985 {
3986 error ("invalid mask type in vector permute expression");
3987 debug_generic_expr (lhs_type);
3988 debug_generic_expr (rhs1_type);
3989 debug_generic_expr (rhs2_type);
3990 debug_generic_expr (rhs3_type);
3991 return true;
3992 }
3993
3994 return false;
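
/* Illustrative example (a sketch, not from the sources):

     v4si_res = VEC_PERM_EXPR <v4si_a, v4si_b, v4si_sel>;

   All four vectors have the same number of elements, the result and the
   two data operands share a compatible type, and the selector's elements
   are integers as wide as the data elements.  */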
3995
3996 case SAD_EXPR:
3997 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
3998 || !useless_type_conversion_p (lhs_type, rhs3_type)
3999 || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4000 (TYPE_MODE (TREE_TYPE (rhs1_type))))
4001 > GET_MODE_BITSIZE (GET_MODE_INNER
4002 (TYPE_MODE (TREE_TYPE (lhs_type)))))
4003 {
4004 error ("type mismatch in sad expression");
4005 debug_generic_expr (lhs_type);
4006 debug_generic_expr (rhs1_type);
4007 debug_generic_expr (rhs2_type);
4008 debug_generic_expr (rhs3_type);
4009 return true;
4010 }
4011
4012 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4013 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4014 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4015 {
4016 error ("vector types expected in sad expression");
4017 debug_generic_expr (lhs_type);
4018 debug_generic_expr (rhs1_type);
4019 debug_generic_expr (rhs2_type);
4020 debug_generic_expr (rhs3_type);
4021 return true;
4022 }
4023
4024 return false;
4025
4026 case DOT_PROD_EXPR:
4027 case REALIGN_LOAD_EXPR:
4028 /* FIXME. */
4029 return false;
4030
4031 default:
4032 gcc_unreachable ();
4033 }
4034 return false;
4035 }
4036
4037 /* Verify a gimple assignment statement STMT with a single rhs.
4038 Returns true if anything is wrong. */
4039
4040 static bool
4041 verify_gimple_assign_single (gimple stmt)
4042 {
4043 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4044 tree lhs = gimple_assign_lhs (stmt);
4045 tree lhs_type = TREE_TYPE (lhs);
4046 tree rhs1 = gimple_assign_rhs1 (stmt);
4047 tree rhs1_type = TREE_TYPE (rhs1);
4048 bool res = false;
4049
4050 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4051 {
4052 error ("non-trivial conversion at assignment");
4053 debug_generic_expr (lhs_type);
4054 debug_generic_expr (rhs1_type);
4055 return true;
4056 }
4057
4058 if (gimple_clobber_p (stmt)
4059 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4060 {
4061 error ("non-decl/MEM_REF LHS in clobber statement");
4062 debug_generic_expr (lhs);
4063 return true;
4064 }
4065
4066 if (handled_component_p (lhs)
4067 || TREE_CODE (lhs) == MEM_REF
4068 || TREE_CODE (lhs) == TARGET_MEM_REF)
4069 res |= verify_types_in_gimple_reference (lhs, true);
4070
4071 /* Special codes we cannot handle via their class. */
4072 switch (rhs_code)
4073 {
4074 case ADDR_EXPR:
4075 {
4076 tree op = TREE_OPERAND (rhs1, 0);
4077 if (!is_gimple_addressable (op))
4078 {
4079 error ("invalid operand in unary expression");
4080 return true;
4081 }
4082
4083 /* Technically there is no longer a need for matching types, but
4084 gimple hygiene asks for this check. In LTO we can end up
4085 combining incompatible units and thus end up with addresses
4086 of globals that change their type to a common one. */
4087 if (!in_lto_p
4088 && !types_compatible_p (TREE_TYPE (op),
4089 TREE_TYPE (TREE_TYPE (rhs1)))
4090 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4091 TREE_TYPE (op)))
4092 {
4093 error ("type mismatch in address expression");
4094 debug_generic_stmt (TREE_TYPE (rhs1));
4095 debug_generic_stmt (TREE_TYPE (op));
4096 return true;
4097 }
4098
4099 return verify_types_in_gimple_reference (op, true);
4100 }
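
/* Illustrative example (a sketch, not from the sources): for

     p_1 = &a;

   RHS1 has pointer type, and outside of LTO the type of A must be
   compatible with the type that TREE_TYPE (rhs1) points to.  */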
4101
4102 /* tcc_reference */
4103 case INDIRECT_REF:
4104 error ("INDIRECT_REF in gimple IL");
4105 return true;
4106
4107 case COMPONENT_REF:
4108 case BIT_FIELD_REF:
4109 case ARRAY_REF:
4110 case ARRAY_RANGE_REF:
4111 case VIEW_CONVERT_EXPR:
4112 case REALPART_EXPR:
4113 case IMAGPART_EXPR:
4114 case TARGET_MEM_REF:
4115 case MEM_REF:
4116 if (!is_gimple_reg (lhs)
4117 && is_gimple_reg_type (TREE_TYPE (lhs)))
4118 {
4119 error ("invalid rhs for gimple memory store");
4120 debug_generic_stmt (lhs);
4121 debug_generic_stmt (rhs1);
4122 return true;
4123 }
4124 return res || verify_types_in_gimple_reference (rhs1, false);
4125
4126 /* tcc_constant */
4127 case SSA_NAME:
4128 case INTEGER_CST:
4129 case REAL_CST:
4130 case FIXED_CST:
4131 case COMPLEX_CST:
4132 case VECTOR_CST:
4133 case STRING_CST:
4134 return res;
4135
4136 /* tcc_declaration */
4137 case CONST_DECL:
4138 return res;
4139 case VAR_DECL:
4140 case PARM_DECL:
4141 if (!is_gimple_reg (lhs)
4142 && !is_gimple_reg (rhs1)
4143 && is_gimple_reg_type (TREE_TYPE (lhs)))
4144 {
4145 error ("invalid rhs for gimple memory store");
4146 debug_generic_stmt (lhs);
4147 debug_generic_stmt (rhs1);
4148 return true;
4149 }
4150 return res;
4151
4152 case CONSTRUCTOR:
4153 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4154 {
4155 unsigned int i;
4156 tree elt_i, elt_v, elt_t = NULL_TREE;
4157
4158 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4159 return res;
4160 /* For vector CONSTRUCTORs we require that either it is an empty
4161 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4162 (then the element count must be correct to cover the whole
4163 outer vector and the index must be NULL on all elements), or it
4164 is a CONSTRUCTOR of scalar elements, where as an exception we
4165 allow a smaller number of elements (assuming zero filling) and
4166 consecutive indexes as compared to NULL indexes (such
4167 CONSTRUCTORs can appear in the IL from FEs). */
4168 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4169 {
4170 if (elt_t == NULL_TREE)
4171 {
4172 elt_t = TREE_TYPE (elt_v);
4173 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4174 {
4176 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4177 TREE_TYPE (elt_t)))
4178 {
4179 error ("incorrect type of vector CONSTRUCTOR"
4180 " elements");
4181 debug_generic_stmt (rhs1);
4182 return true;
4183 }
4184 else if (CONSTRUCTOR_NELTS (rhs1)
4185 * TYPE_VECTOR_SUBPARTS (elt_t)
4186 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4187 {
4188 error ("incorrect number of vector CONSTRUCTOR"
4189 " elements");
4190 debug_generic_stmt (rhs1);
4191 return true;
4192 }
4193 }
4194 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4195 elt_t))
4196 {
4197 error ("incorrect type of vector CONSTRUCTOR elements");
4198 debug_generic_stmt (rhs1);
4199 return true;
4200 }
4201 else if (CONSTRUCTOR_NELTS (rhs1)
4202 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4203 {
4204 error ("incorrect number of vector CONSTRUCTOR elements");
4205 debug_generic_stmt (rhs1);
4206 return true;
4207 }
4208 }
4209 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4210 {
4211 error ("incorrect type of vector CONSTRUCTOR elements");
4212 debug_generic_stmt (rhs1);
4213 return true;
4214 }
4215 if (elt_i != NULL_TREE
4216 && (TREE_CODE (elt_t) == VECTOR_TYPE
4217 || TREE_CODE (elt_i) != INTEGER_CST
4218 || compare_tree_int (elt_i, i) != 0))
4219 {
4220 error ("vector CONSTRUCTOR with non-NULL element index");
4221 debug_generic_stmt (rhs1);
4222 return true;
4223 }
4224 if (!is_gimple_val (elt_v))
4225 {
4226 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4227 debug_generic_stmt (rhs1);
4228 return true;
4229 }
4230 }
4231 }
4232 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4233 {
4234 error ("non-vector CONSTRUCTOR with elements");
4235 debug_generic_stmt (rhs1);
4236 return true;
4237 }
4238 return res;
4239 case OBJ_TYPE_REF:
4240 case ASSERT_EXPR:
4241 case WITH_SIZE_EXPR:
4242 /* FIXME. */
4243 return res;
4244
4245 default:;
4246 }
4247
4248 return res;
4249 }
4250
4251 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4252 is a problem, otherwise false. */
4253
4254 static bool
4255 verify_gimple_assign (gimple stmt)
4256 {
4257 switch (gimple_assign_rhs_class (stmt))
4258 {
4259 case GIMPLE_SINGLE_RHS:
4260 return verify_gimple_assign_single (stmt);
4261
4262 case GIMPLE_UNARY_RHS:
4263 return verify_gimple_assign_unary (stmt);
4264
4265 case GIMPLE_BINARY_RHS:
4266 return verify_gimple_assign_binary (stmt);
4267
4268 case GIMPLE_TERNARY_RHS:
4269 return verify_gimple_assign_ternary (stmt);
4270
4271 default:
4272 gcc_unreachable ();
4273 }
4274 }
4275
4276 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4277 is a problem, otherwise false. */
4278
4279 static bool
4280 verify_gimple_return (gimple stmt)
4281 {
4282 tree op = gimple_return_retval (stmt);
4283 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4284
4285 /* We cannot test for present return values as we do not fix up missing
4286 return values from the original source. */
4287 if (op == NULL)
4288 return false;
4289
4290 if (!is_gimple_val (op)
4291 && TREE_CODE (op) != RESULT_DECL)
4292 {
4293 error ("invalid operand in return statement");
4294 debug_generic_stmt (op);
4295 return true;
4296 }
4297
4298 if ((TREE_CODE (op) == RESULT_DECL
4299 && DECL_BY_REFERENCE (op))
4300 || (TREE_CODE (op) == SSA_NAME
4301 && SSA_NAME_VAR (op)
4302 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4303 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4304 op = TREE_TYPE (op);
4305
4306 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4307 {
4308 error ("invalid conversion in return statement");
4309 debug_generic_stmt (restype);
4310 debug_generic_stmt (TREE_TYPE (op));
4311 return true;
4312 }
4313
4314 return false;
4315 }
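
/* Illustrative example (a sketch, not from the sources): for a function
   returning an aggregate by invisible reference, e.g.

     struct S f (void) { struct S s; ...; return s; }

   the RESULT_DECL has DECL_BY_REFERENCE set and is of pointer type, so
   OP is replaced by its type above and the check compares the declared
   result type against the pointed-to type.  */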
4316
4317
4318 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4319 is a problem, otherwise false. */
4320
4321 static bool
4322 verify_gimple_goto (gimple stmt)
4323 {
4324 tree dest = gimple_goto_dest (stmt);
4325
4326 /* ??? We have two canonical forms of direct goto destinations, a
4327 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4328 if (TREE_CODE (dest) != LABEL_DECL
4329 && (!is_gimple_val (dest)
4330 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4331 {
4332 error ("goto destination is neither a label nor a pointer");
4333 return true;
4334 }
4335
4336 return false;
4337 }
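
/* Illustrative example (a sketch, not from the sources): both the direct
   form

     goto D.1234;			<-- bare LABEL_DECL

   and the computed form

     goto *p_1;				<-- GIMPLE value of pointer type

   are accepted by the check above.  */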
4338
4339 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4340 is a problem, otherwise false. */
4341
4342 static bool
4343 verify_gimple_switch (gimple stmt)
4344 {
4345 unsigned int i, n;
4346 tree elt, prev_upper_bound = NULL_TREE;
4347 tree index_type, elt_type = NULL_TREE;
4348
4349 if (!is_gimple_val (gimple_switch_index (stmt)))
4350 {
4351 error ("invalid operand to switch statement");
4352 debug_generic_stmt (gimple_switch_index (stmt));
4353 return true;
4354 }
4355
4356 index_type = TREE_TYPE (gimple_switch_index (stmt));
4357 if (! INTEGRAL_TYPE_P (index_type))
4358 {
4359 error ("non-integral type switch statement");
4360 debug_generic_expr (index_type);
4361 return true;
4362 }
4363
4364 elt = gimple_switch_label (stmt, 0);
4365 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4366 {
4367 error ("invalid default case label in switch statement");
4368 debug_generic_expr (elt);
4369 return true;
4370 }
4371
4372 n = gimple_switch_num_labels (stmt);
4373 for (i = 1; i < n; i++)
4374 {
4375 elt = gimple_switch_label (stmt, i);
4376
4377 if (! CASE_LOW (elt))
4378 {
4379 error ("invalid case label in switch statement");
4380 debug_generic_expr (elt);
4381 return true;
4382 }
4383 if (CASE_HIGH (elt)
4384 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4385 {
4386 error ("invalid case range in switch statement");
4387 debug_generic_expr (elt);
4388 return true;
4389 }
4390
4391 if (elt_type)
4392 {
4393 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4394 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4395 {
4396 error ("type mismatch for case label in switch statement");
4397 debug_generic_expr (elt);
4398 return true;
4399 }
4400 }
4401 else
4402 {
4403 elt_type = TREE_TYPE (CASE_LOW (elt));
4404 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4405 {
4406 error ("type precision mismatch in switch statement");
4407 return true;
4408 }
4409 }
4410
4411 if (prev_upper_bound)
4412 {
4413 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4414 {
4415 error ("case labels not sorted in switch statement");
4416 return true;
4417 }
4418 }
4419
4420 prev_upper_bound = CASE_HIGH (elt);
4421 if (! prev_upper_bound)
4422 prev_upper_bound = CASE_LOW (elt);
4423 }
4424
4425 return false;
4426 }
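
/* Illustrative example (a sketch, not from the sources): a well-formed
   switch looks like

     switch (i_1) <default: <L2>, case 1: <L0>, case 5 ... 7: <L1>>

   with the default label first and carrying neither CASE_LOW nor
   CASE_HIGH, every other label carrying a CASE_LOW, ranges satisfying
   CASE_LOW < CASE_HIGH, all labels of one type, and the labels sorted
   in increasing order.  */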
4427
4428 /* Verify a gimple debug statement STMT.
4429 Returns true if anything is wrong. */
4430
4431 static bool
4432 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4433 {
4434 /* There isn't much that could be wrong in a gimple debug stmt. A
4435 gimple debug bind stmt, for example, maps a tree (usually
4436 a VAR_DECL or a PARM_DECL, but possibly some scalarized
4437 component or member of an aggregate type) to another tree that
4438 can be an arbitrary expression. These stmts expand into debug
4439 insns, and are converted to debug notes by var-tracking.c. */
4440 return false;
4441 }
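
/* Illustrative example (a sketch, not from the sources): a debug bind
   such as

     # DEBUG x => x_1 + 1

   maps the user variable X to an arbitrary expression, so there is
   nothing type-related to reject here.  */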
4442
4443 /* Verify a gimple label statement STMT.
4444 Returns true if anything is wrong. */
4445
4446 static bool
4447 verify_gimple_label (gimple stmt)
4448 {
4449 tree decl = gimple_label_label (stmt);
4450 int uid;
4451 bool err = false;
4452
4453 if (TREE_CODE (decl) != LABEL_DECL)
4454 return true;
4455 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4456 && DECL_CONTEXT (decl) != current_function_decl)
4457 {
4458 error ("label's context is not the current function decl");
4459 err |= true;
4460 }
4461
4462 uid = LABEL_DECL_UID (decl);
4463 if (cfun->cfg
4464 && (uid == -1
4465 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4466 {
4467 error ("incorrect entry in label_to_block_map");
4468 err |= true;
4469 }
4470
4471 uid = EH_LANDING_PAD_NR (decl);
4472 if (uid)
4473 {
4474 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4475 if (decl != lp->post_landing_pad)
4476 {
4477 error ("incorrect setting of landing pad number");
4478 err |= true;
4479 }
4480 }
4481
4482 return err;
4483 }
4484
4485 /* Verify the GIMPLE statement STMT. Returns true if there is an
4486 error, otherwise false. */
4487
4488 static bool
4489 verify_gimple_stmt (gimple stmt)
4490 {
4491 switch (gimple_code (stmt))
4492 {
4493 case GIMPLE_ASSIGN:
4494 return verify_gimple_assign (stmt);
4495
4496 case GIMPLE_LABEL:
4497 return verify_gimple_label (stmt);
4498
4499 case GIMPLE_CALL:
4500 return verify_gimple_call (stmt);
4501
4502 case GIMPLE_COND:
4503 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4504 {
4505 error ("invalid comparison code in gimple cond");
4506 return true;
4507 }
4508 if (!(!gimple_cond_true_label (stmt)
4509 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4510 || !(!gimple_cond_false_label (stmt)
4511 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4512 {
4513 error ("invalid labels in gimple cond");
4514 return true;
4515 }
4516
4517 return verify_gimple_comparison (boolean_type_node,
4518 gimple_cond_lhs (stmt),
4519 gimple_cond_rhs (stmt));
4520
4521 case GIMPLE_GOTO:
4522 return verify_gimple_goto (stmt);
4523
4524 case GIMPLE_SWITCH:
4525 return verify_gimple_switch (stmt);
4526
4527 case GIMPLE_RETURN:
4528 return verify_gimple_return (stmt);
4529
4530 case GIMPLE_ASM:
4531 return false;
4532
4533 case GIMPLE_TRANSACTION:
4534 return verify_gimple_transaction (stmt);
4535
4536 /* Tuples that do not have tree operands. */
4537 case GIMPLE_NOP:
4538 case GIMPLE_PREDICT:
4539 case GIMPLE_RESX:
4540 case GIMPLE_EH_DISPATCH:
4541 case GIMPLE_EH_MUST_NOT_THROW:
4542 return false;
4543
4544 CASE_GIMPLE_OMP:
4545 /* OpenMP directives are validated by the FE and never operated
4546 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4547 non-gimple expressions when the main index variable has had
4548 its address taken. This does not affect the loop itself
4549 because the header of a GIMPLE_OMP_FOR is merely used to determine
4550 how to set up the parallel iteration. */
4551 return false;
4552
4553 case GIMPLE_DEBUG:
4554 return verify_gimple_debug (stmt);
4555
4556 default:
4557 gcc_unreachable ();
4558 }
4559 }
4560
4561 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4562 and false otherwise. */
4563
4564 static bool
4565 verify_gimple_phi (gimple phi)
4566 {
4567 bool err = false;
4568 unsigned i;
4569 tree phi_result = gimple_phi_result (phi);
4570 bool virtual_p;
4571
4572 if (!phi_result)
4573 {
4574 error ("invalid PHI result");
4575 return true;
4576 }
4577
4578 virtual_p = virtual_operand_p (phi_result);
4579 if (TREE_CODE (phi_result) != SSA_NAME
4580 || (virtual_p
4581 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4582 {
4583 error ("invalid PHI result");
4584 err = true;
4585 }
4586
4587 for (i = 0; i < gimple_phi_num_args (phi); i++)
4588 {
4589 tree t = gimple_phi_arg_def (phi, i);
4590
4591 if (!t)
4592 {
4593 error ("missing PHI def");
4594 err |= true;
4595 continue;
4596 }
4597 /* Addressable variables do have SSA_NAMEs but they
4598 are not considered gimple values. */
4599 else if ((TREE_CODE (t) == SSA_NAME
4600 && virtual_p != virtual_operand_p (t))
4601 || (virtual_p
4602 && (TREE_CODE (t) != SSA_NAME
4603 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4604 || (!virtual_p
4605 && !is_gimple_val (t)))
4606 {
4607 error ("invalid PHI argument");
4608 debug_generic_expr (t);
4609 err |= true;
4610 }
4611 #ifdef ENABLE_TYPES_CHECKING
4612 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4613 {
4614 error ("incompatible types in PHI argument %u", i);
4615 debug_generic_stmt (TREE_TYPE (phi_result));
4616 debug_generic_stmt (TREE_TYPE (t));
4617 err |= true;
4618 }
4619 #endif
4620 }
4621
4622 return err;
4623 }
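
/* Illustrative example (a sketch, not from the sources): at a join point

     # a_1 = PHI <a_2(2), a_3(4)>

   the result and all arguments must be GIMPLE values of compatible
   types, while a virtual PHI

     # .MEM_5 = PHI <.MEM_2(2), .MEM_4(4)>

   must use virtual operands throughout.  */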
4624
4625 /* Verify the GIMPLE statements inside the sequence STMTS. */
4626
4627 static bool
4628 verify_gimple_in_seq_2 (gimple_seq stmts)
4629 {
4630 gimple_stmt_iterator ittr;
4631 bool err = false;
4632
4633 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4634 {
4635 gimple stmt = gsi_stmt (ittr);
4636
4637 switch (gimple_code (stmt))
4638 {
4639 case GIMPLE_BIND:
4640 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4641 break;
4642
4643 case GIMPLE_TRY:
4644 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4645 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4646 break;
4647
4648 case GIMPLE_EH_FILTER:
4649 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4650 break;
4651
4652 case GIMPLE_EH_ELSE:
4653 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4654 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4655 break;
4656
4657 case GIMPLE_CATCH:
4658 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4659 break;
4660
4661 case GIMPLE_TRANSACTION:
4662 err |= verify_gimple_transaction (stmt);
4663 break;
4664
4665 default:
4666 {
4667 bool err2 = verify_gimple_stmt (stmt);
4668 if (err2)
4669 debug_gimple_stmt (stmt);
4670 err |= err2;
4671 }
4672 }
4673 }
4674
4675 return err;
4676 }
4677
4678 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4679 is a problem, otherwise false. */
4680
4681 static bool
4682 verify_gimple_transaction (gimple stmt)
4683 {
4684 tree lab = gimple_transaction_label (stmt);
4685 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4686 return true;
4687 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4688 }
4689
4690
4691 /* Verify the GIMPLE statements inside the statement list STMTS. */
4692
4693 DEBUG_FUNCTION void
4694 verify_gimple_in_seq (gimple_seq stmts)
4695 {
4696 timevar_push (TV_TREE_STMT_VERIFY);
4697 if (verify_gimple_in_seq_2 (stmts))
4698 internal_error ("verify_gimple failed");
4699 timevar_pop (TV_TREE_STMT_VERIFY);
4700 }
4701
4702 /* Return true when T can be shared. */
4703
4704 static bool
4705 tree_node_can_be_shared (tree t)
4706 {
4707 if (IS_TYPE_OR_DECL_P (t)
4708 || is_gimple_min_invariant (t)
4709 || TREE_CODE (t) == SSA_NAME
4710 || t == error_mark_node
4711 || TREE_CODE (t) == IDENTIFIER_NODE)
4712 return true;
4713
4714 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4715 return true;
4716
4717 if (DECL_P (t))
4718 return true;
4719
4720 return false;
4721 }
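
/* Illustrative example (a sketch, not from the sources): in

     x_1 = y_2 + 42;
     z_3 = w_4 + 42;

   the INTEGER_CST 42 and the SSA names may be shared between statements,
   but expression nodes such as a MEM_REF operand must be unshared so
   that each statement owns its operand trees.  */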
4722
4723 /* Called via walk_tree. Verify tree sharing. */
4724
4725 static tree
4726 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4727 {
4728 hash_set<void *> *visited = (hash_set<void *> *) data;
4729
4730 if (tree_node_can_be_shared (*tp))
4731 {
4732 *walk_subtrees = false;
4733 return NULL;
4734 }
4735
4736 if (visited->add (*tp))
4737 return *tp;
4738
4739 return NULL;
4740 }
4741
4742 /* Called via walk_gimple_stmt. Verify tree sharing. */
4743
4744 static tree
4745 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4746 {
4747 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4748 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4749 }
4750
4751 static bool eh_error_found;
4752 bool
4753 verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4754 hash_set<gimple> *visited)
4755 {
4756 if (!visited->contains (stmt))
4757 {
4758 error ("dead STMT in EH table");
4759 debug_gimple_stmt (stmt);
4760 eh_error_found = true;
4761 }
4762 return true;
4763 }
4764
4765 /* Verify that the block of location LOC is in BLOCKS. */
4766
4767 static bool
4768 verify_location (hash_set<tree> *blocks, location_t loc)
4769 {
4770 tree block = LOCATION_BLOCK (loc);
4771 if (block != NULL_TREE
4772 && !blocks->contains (block))
4773 {
4774 error ("location references block not in block tree");
4775 return true;
4776 }
4777 if (block != NULL_TREE)
4778 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4779 return false;
4780 }
4781
4782 /* Called via walk_tree. Verify that expressions have no blocks. */
4783
4784 static tree
4785 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4786 {
4787 if (!EXPR_P (*tp))
4788 {
4789 *walk_subtrees = false;
4790 return NULL;
4791 }
4792
4793 location_t loc = EXPR_LOCATION (*tp);
4794 if (LOCATION_BLOCK (loc) != NULL)
4795 return *tp;
4796
4797 return NULL;
4798 }
4799
4800 /* Called via walk_tree. Verify locations of expressions. */
4801
4802 static tree
4803 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4804 {
4805 hash_set<tree> *blocks = (hash_set<tree> *) data;
4806
4807 if (TREE_CODE (*tp) == VAR_DECL
4808 && DECL_HAS_DEBUG_EXPR_P (*tp))
4809 {
4810 tree t = DECL_DEBUG_EXPR (*tp);
4811 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4812 if (addr)
4813 return addr;
4814 }
4815 if ((TREE_CODE (*tp) == VAR_DECL
4816 || TREE_CODE (*tp) == PARM_DECL
4817 || TREE_CODE (*tp) == RESULT_DECL)
4818 && DECL_HAS_VALUE_EXPR_P (*tp))
4819 {
4820 tree t = DECL_VALUE_EXPR (*tp);
4821 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4822 if (addr)
4823 return addr;
4824 }
4825
4826 if (!EXPR_P (*tp))
4827 {
4828 *walk_subtrees = false;
4829 return NULL;
4830 }
4831
4832 location_t loc = EXPR_LOCATION (*tp);
4833 if (verify_location (blocks, loc))
4834 return *tp;
4835
4836 return NULL;
4837 }
4838
4839 /* Called via walk_gimple_op. Verify locations of expressions. */
4840
4841 static tree
4842 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4843 {
4844 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4845 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4846 }
4847
4848 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4849
4850 static void
4851 collect_subblocks (hash_set<tree> *blocks, tree block)
4852 {
4853 tree t;
4854 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4855 {
4856 blocks->add (t);
4857 collect_subblocks (blocks, t);
4858 }
4859 }
4860
4861 /* Verify the GIMPLE statements in the CFG of FN. */
4862
4863 DEBUG_FUNCTION void
4864 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4865 {
4866 basic_block bb;
4867 bool err = false;
4868
4869 timevar_push (TV_TREE_STMT_VERIFY);
4870 hash_set<void *> visited;
4871 hash_set<gimple> visited_stmts;
4872
4873 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4874 hash_set<tree> blocks;
4875 if (DECL_INITIAL (fn->decl))
4876 {
4877 blocks.add (DECL_INITIAL (fn->decl));
4878 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4879 }
4880
4881 FOR_EACH_BB_FN (bb, fn)
4882 {
4883 gimple_stmt_iterator gsi;
4884
4885 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4886 {
4887 gimple phi = gsi_stmt (gsi);
4888 bool err2 = false;
4889 unsigned i;
4890
4891 visited_stmts.add (phi);
4892
4893 if (gimple_bb (phi) != bb)
4894 {
4895 error ("gimple_bb (phi) is set to a wrong basic block");
4896 err2 = true;
4897 }
4898
4899 err2 |= verify_gimple_phi (phi);
4900
4901 /* Only PHI arguments have locations. */
4902 if (gimple_location (phi) != UNKNOWN_LOCATION)
4903 {
4904 error ("PHI node with location");
4905 err2 = true;
4906 }
4907
4908 for (i = 0; i < gimple_phi_num_args (phi); i++)
4909 {
4910 tree arg = gimple_phi_arg_def (phi, i);
4911 tree addr = walk_tree (&arg, verify_node_sharing_1,
4912 &visited, NULL);
4913 if (addr)
4914 {
4915 error ("incorrect sharing of tree nodes");
4916 debug_generic_expr (addr);
4917 err2 |= true;
4918 }
4919 location_t loc = gimple_phi_arg_location (phi, i);
4920 if (virtual_operand_p (gimple_phi_result (phi))
4921 && loc != UNKNOWN_LOCATION)
4922 {
4923 error ("virtual PHI with argument locations");
4924 err2 = true;
4925 }
4926 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4927 if (addr)
4928 {
4929 debug_generic_expr (addr);
4930 err2 = true;
4931 }
4932 err2 |= verify_location (&blocks, loc);
4933 }
4934
4935 if (err2)
4936 debug_gimple_stmt (phi);
4937 err |= err2;
4938 }
4939
4940 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4941 {
4942 gimple stmt = gsi_stmt (gsi);
4943 bool err2 = false;
4944 struct walk_stmt_info wi;
4945 tree addr;
4946 int lp_nr;
4947
4948 visited_stmts.add (stmt);
4949
4950 if (gimple_bb (stmt) != bb)
4951 {
4952 error ("gimple_bb (stmt) is set to a wrong basic block");
4953 err2 = true;
4954 }
4955
4956 err2 |= verify_gimple_stmt (stmt);
4957 err2 |= verify_location (&blocks, gimple_location (stmt));
4958
4959 memset (&wi, 0, sizeof (wi));
4960 wi.info = (void *) &visited;
4961 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4962 if (addr)
4963 {
4964 error ("incorrect sharing of tree nodes");
4965 debug_generic_expr (addr);
4966 err2 |= true;
4967 }
4968
4969 memset (&wi, 0, sizeof (wi));
4970 wi.info = (void *) &blocks;
4971 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4972 if (addr)
4973 {
4974 debug_generic_expr (addr);
4975 err2 |= true;
4976 }
4977
4978 /* ??? Instead of not checking these stmts at all the walker
4979 should know its context via wi. */
4980 if (!is_gimple_debug (stmt)
4981 && !is_gimple_omp (stmt))
4982 {
4983 memset (&wi, 0, sizeof (wi));
4984 addr = walk_gimple_op (stmt, verify_expr, &wi);
4985 if (addr)
4986 {
4987 debug_generic_expr (addr);
4988 inform (gimple_location (stmt), "in statement");
4989 err2 |= true;
4990 }
4991 }
4992
4993 /* If the statement is marked as part of an EH region, then it is
4994 expected that the statement could throw. Verify that when we
4995 have optimizations that simplify statements such that we prove
4996 that they cannot throw, that we update other data structures
4997 to match. */
4998 lp_nr = lookup_stmt_eh_lp (stmt);
4999 if (lp_nr > 0)
5000 {
5001 if (!stmt_could_throw_p (stmt))
5002 {
5003 if (verify_nothrow)
5004 {
5005 error ("statement marked for throw, but doesn%'t");
5006 err2 |= true;
5007 }
5008 }
5009 else if (!gsi_one_before_end_p (gsi))
5010 {
5011 error ("statement marked for throw in middle of block");
5012 err2 |= true;
5013 }
5014 }
5015
5016 if (err2)
5017 debug_gimple_stmt (stmt);
5018 err |= err2;
5019 }
5020 }
5021
5022 eh_error_found = false;
5023 hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5024 if (eh_table)
5025 eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5026 (&visited_stmts);
5027
5028 if (err || eh_error_found)
5029 internal_error ("verify_gimple failed");
5030
5031 verify_histograms ();
5032 timevar_pop (TV_TREE_STMT_VERIFY);
5033 }
5034
5035
5036 /* Verifies that the flow information is OK. */
5037
5038 static int
5039 gimple_verify_flow_info (void)
5040 {
5041 int err = 0;
5042 basic_block bb;
5043 gimple_stmt_iterator gsi;
5044 gimple stmt;
5045 edge e;
5046 edge_iterator ei;
5047
5048 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5049 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5050 {
5051 error ("ENTRY_BLOCK has IL associated with it");
5052 err = 1;
5053 }
5054
5055 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5056 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5057 {
5058 error ("EXIT_BLOCK has IL associated with it");
5059 err = 1;
5060 }
5061
5062 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5063 if (e->flags & EDGE_FALLTHRU)
5064 {
5065 error ("fallthru to exit from bb %d", e->src->index);
5066 err = 1;
5067 }
5068
5069 FOR_EACH_BB_FN (bb, cfun)
5070 {
5071 bool found_ctrl_stmt = false;
5072
5073 stmt = NULL;
5074
5075 /* Skip labels on the start of basic block. */
5076 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5077 {
5078 tree label;
5079 gimple prev_stmt = stmt;
5080
5081 stmt = gsi_stmt (gsi);
5082
5083 if (gimple_code (stmt) != GIMPLE_LABEL)
5084 break;
5085
5086 label = gimple_label_label (stmt);
5087 if (prev_stmt && DECL_NONLOCAL (label))
5088 {
5089 error ("nonlocal label ");
5090 print_generic_expr (stderr, label, 0);
5091 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5092 bb->index);
5093 err = 1;
5094 }
5095
5096 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5097 {
5098 error ("EH landing pad label ");
5099 print_generic_expr (stderr, label, 0);
5100 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5101 bb->index);
5102 err = 1;
5103 }
5104
5105 if (label_to_block (label) != bb)
5106 {
5107 error ("label ");
5108 print_generic_expr (stderr, label, 0);
5109 fprintf (stderr, " to block does not match in bb %d",
5110 bb->index);
5111 err = 1;
5112 }
5113
5114 if (decl_function_context (label) != current_function_decl)
5115 {
5116 error ("label ");
5117 print_generic_expr (stderr, label, 0);
5118 fprintf (stderr, " has incorrect context in bb %d",
5119 bb->index);
5120 err = 1;
5121 }
5122 }
5123
5124 /* Verify that body of basic block BB is free of control flow. */
5125 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5126 {
5127 gimple stmt = gsi_stmt (gsi);
5128
5129 if (found_ctrl_stmt)
5130 {
5131 error ("control flow in the middle of basic block %d",
5132 bb->index);
5133 err = 1;
5134 }
5135
5136 if (stmt_ends_bb_p (stmt))
5137 found_ctrl_stmt = true;
5138
5139 if (gimple_code (stmt) == GIMPLE_LABEL)
5140 {
5141 error ("label ");
5142 print_generic_expr (stderr, gimple_label_label (stmt), 0);
5143 fprintf (stderr, " in the middle of basic block %d", bb->index);
5144 err = 1;
5145 }
5146 }
5147
5148 gsi = gsi_last_bb (bb);
5149 if (gsi_end_p (gsi))
5150 continue;
5151
5152 stmt = gsi_stmt (gsi);
5153
5154 if (gimple_code (stmt) == GIMPLE_LABEL)
5155 continue;
5156
5157 err |= verify_eh_edges (stmt);
5158
5159 if (is_ctrl_stmt (stmt))
5160 {
5161 FOR_EACH_EDGE (e, ei, bb->succs)
5162 if (e->flags & EDGE_FALLTHRU)
5163 {
5164 error ("fallthru edge after a control statement in bb %d",
5165 bb->index);
5166 err = 1;
5167 }
5168 }
5169
5170 if (gimple_code (stmt) != GIMPLE_COND)
5171 {
5172 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5173 after anything else but if statement. */
5174 FOR_EACH_EDGE (e, ei, bb->succs)
5175 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5176 {
5177 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5178 bb->index);
5179 err = 1;
5180 }
5181 }
5182
5183 switch (gimple_code (stmt))
5184 {
5185 case GIMPLE_COND:
5186 {
5187 edge true_edge;
5188 edge false_edge;
5189
5190 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5191
5192 if (!true_edge
5193 || !false_edge
5194 || !(true_edge->flags & EDGE_TRUE_VALUE)
5195 || !(false_edge->flags & EDGE_FALSE_VALUE)
5196 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5197 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5198 || EDGE_COUNT (bb->succs) >= 3)
5199 {
5200 error ("wrong outgoing edge flags at end of bb %d",
5201 bb->index);
5202 err = 1;
5203 }
5204 }
5205 break;
5206
5207 case GIMPLE_GOTO:
5208 if (simple_goto_p (stmt))
5209 {
5210 error ("explicit goto at end of bb %d", bb->index);
5211 err = 1;
5212 }
5213 else
5214 {
5215 /* FIXME. We should double check that the labels in the
5216 destination blocks have their address taken. */
5217 FOR_EACH_EDGE (e, ei, bb->succs)
5218 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5219 | EDGE_FALSE_VALUE))
5220 || !(e->flags & EDGE_ABNORMAL))
5221 {
5222 error ("wrong outgoing edge flags at end of bb %d",
5223 bb->index);
5224 err = 1;
5225 }
5226 }
5227 break;
5228
5229 case GIMPLE_CALL:
5230 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5231 break;
5232 /* ... fallthru ... */
5233 case GIMPLE_RETURN:
5234 if (!single_succ_p (bb)
5235 || (single_succ_edge (bb)->flags
5236 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5237 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5238 {
5239 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5240 err = 1;
5241 }
5242 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5243 {
5244 error ("return edge does not point to exit in bb %d",
5245 bb->index);
5246 err = 1;
5247 }
5248 break;
5249
5250 case GIMPLE_SWITCH:
5251 {
5252 tree prev;
5253 edge e;
5254 size_t i, n;
5255
5256 n = gimple_switch_num_labels (stmt);
5257
5258 /* Mark all the destination basic blocks. */
5259 for (i = 0; i < n; ++i)
5260 {
5261 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5262 basic_block label_bb = label_to_block (lab);
5263 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5264 label_bb->aux = (void *)1;
5265 }
5266
5267 /* Verify that the case labels are sorted. */
5268 prev = gimple_switch_label (stmt, 0);
5269 for (i = 1; i < n; ++i)
5270 {
5271 tree c = gimple_switch_label (stmt, i);
5272 if (!CASE_LOW (c))
5273 {
5274 error ("found default case not at the start of "
5275 "case vector");
5276 err = 1;
5277 continue;
5278 }
5279 if (CASE_LOW (prev)
5280 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5281 {
5282 error ("case labels not sorted: ");
5283 print_generic_expr (stderr, prev, 0);
5284 fprintf (stderr, " is greater than ");
5285 print_generic_expr (stderr, c, 0);
5286 fprintf (stderr, " but comes before it.\n");
5287 err = 1;
5288 }
5289 prev = c;
5290 }
5291 /* VRP will remove the default case if it can prove it will
5292 never be executed. So do not verify there always exists
5293 a default case here. */
5294
5295 FOR_EACH_EDGE (e, ei, bb->succs)
5296 {
5297 if (!e->dest->aux)
5298 {
5299 error ("extra outgoing edge %d->%d",
5300 bb->index, e->dest->index);
5301 err = 1;
5302 }
5303
5304 e->dest->aux = (void *)2;
5305 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5306 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5307 {
5308 error ("wrong outgoing edge flags at end of bb %d",
5309 bb->index);
5310 err = 1;
5311 }
5312 }
5313
5314 /* Check that we have all of them. */
5315 for (i = 0; i < n; ++i)
5316 {
5317 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5318 basic_block label_bb = label_to_block (lab);
5319
5320 if (label_bb->aux != (void *)2)
5321 {
5322 error ("missing edge %i->%i", bb->index, label_bb->index);
5323 err = 1;
5324 }
5325 }
5326
5327 FOR_EACH_EDGE (e, ei, bb->succs)
5328 e->dest->aux = (void *)0;
5329 }
5330 break;
5331
5332 case GIMPLE_EH_DISPATCH:
5333 err |= verify_eh_dispatch_edge (stmt);
5334 break;
5335
5336 default:
5337 break;
5338 }
5339 }
5340
5341 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5342 verify_dominators (CDI_DOMINATORS);
5343
5344 return err;
5345 }
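
/* Illustrative example (a sketch, not from the sources): the walk above
   rejects IL like

     <bb 3>:
     x_1 = y_2 + 1;
     L0:				<-- label in the middle of a block

   since labels may only start a basic block and control statements may
   only end one.  */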
5346
5347
5348 /* Updates phi nodes after creating a forwarder block joined
5349 by edge FALLTHRU. */
5350
5351 static void
5352 gimple_make_forwarder_block (edge fallthru)
5353 {
5354 edge e;
5355 edge_iterator ei;
5356 basic_block dummy, bb;
5357 tree var;
5358 gimple_stmt_iterator gsi;
5359
5360 dummy = fallthru->src;
5361 bb = fallthru->dest;
5362
5363 if (single_pred_p (bb))
5364 return;
5365
5366 /* If we redirected a branch we must create new PHI nodes at the
5367 start of BB. */
5368 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5369 {
5370 gimple phi, new_phi;
5371
5372 phi = gsi_stmt (gsi);
5373 var = gimple_phi_result (phi);
5374 new_phi = create_phi_node (var, bb);
5375 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5376 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5377 UNKNOWN_LOCATION);
5378 }
5379
5380 /* Add the arguments we have stored on edges. */
5381 FOR_EACH_EDGE (e, ei, bb->preds)
5382 {
5383 if (e == fallthru)
5384 continue;
5385
5386 flush_pending_stmts (e);
5387 }
5388 }
5389
5390
5391 /* Return a non-special label in the head of basic block BB.
5392 Create one if it doesn't exist. */
5393
5394 tree
5395 gimple_block_label (basic_block bb)
5396 {
5397 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5398 bool first = true;
5399 tree label;
5400 gimple stmt;
5401
5402 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5403 {
5404 stmt = gsi_stmt (i);
5405 if (gimple_code (stmt) != GIMPLE_LABEL)
5406 break;
5407 label = gimple_label_label (stmt);
5408 if (!DECL_NONLOCAL (label))
5409 {
5410 if (!first)
5411 gsi_move_before (&i, &s);
5412 return label;
5413 }
5414 }
5415
5416 label = create_artificial_label (UNKNOWN_LOCATION);
5417 stmt = gimple_build_label (label);
5418 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5419 return label;
5420 }
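
/* Usage sketch (hypothetical caller): when redirecting an edge E to a new
   destination, a pass can obtain a jump target with

     tree label = gimple_block_label (e->dest);

   and store LABEL e.g. into the case labels of a GIMPLE_SWITCH; the
   label is created and inserted on demand.  */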
5421
5422
5423 /* Attempt to perform edge redirection by replacing a possibly complex
5424 jump instruction by a goto or by removing the jump completely.
5425 This can apply only if all edges now point to the same block. The
5426 parameters and return values are equivalent to
5427 redirect_edge_and_branch. */
5428
5429 static edge
5430 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5431 {
5432 basic_block src = e->src;
5433 gimple_stmt_iterator i;
5434 gimple stmt;
5435
5436 /* We can replace or remove a complex jump only when we have exactly
5437 two edges. */
5438 if (EDGE_COUNT (src->succs) != 2
5439 /* Verify that all targets will be TARGET. Specifically, the
5440 edge that is not E must also go to TARGET. */
5441 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5442 return NULL;
5443
5444 i = gsi_last_bb (src);
5445 if (gsi_end_p (i))
5446 return NULL;
5447
5448 stmt = gsi_stmt (i);
5449
5450 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5451 {
5452 gsi_remove (&i, true);
5453 e = ssa_redirect_edge (e, target);
5454 e->flags = EDGE_FALLTHRU;
5455 return e;
5456 }
5457
5458 return NULL;
5459 }
5460
5461
5462 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5463 edge representing the redirected branch. */
5464
5465 static edge
5466 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5467 {
5468 basic_block bb = e->src;
5469 gimple_stmt_iterator gsi;
5470 edge ret;
5471 gimple stmt;
5472
5473 if (e->flags & EDGE_ABNORMAL)
5474 return NULL;
5475
5476 if (e->dest == dest)
5477 return NULL;
5478
5479 if (e->flags & EDGE_EH)
5480 return redirect_eh_edge (e, dest);
5481
5482 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5483 {
5484 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5485 if (ret)
5486 return ret;
5487 }
5488
5489 gsi = gsi_last_bb (bb);
5490 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5491
5492 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5493 {
5494 case GIMPLE_COND:
5495 /* For COND_EXPR, we only need to redirect the edge. */
5496 break;
5497
5498 case GIMPLE_GOTO:
5499 /* No non-abnormal edges should lead from a non-simple goto, and
5500 simple ones should be represented implicitly. */
5501 gcc_unreachable ();
5502
5503 case GIMPLE_SWITCH:
5504 {
5505 tree label = gimple_block_label (dest);
5506 tree cases = get_cases_for_edge (e, stmt);
5507
5508 /* If we have a list of cases associated with E, then use it
5509 as it's a lot faster than walking the entire case vector. */
5510 if (cases)
5511 {
5512 edge e2 = find_edge (e->src, dest);
5513 tree last, first;
5514
5515 first = cases;
5516 while (cases)
5517 {
5518 last = cases;
5519 CASE_LABEL (cases) = label;
5520 cases = CASE_CHAIN (cases);
5521 }
5522
5523 /* If there was already an edge in the CFG, then we need
5524 to move all the cases associated with E to E2. */
5525 if (e2)
5526 {
5527 tree cases2 = get_cases_for_edge (e2, stmt);
5528
5529 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5530 CASE_CHAIN (cases2) = first;
5531 }
5532 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5533 }
5534 else
5535 {
5536 size_t i, n = gimple_switch_num_labels (stmt);
5537
5538 for (i = 0; i < n; i++)
5539 {
5540 tree elt = gimple_switch_label (stmt, i);
5541 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5542 CASE_LABEL (elt) = label;
5543 }
5544 }
5545 }
5546 break;
5547
5548 case GIMPLE_ASM:
5549 {
5550 int i, n = gimple_asm_nlabels (stmt);
5551 tree label = NULL;
5552
5553 for (i = 0; i < n; ++i)
5554 {
5555 tree cons = gimple_asm_label_op (stmt, i);
5556 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5557 {
5558 if (!label)
5559 label = gimple_block_label (dest);
5560 TREE_VALUE (cons) = label;
5561 }
5562 }
5563
5564 /* If we didn't find any label matching the former edge in the
5565 asm labels, we must be redirecting the fallthrough
5566 edge. */
5567 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5568 }
5569 break;
5570
5571 case GIMPLE_RETURN:
5572 gsi_remove (&gsi, true);
5573 e->flags |= EDGE_FALLTHRU;
5574 break;
5575
5576 case GIMPLE_OMP_RETURN:
5577 case GIMPLE_OMP_CONTINUE:
5578 case GIMPLE_OMP_SECTIONS_SWITCH:
5579 case GIMPLE_OMP_FOR:
5580 /* The edges from OMP constructs can be simply redirected. */
5581 break;
5582
5583 case GIMPLE_EH_DISPATCH:
5584 if (!(e->flags & EDGE_FALLTHRU))
5585 redirect_eh_dispatch_edge (stmt, e, dest);
5586 break;
5587
5588 case GIMPLE_TRANSACTION:
5589 /* The ABORT edge has a stored label associated with it, otherwise
5590 the edges are simply redirectable. */
5591 if (e->flags == 0)
5592 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5593 break;
5594
5595 default:
5596 /* Otherwise it must be a fallthru edge, and we don't need to
5597 do anything besides redirecting it. */
5598 gcc_assert (e->flags & EDGE_FALLTHRU);
5599 break;
5600 }
5601
5602 /* Update/insert PHI nodes as necessary. */
5603
5604 /* Now update the edges in the CFG. */
5605 e = ssa_redirect_edge (e, dest);
5606
5607 return e;
5608 }
5609
5610 /* Returns true if it is possible to remove edge E by redirecting
5611 it to the destination of the other edge from E->src. */
5612
5613 static bool
5614 gimple_can_remove_branch_p (const_edge e)
5615 {
5616 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5617 return false;
5618
5619 return true;
5620 }
5621
5622 /* Simple wrapper, as we can always redirect fallthru edges. */
5623
5624 static basic_block
5625 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5626 {
5627 e = gimple_redirect_edge_and_branch (e, dest);
5628 gcc_assert (e);
5629
5630 return NULL;
5631 }
5632
5633
5634 /* Splits basic block BB after statement STMT (but at least after the
5635 labels). If STMT is NULL, BB is split just after the labels. */
5636
5637 static basic_block
5638 gimple_split_block (basic_block bb, void *stmt)
5639 {
5640 gimple_stmt_iterator gsi;
5641 gimple_stmt_iterator gsi_tgt;
5642 gimple act;
5643 gimple_seq list;
5644 basic_block new_bb;
5645 edge e;
5646 edge_iterator ei;
5647
5648 new_bb = create_empty_bb (bb);
5649
5650 /* Redirect the outgoing edges. */
5651 new_bb->succs = bb->succs;
5652 bb->succs = NULL;
5653 FOR_EACH_EDGE (e, ei, new_bb->succs)
5654 e->src = new_bb;
5655
5656 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5657 stmt = NULL;
5658
5659 /* Move everything from GSI to the new basic block. */
5660 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5661 {
5662 act = gsi_stmt (gsi);
5663 if (gimple_code (act) == GIMPLE_LABEL)
5664 continue;
5665
5666 if (!stmt)
5667 break;
5668
5669 if (stmt == act)
5670 {
5671 gsi_next (&gsi);
5672 break;
5673 }
5674 }
5675
5676 if (gsi_end_p (gsi))
5677 return new_bb;
5678
5679 /* Split the statement list - avoid re-creating new containers as this
5680 brings ugly quadratic memory consumption in the inliner.
5681 (We are still quadratic since we need to update stmt BB pointers,
5682 sadly.) */
5683 gsi_split_seq_before (&gsi, &list);
5684 set_bb_seq (new_bb, list);
5685 for (gsi_tgt = gsi_start (list);
5686 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5687 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5688
5689 return new_bb;
5690 }
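
/* Usage sketch (hypothetical caller): this is the gimple implementation
   of the split_block CFG hook, so a pass writes

     edge e = split_block (bb, stmt);
     basic_block new_bb = e->dest;

   to break BB after STMT and continue working in NEW_BB.  */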
5691
5692
5693 /* Moves basic block BB after block AFTER. */
5694
5695 static bool
5696 gimple_move_block_after (basic_block bb, basic_block after)
5697 {
5698 if (bb->prev_bb == after)
5699 return true;
5700
5701 unlink_block (bb);
5702 link_block (bb, after);
5703
5704 return true;
5705 }
5706
5707
5708 /* Return TRUE if block BB has no executable statements, otherwise return
5709 FALSE. */
5710
5711 static bool
5712 gimple_empty_block_p (basic_block bb)
5713 {
5714 /* BB must have no executable statements. */
5715 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5716 if (phi_nodes (bb))
5717 return false;
5718 if (gsi_end_p (gsi))
5719 return true;
5720 if (is_gimple_debug (gsi_stmt (gsi)))
5721 gsi_next_nondebug (&gsi);
5722 return gsi_end_p (gsi);
5723 }
5724
5725
5726 /* Split a basic block if it ends with a conditional branch and if the
5727 other part of the block is not empty. */
5728
5729 static basic_block
5730 gimple_split_block_before_cond_jump (basic_block bb)
5731 {
5732 gimple last, split_point;
5733 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5734 if (gsi_end_p (gsi))
5735 return NULL;
5736 last = gsi_stmt (gsi);
5737 if (gimple_code (last) != GIMPLE_COND
5738 && gimple_code (last) != GIMPLE_SWITCH)
5739 return NULL;
5740 gsi_prev_nondebug (&gsi);
5741 split_point = gsi_stmt (gsi);
5742 return split_block (bb, split_point)->dest;
5743 }
5744
5745
5746 /* Return true if basic_block can be duplicated. */
5747
5748 static bool
5749 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5750 {
5751 return true;
5752 }
5753
5754 /* Create a duplicate of the basic block BB. NOTE: This does not
5755 preserve SSA form. */
5756
5757 static basic_block
5758 gimple_duplicate_bb (basic_block bb)
5759 {
5760 basic_block new_bb;
5761 gimple_stmt_iterator gsi, gsi_tgt;
5762 gimple_seq phis = phi_nodes (bb);
5763 gimple phi, stmt, copy;
5764
5765 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5766
5767 /* Copy the PHI nodes. We ignore PHI node arguments here because
5768 the incoming edges have not been set up yet. */
5769 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5770 {
5771 phi = gsi_stmt (gsi);
5772 copy = create_phi_node (NULL_TREE, new_bb);
5773 create_new_def_for (gimple_phi_result (phi), copy,
5774 gimple_phi_result_ptr (copy));
5775 gimple_set_uid (copy, gimple_uid (phi));
5776 }
5777
5778 gsi_tgt = gsi_start_bb (new_bb);
5779 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5780 {
5781 def_operand_p def_p;
5782 ssa_op_iter op_iter;
5783 tree lhs;
5784
5785 stmt = gsi_stmt (gsi);
5786 if (gimple_code (stmt) == GIMPLE_LABEL)
5787 continue;
5788
5789 /* Don't duplicate label debug stmts. */
5790 if (gimple_debug_bind_p (stmt)
5791 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5792 == LABEL_DECL)
5793 continue;
5794
5795 /* Create a new copy of STMT and duplicate STMT's virtual
5796 operands. */
5797 copy = gimple_copy (stmt);
5798 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5799
5800 maybe_duplicate_eh_stmt (copy, stmt);
5801 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5802
5803 /* When copying around a stmt writing into a local non-user
5804 aggregate, make sure it won't share stack slot with other
5805 vars. */
5806 lhs = gimple_get_lhs (stmt);
5807 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5808 {
5809 tree base = get_base_address (lhs);
5810 if (base
5811 && (TREE_CODE (base) == VAR_DECL
5812 || TREE_CODE (base) == RESULT_DECL)
5813 && DECL_IGNORED_P (base)
5814 && !TREE_STATIC (base)
5815 && !DECL_EXTERNAL (base)
5816 && (TREE_CODE (base) != VAR_DECL
5817 || !DECL_HAS_VALUE_EXPR_P (base)))
5818 DECL_NONSHAREABLE (base) = 1;
5819 }
5820
5821 /* Create new names for all the definitions created by COPY and
5822 add replacement mappings for each new name. */
5823 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5824 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5825 }
5826
5827 return new_bb;
5828 }
5829
5830 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5831
5832 static void
5833 add_phi_args_after_copy_edge (edge e_copy)
5834 {
5835 basic_block bb, bb_copy = e_copy->src, dest;
5836 edge e;
5837 edge_iterator ei;
5838 gimple phi, phi_copy;
5839 tree def;
5840 gimple_stmt_iterator psi, psi_copy;
5841
5842 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5843 return;
5844
5845 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5846
5847 if (e_copy->dest->flags & BB_DUPLICATED)
5848 dest = get_bb_original (e_copy->dest);
5849 else
5850 dest = e_copy->dest;
5851
5852 e = find_edge (bb, dest);
5853 if (!e)
5854 {
5855 /* During loop unrolling the target of the latch edge is copied.
5856 In this case we are not looking for the edge to DEST, but for
5857 the edge to the duplicated block whose original was DEST. */
5858 FOR_EACH_EDGE (e, ei, bb->succs)
5859 {
5860 if ((e->dest->flags & BB_DUPLICATED)
5861 && get_bb_original (e->dest) == dest)
5862 break;
5863 }
5864
5865 gcc_assert (e != NULL);
5866 }
5867
5868 for (psi = gsi_start_phis (e->dest),
5869 psi_copy = gsi_start_phis (e_copy->dest);
5870 !gsi_end_p (psi);
5871 gsi_next (&psi), gsi_next (&psi_copy))
5872 {
5873 phi = gsi_stmt (psi);
5874 phi_copy = gsi_stmt (psi_copy);
5875 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5876 add_phi_arg (phi_copy, def, e_copy,
5877 gimple_phi_arg_location_from_edge (phi, e));
5878 }
5879 }
5880
5881
5882 /* Basic block BB_COPY was created by code duplication. Add phi node
5883 arguments for edges going out of BB_COPY. The blocks that were
5884 duplicated have BB_DUPLICATED set. */
5885
5886 void
5887 add_phi_args_after_copy_bb (basic_block bb_copy)
5888 {
5889 edge e_copy;
5890 edge_iterator ei;
5891
5892 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5893 {
5894 add_phi_args_after_copy_edge (e_copy);
5895 }
5896 }
5897
5898 /* Blocks in REGION_COPY array of length N_REGION were created by
5899 duplication of basic blocks. Add phi node arguments for edges
5900 going from these blocks. If E_COPY is not NULL, also add
5901 phi node arguments for its destination. */
5902
5903 void
5904 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5905 edge e_copy)
5906 {
5907 unsigned i;
5908
5909 for (i = 0; i < n_region; i++)
5910 region_copy[i]->flags |= BB_DUPLICATED;
5911
5912 for (i = 0; i < n_region; i++)
5913 add_phi_args_after_copy_bb (region_copy[i]);
5914 if (e_copy)
5915 add_phi_args_after_copy_edge (e_copy);
5916
5917 for (i = 0; i < n_region; i++)
5918 region_copy[i]->flags &= ~BB_DUPLICATED;
5919 }
5920
5921 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5922 important exit edge EXIT. By important we mean that no SSA name defined
5923 inside the region is live over the other exit edges of the region. All entry
5924 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5925 to the duplicate of the region. Dominance and loop information is
5926 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5927 UPDATE_DOMINANCE is false then we assume that the caller will update the
5928 dominance information after calling this function. The new basic
5929 blocks are stored to REGION_COPY in the same order as they had in REGION,
5930 provided that REGION_COPY is not NULL.
5931 The function returns false if it is unable to copy the region,
5932 true otherwise. */
5933
5934 bool
5935 gimple_duplicate_sese_region (edge entry, edge exit,
5936 basic_block *region, unsigned n_region,
5937 basic_block *region_copy,
5938 bool update_dominance)
5939 {
5940 unsigned i;
5941 bool free_region_copy = false, copying_header = false;
5942 struct loop *loop = entry->dest->loop_father;
5943 edge exit_copy;
5944 vec<basic_block> doms;
5945 edge redirected;
5946 int total_freq = 0, entry_freq = 0;
5947 gcov_type total_count = 0, entry_count = 0;
5948
5949 if (!can_copy_bbs_p (region, n_region))
5950 return false;
5951
5952 /* Some sanity checking. Note that we do not check for all possible
5953 misuses of this function. I.e. if you ask to copy something weird,
5954 it will work, but the state of the structures probably will not be
5955 correct. */
5956 for (i = 0; i < n_region; i++)
5957 {
5958 /* We do not handle subloops, i.e. all the blocks must belong to the
5959 same loop. */
5960 if (region[i]->loop_father != loop)
5961 return false;
5962
5963 if (region[i] != entry->dest
5964 && region[i] == loop->header)
5965 return false;
5966 }
5967
5968 /* In case the function is used for loop header copying (which is the primary
5969 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5970 if (loop->header == entry->dest)
5971 {
5972 copying_header = true;
5973
5974 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5975 return false;
5976
5977 for (i = 0; i < n_region; i++)
5978 if (region[i] != exit->src
5979 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5980 return false;
5981 }
5982
5983 initialize_original_copy_tables ();
5984
5985 if (copying_header)
5986 set_loop_copy (loop, loop_outer (loop));
5987 else
5988 set_loop_copy (loop, loop);
5989
5990 if (!region_copy)
5991 {
5992 region_copy = XNEWVEC (basic_block, n_region);
5993 free_region_copy = true;
5994 }
5995
5996 /* Record blocks outside the region that are dominated by something
5997 inside. */
5998 if (update_dominance)
5999 {
6000 doms.create (0);
6001 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6002 }
6003
6004 if (entry->dest->count)
6005 {
6006 total_count = entry->dest->count;
6007 entry_count = entry->count;
6008 /* Fix up corner cases, to avoid division by zero or creation of negative
6009 frequencies. */
6010 if (entry_count > total_count)
6011 entry_count = total_count;
6012 }
6013 else
6014 {
6015 total_freq = entry->dest->frequency;
6016 entry_freq = EDGE_FREQUENCY (entry);
6017 /* Fix up corner cases, to avoid division by zero or creation of negative
6018 frequencies. */
6019 if (total_freq == 0)
6020 total_freq = 1;
6021 else if (entry_freq > total_freq)
6022 entry_freq = total_freq;
6023 }
6024
6025 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6026 split_edge_bb_loc (entry), update_dominance);
6027 if (total_count)
6028 {
6029 scale_bbs_frequencies_gcov_type (region, n_region,
6030 total_count - entry_count,
6031 total_count);
6032 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6033 total_count);
6034 }
6035 else
6036 {
6037 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6038 total_freq);
6039 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6040 }
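/* A worked example of the scaling above (illustrative numbers only): with
   total_count == 100 and entry_count == 80, the original REGION is scaled
   by (100 - 80) / 100, keeping 20% of its profile weight, while REGION_COPY,
   which receives the redirected ENTRY edge, is scaled by 80 / 100.  Together
   the two copies still account for the original counts; the frequency branch
   applies the same arithmetic to total_freq and entry_freq.  */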
6041
6042 if (copying_header)
6043 {
6044 loop->header = exit->dest;
6045 loop->latch = exit->src;
6046 }
6047
6048 /* Redirect the entry and add the phi node arguments. */
6049 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6050 gcc_assert (redirected != NULL);
6051 flush_pending_stmts (entry);
6052
6053 /* Concerning updating of dominators: We must recount dominators
6054 for entry block and its copy. Anything that is outside of the
6055 region, but was dominated by something inside needs recounting as
6056 well. */
6057 if (update_dominance)
6058 {
6059 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6060 doms.safe_push (get_bb_original (entry->dest));
6061 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6062 doms.release ();
6063 }
6064
6065 /* Add the other PHI node arguments. */
6066 add_phi_args_after_copy (region_copy, n_region, NULL);
6067
6068 if (free_region_copy)
6069 free (region_copy);
6070
6071 free_original_copy_tables ();
6072 return true;
6073 }
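/* A minimal usage sketch for the function above (hedged: EXIT, BBS, N_BBS
   and COPIED_BBS are assumed to have been gathered by the caller, in the
   style of the loop header copying pass in tree-ssa-loop-ch.c):

     edge entry = loop_preheader_edge (loop);
     if (gimple_duplicate_sese_region (entry, exit, bbs, n_bbs,
                                       copied_bbs, true))
       update_ssa (TODO_update_ssa);

   As documented above, the SSA web is not updated here, so the caller must
   issue update_ssa itself.  */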
6074
6075 /* Checks if BB is part of the region defined by N_REGION BBS. */
6076 static bool
6077 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6078 {
6079 unsigned int n;
6080
6081 for (n = 0; n < n_region; n++)
6082 {
6083 if (bb == bbs[n])
6084 return true;
6085 }
6086 return false;
6087 }
6088
6089 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6090 are stored to REGION_COPY in the same order in which they appear
6091 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6092 the region, EXIT an exit from it. The condition guarding EXIT
6093 is moved to ENTRY. Returns true if duplication succeeds, false
6094 otherwise.
6095
6096 For example,
6097
6098 some_code;
6099 if (cond)
6100 A;
6101 else
6102 B;
6103
6104 is transformed to
6105
6106 if (cond)
6107 {
6108 some_code;
6109 A;
6110 }
6111 else
6112 {
6113 some_code;
6114 B;
6115 }
6116 */
6117
6118 bool
6119 gimple_duplicate_sese_tail (edge entry, edge exit,
6120 basic_block *region, unsigned n_region,
6121 basic_block *region_copy)
6122 {
6123 unsigned i;
6124 bool free_region_copy = false;
6125 struct loop *loop = exit->dest->loop_father;
6126 struct loop *orig_loop = entry->dest->loop_father;
6127 basic_block switch_bb, entry_bb, nentry_bb;
6128 vec<basic_block> doms;
6129 int total_freq = 0, exit_freq = 0;
6130 gcov_type total_count = 0, exit_count = 0;
6131 edge exits[2], nexits[2], e;
6132 gimple_stmt_iterator gsi;
6133 gimple cond_stmt;
6134 edge sorig, snew;
6135 basic_block exit_bb;
6136 gimple_stmt_iterator psi;
6137 gimple phi;
6138 tree def;
6139 struct loop *target, *aloop, *cloop;
6140
6141 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6142 exits[0] = exit;
6143 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6144
6145 if (!can_copy_bbs_p (region, n_region))
6146 return false;
6147
6148 initialize_original_copy_tables ();
6149 set_loop_copy (orig_loop, loop);
6150
6151 target = loop;
6152 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6153 {
6154 if (bb_part_of_region_p (aloop->header, region, n_region))
6155 {
6156 cloop = duplicate_loop (aloop, target);
6157 duplicate_subloops (aloop, cloop);
6158 }
6159 }
6160
6161 if (!region_copy)
6162 {
6163 region_copy = XNEWVEC (basic_block, n_region);
6164 free_region_copy = true;
6165 }
6166
6167 gcc_assert (!need_ssa_update_p (cfun));
6168
6169 /* Record blocks outside the region that are dominated by something
6170 inside. */
6171 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6172
6173 if (exit->src->count)
6174 {
6175 total_count = exit->src->count;
6176 exit_count = exit->count;
6177 /* Fix up corner cases, to avoid division by zero or creation of negative
6178 frequencies. */
6179 if (exit_count > total_count)
6180 exit_count = total_count;
6181 }
6182 else
6183 {
6184 total_freq = exit->src->frequency;
6185 exit_freq = EDGE_FREQUENCY (exit);
6186 /* Fix up corner cases, to avoid division by zero or creation of negative
6187 frequencies. */
6188 if (total_freq == 0)
6189 total_freq = 1;
6190 if (exit_freq > total_freq)
6191 exit_freq = total_freq;
6192 }
6193
6194 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6195 split_edge_bb_loc (exit), true);
6196 if (total_count)
6197 {
6198 scale_bbs_frequencies_gcov_type (region, n_region,
6199 total_count - exit_count,
6200 total_count);
6201 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6202 total_count);
6203 }
6204 else
6205 {
6206 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6207 total_freq);
6208 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6209 }
6210
6211 /* Create the switch block, and put the exit condition to it. */
6212 entry_bb = entry->dest;
6213 nentry_bb = get_bb_copy (entry_bb);
6214 if (!last_stmt (entry->src)
6215 || !stmt_ends_bb_p (last_stmt (entry->src)))
6216 switch_bb = entry->src;
6217 else
6218 switch_bb = split_edge (entry);
6219 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6220
6221 gsi = gsi_last_bb (switch_bb);
6222 cond_stmt = last_stmt (exit->src);
6223 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6224 cond_stmt = gimple_copy (cond_stmt);
6225
6226 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6227
6228 sorig = single_succ_edge (switch_bb);
6229 sorig->flags = exits[1]->flags;
6230 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6231
6232 /* Register the new edge from SWITCH_BB in loop exit lists. */
6233 rescan_loop_exit (snew, true, false);
6234
6235 /* Add the PHI node arguments. */
6236 add_phi_args_after_copy (region_copy, n_region, snew);
6237
6238 /* Get rid of now superfluous conditions and associated edges (and phi node
6239 arguments). */
6240 exit_bb = exit->dest;
6241
6242 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6243 PENDING_STMT (e) = NULL;
6244
6245 /* The latch of ORIG_LOOP was copied, and so was the backedge
6246 to the original header. We redirect this backedge to EXIT_BB. */
6247 for (i = 0; i < n_region; i++)
6248 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6249 {
6250 gcc_assert (single_succ_edge (region_copy[i]));
6251 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6252 PENDING_STMT (e) = NULL;
6253 for (psi = gsi_start_phis (exit_bb);
6254 !gsi_end_p (psi);
6255 gsi_next (&psi))
6256 {
6257 phi = gsi_stmt (psi);
6258 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6259 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6260 }
6261 }
6262 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6263 PENDING_STMT (e) = NULL;
6264
6265 /* Anything that is outside of the region, but was dominated by something
6266 inside needs to update dominance info. */
6267 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6268 doms.release ();
6269 /* Update the SSA web. */
6270 update_ssa (TODO_update_ssa);
6271
6272 if (free_region_copy)
6273 free (region_copy);
6274
6275 free_original_copy_tables ();
6276 return true;
6277 }
6278
6279 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6280 adding blocks when the dominator traversal reaches EXIT. This
6281 function silently assumes that ENTRY strictly dominates EXIT. */
6282
6283 void
6284 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6285 vec<basic_block> *bbs_p)
6286 {
6287 basic_block son;
6288
6289 for (son = first_dom_son (CDI_DOMINATORS, entry);
6290 son;
6291 son = next_dom_son (CDI_DOMINATORS, son))
6292 {
6293 bbs_p->safe_push (son);
6294 if (son != exit)
6295 gather_blocks_in_sese_region (son, exit, bbs_p);
6296 }
6297 }
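/* A minimal call sketch (mirroring the use in move_sese_region_to_fn
   below); ENTRY_BB must be pushed by hand, because the dominator walk
   starts at its sons and never visits the root itself:

     vec<basic_block> bbs = vNULL;
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);  */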
6298
6299 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6300 The duplicates are recorded in VARS_MAP. */
6301
6302 static void
6303 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6304 tree to_context)
6305 {
6306 tree t = *tp, new_t;
6307 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6308
6309 if (DECL_CONTEXT (t) == to_context)
6310 return;
6311
6312 bool existed;
6313 tree &loc = vars_map->get_or_insert (t, &existed);
6314
6315 if (!existed)
6316 {
6317 if (SSA_VAR_P (t))
6318 {
6319 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6320 add_local_decl (f, new_t);
6321 }
6322 else
6323 {
6324 gcc_assert (TREE_CODE (t) == CONST_DECL);
6325 new_t = copy_node (t);
6326 }
6327 DECL_CONTEXT (new_t) = to_context;
6328
6329 loc = new_t;
6330 }
6331 else
6332 new_t = loc;
6333
6334 *tp = new_t;
6335 }
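/* A note on the idiom above: hash_map::get_or_insert returns a reference
   into the table, so assigning the duplicate to LOC both caches it for
   later lookups and guarantees a single canonical copy per declaration.
   A hedged mini-sketch of the pattern, with a hypothetical make_dup
   helper:

     bool existed;
     tree &slot = map->get_or_insert (key, &existed);
     if (!existed)
       slot = make_dup (key);  */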
6336
6337
6338 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6339 VARS_MAP maps old ssa names and var_decls to the new ones. */
6340
6341 static tree
6342 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6343 tree to_context)
6344 {
6345 tree new_name;
6346
6347 gcc_assert (!virtual_operand_p (name));
6348
6349 tree *loc = vars_map->get (name);
6350
6351 if (!loc)
6352 {
6353 tree decl = SSA_NAME_VAR (name);
6354 if (decl)
6355 {
6356 replace_by_duplicate_decl (&decl, vars_map, to_context);
6357 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6358 decl, SSA_NAME_DEF_STMT (name));
6359 if (SSA_NAME_IS_DEFAULT_DEF (name))
6360 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6361 decl, new_name);
6362 }
6363 else
6364 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6365 name, SSA_NAME_DEF_STMT (name));
6366
6367 vars_map->put (name, new_name);
6368 }
6369 else
6370 new_name = *loc;
6371
6372 return new_name;
6373 }
6374
6375 struct move_stmt_d
6376 {
6377 tree orig_block;
6378 tree new_block;
6379 tree from_context;
6380 tree to_context;
6381 hash_map<tree, tree> *vars_map;
6382 htab_t new_label_map;
6383 hash_map<void *, void *> *eh_map;
6384 bool remap_decls_p;
6385 };
6386
6387 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6388 contained in *TP that previously had ORIG_BLOCK, and change the
6389 DECL_CONTEXT of every local variable referenced in *TP. */
6390
6391 static tree
6392 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6393 {
6394 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6395 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6396 tree t = *tp;
6397
6398 if (EXPR_P (t))
6399 {
6400 tree block = TREE_BLOCK (t);
6401 if (block == p->orig_block
6402 || (p->orig_block == NULL_TREE
6403 && block != NULL_TREE))
6404 TREE_SET_BLOCK (t, p->new_block);
6405 #ifdef ENABLE_CHECKING
6406 else if (block != NULL_TREE)
6407 {
6408 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6409 block = BLOCK_SUPERCONTEXT (block);
6410 gcc_assert (block == p->orig_block);
6411 }
6412 #endif
6413 }
6414 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6415 {
6416 if (TREE_CODE (t) == SSA_NAME)
6417 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6418 else if (TREE_CODE (t) == LABEL_DECL)
6419 {
6420 if (p->new_label_map)
6421 {
6422 struct tree_map in, *out;
6423 in.base.from = t;
6424 out = (struct tree_map *)
6425 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6426 if (out)
6427 *tp = t = out->to;
6428 }
6429
6430 DECL_CONTEXT (t) = p->to_context;
6431 }
6432 else if (p->remap_decls_p)
6433 {
6434 /* Replace T with its duplicate. T should no longer appear in the
6435 parent function, so this looks wasteful; however, it may appear
6436 in referenced_vars, and more importantly, as virtual operands of
6437 statements, and in alias lists of other variables. It would be
6438 quite difficult to expunge it from all those places. ??? It might
6439 suffice to do this for addressable variables. */
6440 if ((TREE_CODE (t) == VAR_DECL
6441 && !is_global_var (t))
6442 || TREE_CODE (t) == CONST_DECL)
6443 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6444 }
6445 *walk_subtrees = 0;
6446 }
6447 else if (TYPE_P (t))
6448 *walk_subtrees = 0;
6449
6450 return NULL_TREE;
6451 }
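/* To summarize the walker above: expressions get their TREE_BLOCK
   retargeted to NEW_BLOCK; SSA names, labels and (when REMAP_DECLS_P)
   local decls are remapped into TO_CONTEXT without walking their
   subtrees; and types are skipped entirely, since type trees may be
   shared between functions and must not be rewritten here.  */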
6452
6453 /* Helper for move_stmt_r. Given an EH region number for the source
6454 function, map that to the duplicate EH region number in the dest. */
6455
6456 static int
6457 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6458 {
6459 eh_region old_r, new_r;
6460
6461 old_r = get_eh_region_from_number (old_nr);
6462 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6463
6464 return new_r->index;
6465 }
6466
6467 /* Similar, but operate on INTEGER_CSTs. */
6468
6469 static tree
6470 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6471 {
6472 int old_nr, new_nr;
6473
6474 old_nr = tree_to_shwi (old_t_nr);
6475 new_nr = move_stmt_eh_region_nr (old_nr, p);
6476
6477 return build_int_cst (integer_type_node, new_nr);
6478 }
6479
6480 /* Like move_stmt_op, but for gimple statements.
6481
6482 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6483 contained in the current statement in *GSI_P and change the
6484 DECL_CONTEXT of every local variable referenced in the current
6485 statement. */
6486
6487 static tree
6488 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6489 struct walk_stmt_info *wi)
6490 {
6491 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6492 gimple stmt = gsi_stmt (*gsi_p);
6493 tree block = gimple_block (stmt);
6494
6495 if (block == p->orig_block
6496 || (p->orig_block == NULL_TREE
6497 && block != NULL_TREE))
6498 gimple_set_block (stmt, p->new_block);
6499
6500 switch (gimple_code (stmt))
6501 {
6502 case GIMPLE_CALL:
6503 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6504 {
6505 tree r, fndecl = gimple_call_fndecl (stmt);
6506 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6507 switch (DECL_FUNCTION_CODE (fndecl))
6508 {
6509 case BUILT_IN_EH_COPY_VALUES:
6510 r = gimple_call_arg (stmt, 1);
6511 r = move_stmt_eh_region_tree_nr (r, p);
6512 gimple_call_set_arg (stmt, 1, r);
6513 /* FALLTHRU */
6514
6515 case BUILT_IN_EH_POINTER:
6516 case BUILT_IN_EH_FILTER:
6517 r = gimple_call_arg (stmt, 0);
6518 r = move_stmt_eh_region_tree_nr (r, p);
6519 gimple_call_set_arg (stmt, 0, r);
6520 break;
6521
6522 default:
6523 break;
6524 }
6525 }
6526 break;
6527
6528 case GIMPLE_RESX:
6529 {
6530 int r = gimple_resx_region (stmt);
6531 r = move_stmt_eh_region_nr (r, p);
6532 gimple_resx_set_region (stmt, r);
6533 }
6534 break;
6535
6536 case GIMPLE_EH_DISPATCH:
6537 {
6538 int r = gimple_eh_dispatch_region (stmt);
6539 r = move_stmt_eh_region_nr (r, p);
6540 gimple_eh_dispatch_set_region (stmt, r);
6541 }
6542 break;
6543
6544 case GIMPLE_OMP_RETURN:
6545 case GIMPLE_OMP_CONTINUE:
6546 break;
6547 default:
6548 if (is_gimple_omp (stmt))
6549 {
6550 /* Do not remap variables inside OMP directives. Variables
6551 referenced in clauses and directive header belong to the
6552 parent function and should not be moved into the child
6553 function. */
6554 bool save_remap_decls_p = p->remap_decls_p;
6555 p->remap_decls_p = false;
6556 *handled_ops_p = true;
6557
6558 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6559 move_stmt_op, wi);
6560
6561 p->remap_decls_p = save_remap_decls_p;
6562 }
6563 break;
6564 }
6565
6566 return NULL_TREE;
6567 }
6568
6569 /* Move basic block BB from function CFUN to function DEST_FN. The
6570 block is moved out of the original linked list and placed after
6571 block AFTER in the new list. Also, the block is removed from the
6572 original array of blocks and placed in DEST_FN's array of blocks.
6573 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6574 updated to reflect the moved edges.
6575
6576 The local variables are remapped to new instances, VARS_MAP is used
6577 to record the mapping. */
6578
6579 static void
6580 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6581 basic_block after, bool update_edge_count_p,
6582 struct move_stmt_d *d)
6583 {
6584 struct control_flow_graph *cfg;
6585 edge_iterator ei;
6586 edge e;
6587 gimple_stmt_iterator si;
6588 unsigned old_len, new_len;
6589
6590 /* Remove BB from dominance structures. */
6591 delete_from_dominance_info (CDI_DOMINATORS, bb);
6592
6593 /* Move BB from its current loop to the copy in the new function. */
6594 if (current_loops)
6595 {
6596 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6597 if (new_loop)
6598 bb->loop_father = new_loop;
6599 }
6600
6601 /* Link BB to the new linked list. */
6602 move_block_after (bb, after);
6603
6604 /* Update the edge count in the corresponding flowgraphs. */
6605 if (update_edge_count_p)
6606 FOR_EACH_EDGE (e, ei, bb->succs)
6607 {
6608 cfun->cfg->x_n_edges--;
6609 dest_cfun->cfg->x_n_edges++;
6610 }
6611
6612 /* Remove BB from the original basic block array. */
6613 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6614 cfun->cfg->x_n_basic_blocks--;
6615
6616 /* Grow DEST_CFUN's basic block array if needed. */
6617 cfg = dest_cfun->cfg;
6618 cfg->x_n_basic_blocks++;
6619 if (bb->index >= cfg->x_last_basic_block)
6620 cfg->x_last_basic_block = bb->index + 1;
6621
6622 old_len = vec_safe_length (cfg->x_basic_block_info);
6623 if ((unsigned) cfg->x_last_basic_block >= old_len)
6624 {
6625 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6626 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6627 }
6628
6629 (*cfg->x_basic_block_info)[bb->index] = bb;
6630
6631 /* Remap the variables in phi nodes. */
6632 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6633 {
6634 gimple phi = gsi_stmt (si);
6635 use_operand_p use;
6636 tree op = PHI_RESULT (phi);
6637 ssa_op_iter oi;
6638 unsigned i;
6639
6640 if (virtual_operand_p (op))
6641 {
6642 /* Remove the phi nodes for virtual operands (alias analysis will be
6643 run for the new function, anyway). */
6644 remove_phi_node (&si, true);
6645 continue;
6646 }
6647
6648 SET_PHI_RESULT (phi,
6649 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6650 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6651 {
6652 op = USE_FROM_PTR (use);
6653 if (TREE_CODE (op) == SSA_NAME)
6654 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6655 }
6656
6657 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6658 {
6659 location_t locus = gimple_phi_arg_location (phi, i);
6660 tree block = LOCATION_BLOCK (locus);
6661
6662 if (locus == UNKNOWN_LOCATION)
6663 continue;
6664 if (d->orig_block == NULL_TREE || block == d->orig_block)
6665 {
6666 if (d->new_block == NULL_TREE)
6667 locus = LOCATION_LOCUS (locus);
6668 else
6669 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6670 gimple_phi_arg_set_location (phi, i, locus);
6671 }
6672 }
6673
6674 gsi_next (&si);
6675 }
6676
6677 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6678 {
6679 gimple stmt = gsi_stmt (si);
6680 struct walk_stmt_info wi;
6681
6682 memset (&wi, 0, sizeof (wi));
6683 wi.info = d;
6684 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6685
6686 if (gimple_code (stmt) == GIMPLE_LABEL)
6687 {
6688 tree label = gimple_label_label (stmt);
6689 int uid = LABEL_DECL_UID (label);
6690
6691 gcc_assert (uid > -1);
6692
6693 old_len = vec_safe_length (cfg->x_label_to_block_map);
6694 if (old_len <= (unsigned) uid)
6695 {
6696 new_len = 3 * uid / 2 + 1;
6697 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6698 }
6699
6700 (*cfg->x_label_to_block_map)[uid] = bb;
6701 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6702
6703 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6704
6705 if (uid >= dest_cfun->cfg->last_label_uid)
6706 dest_cfun->cfg->last_label_uid = uid + 1;
6707 }
6708
6709 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6710 remove_stmt_from_eh_lp_fn (cfun, stmt);
6711
6712 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6713 gimple_remove_stmt_histograms (cfun, stmt);
6714
6715 /* We cannot leave any operands allocated from the operand caches of
6716 the current function. */
6717 free_stmt_operands (cfun, stmt);
6718 push_cfun (dest_cfun);
6719 update_stmt (stmt);
6720 pop_cfun ();
6721 }
6722
6723 FOR_EACH_EDGE (e, ei, bb->succs)
6724 if (e->goto_locus != UNKNOWN_LOCATION)
6725 {
6726 tree block = LOCATION_BLOCK (e->goto_locus);
6727 if (d->orig_block == NULL_TREE
6728 || block == d->orig_block)
6729 e->goto_locus = d->new_block ?
6730 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6731 LOCATION_LOCUS (e->goto_locus);
6732 }
6733 }
6734
6735 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6736 the outermost EH region. Use REGION as the incoming base EH region. */
6737
6738 static eh_region
6739 find_outermost_region_in_block (struct function *src_cfun,
6740 basic_block bb, eh_region region)
6741 {
6742 gimple_stmt_iterator si;
6743
6744 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6745 {
6746 gimple stmt = gsi_stmt (si);
6747 eh_region stmt_region;
6748 int lp_nr;
6749
6750 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6751 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6752 if (stmt_region)
6753 {
6754 if (region == NULL)
6755 region = stmt_region;
6756 else if (stmt_region != region)
6757 {
6758 region = eh_region_outermost (src_cfun, stmt_region, region);
6759 gcc_assert (region != NULL);
6760 }
6761 }
6762 }
6763
6764 return region;
6765 }
6766
6767 static tree
6768 new_label_mapper (tree decl, void *data)
6769 {
6770 htab_t hash = (htab_t) data;
6771 struct tree_map *m;
6772 void **slot;
6773
6774 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6775
6776 m = XNEW (struct tree_map);
6777 m->hash = DECL_UID (decl);
6778 m->base.from = decl;
6779 m->to = create_artificial_label (UNKNOWN_LOCATION);
6780 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6781 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6782 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6783
6784 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6785 gcc_assert (*slot == NULL);
6786
6787 *slot = m;
6788
6789 return m->to;
6790 }
6791
6792 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6793 subblocks. */
6794
6795 static void
6796 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6797 tree to_context)
6798 {
6799 tree *tp, t;
6800
6801 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6802 {
6803 t = *tp;
6804 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6805 continue;
6806 replace_by_duplicate_decl (&t, vars_map, to_context);
6807 if (t != *tp)
6808 {
6809 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6810 {
6811 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6812 DECL_HAS_VALUE_EXPR_P (t) = 1;
6813 }
6814 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6815 *tp = t;
6816 }
6817 }
6818
6819 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6820 replace_block_vars_by_duplicates (block, vars_map, to_context);
6821 }
6822
6823 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6824 from FN1 to FN2. */
6825
6826 static void
6827 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6828 struct loop *loop)
6829 {
6830 /* Discard it from the old loop array. */
6831 (*get_loops (fn1))[loop->num] = NULL;
6832
6833 /* Place it in the new loop array, assigning it a new number. */
6834 loop->num = number_of_loops (fn2);
6835 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6836
6837 /* Recurse to children. */
6838 for (loop = loop->inner; loop; loop = loop->next)
6839 fixup_loop_arrays_after_move (fn1, fn2, loop);
6840 }
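/* Note that the two steps above must stay in sync: loop->num is set to
   number_of_loops (fn2), i.e. the length of FN2's loop array just before
   the push, so after the push it equals the loop's index in that array,
   which is the invariant get_loop relies on.  */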
6841
6842 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6843 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6844 single basic block in the original CFG and the new basic block is
6845 returned. DEST_CFUN must not have a CFG yet.
6846
6847 Note that the region need not be a pure SESE region. Blocks inside
6848 the region may contain calls to abort/exit. The only restriction
6849 is that ENTRY_BB should be the only entry point and it must
6850 dominate EXIT_BB.
6851
6852 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6853 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6854 to the new function.
6855
6856 All local variables referenced in the region are assumed to be in
6857 the corresponding BLOCK_VARS and unexpanded variable lists
6858 associated with DEST_CFUN. */
6859
6860 basic_block
6861 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6862 basic_block exit_bb, tree orig_block)
6863 {
6864 vec<basic_block> bbs, dom_bbs;
6865 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6866 basic_block after, bb, *entry_pred, *exit_succ, abb;
6867 struct function *saved_cfun = cfun;
6868 int *entry_flag, *exit_flag;
6869 unsigned *entry_prob, *exit_prob;
6870 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6871 edge e;
6872 edge_iterator ei;
6873 htab_t new_label_map;
6874 hash_map<void *, void *> *eh_map;
6875 struct loop *loop = entry_bb->loop_father;
6876 struct loop *loop0 = get_loop (saved_cfun, 0);
6877 struct move_stmt_d d;
6878
6879 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6880 region. */
6881 gcc_assert (entry_bb != exit_bb
6882 && (!exit_bb
6883 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6884
6885 /* Collect all the blocks in the region. Manually add ENTRY_BB
6886 because it won't be added by dfs_enumerate_from. */
6887 bbs.create (0);
6888 bbs.safe_push (entry_bb);
6889 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6890
6891 /* The blocks that used to be dominated by something in BBS will now be
6892 dominated by the new block. */
6893 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6894 bbs.address (),
6895 bbs.length ());
6896
6897 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6898 the predecessor edges to ENTRY_BB and the successor edges to
6899 EXIT_BB so that we can re-attach them to the new basic block that
6900 will replace the region. */
6901 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6902 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6903 entry_flag = XNEWVEC (int, num_entry_edges);
6904 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6905 i = 0;
6906 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6907 {
6908 entry_prob[i] = e->probability;
6909 entry_flag[i] = e->flags;
6910 entry_pred[i++] = e->src;
6911 remove_edge (e);
6912 }
6913
6914 if (exit_bb)
6915 {
6916 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6917 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6918 exit_flag = XNEWVEC (int, num_exit_edges);
6919 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6920 i = 0;
6921 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6922 {
6923 exit_prob[i] = e->probability;
6924 exit_flag[i] = e->flags;
6925 exit_succ[i++] = e->dest;
6926 remove_edge (e);
6927 }
6928 }
6929 else
6930 {
6931 num_exit_edges = 0;
6932 exit_succ = NULL;
6933 exit_flag = NULL;
6934 exit_prob = NULL;
6935 }
6936
6937 /* Switch context to the child function to initialize DEST_FN's CFG. */
6938 gcc_assert (dest_cfun->cfg == NULL);
6939 push_cfun (dest_cfun);
6940
6941 init_empty_tree_cfg ();
6942
6943 /* Initialize EH information for the new function. */
6944 eh_map = NULL;
6945 new_label_map = NULL;
6946 if (saved_cfun->eh)
6947 {
6948 eh_region region = NULL;
6949
6950 FOR_EACH_VEC_ELT (bbs, i, bb)
6951 region = find_outermost_region_in_block (saved_cfun, bb, region);
6952
6953 init_eh_for_function ();
6954 if (region != NULL)
6955 {
6956 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6957 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6958 new_label_mapper, new_label_map);
6959 }
6960 }
6961
6962 /* Initialize an empty loop tree. */
6963 struct loops *loops = ggc_cleared_alloc<struct loops> ();
6964 init_loops_structure (dest_cfun, loops, 1);
6965 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6966 set_loops_for_fn (dest_cfun, loops);
6967
6968 /* Move the outlined loop tree part. */
6969 num_nodes = bbs.length ();
6970 FOR_EACH_VEC_ELT (bbs, i, bb)
6971 {
6972 if (bb->loop_father->header == bb)
6973 {
6974 struct loop *this_loop = bb->loop_father;
6975 struct loop *outer = loop_outer (this_loop);
6976 if (outer == loop
6977 /* If the SESE region contains some bbs ending with
6978 a noreturn call, those are considered to belong
6979 to the outermost loop in saved_cfun, rather than
6980 the entry_bb's loop_father. */
6981 || outer == loop0)
6982 {
6983 if (outer != loop)
6984 num_nodes -= this_loop->num_nodes;
6985 flow_loop_tree_node_remove (bb->loop_father);
6986 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6987 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6988 }
6989 }
6990 else if (bb->loop_father == loop0 && loop0 != loop)
6991 num_nodes--;
6992
6993 /* Remove loop exits from the outlined region. */
6994 if (loops_for_fn (saved_cfun)->exits)
6995 FOR_EACH_EDGE (e, ei, bb->succs)
6996 {
6997 struct loops *l = loops_for_fn (saved_cfun);
6998 loop_exit **slot
6999 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7000 NO_INSERT);
7001 if (slot)
7002 l->exits->clear_slot (slot);
7003 }
7004 }
7005
7006
7007 /* Adjust the number of blocks in the tree root of the outlined part. */
7008 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7009
7010 /* Setup a mapping to be used by move_block_to_fn. */
7011 loop->aux = current_loops->tree_root;
7012 loop0->aux = current_loops->tree_root;
7013
7014 pop_cfun ();
7015
7016 /* Move blocks from BBS into DEST_CFUN. */
7017 gcc_assert (bbs.length () >= 2);
7018 after = dest_cfun->cfg->x_entry_block_ptr;
7019 hash_map<tree, tree> vars_map;
7020
7021 memset (&d, 0, sizeof (d));
7022 d.orig_block = orig_block;
7023 d.new_block = DECL_INITIAL (dest_cfun->decl);
7024 d.from_context = cfun->decl;
7025 d.to_context = dest_cfun->decl;
7026 d.vars_map = &vars_map;
7027 d.new_label_map = new_label_map;
7028 d.eh_map = eh_map;
7029 d.remap_decls_p = true;
7030
7031 FOR_EACH_VEC_ELT (bbs, i, bb)
7032 {
7033 /* No need to update edge counts on the last block. It has
7034 already been updated earlier when we detached the region from
7035 the original CFG. */
7036 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7037 after = bb;
7038 }
7039
7040 loop->aux = NULL;
7041 loop0->aux = NULL;
7042 /* Loop sizes are no longer correct, fix them up. */
7043 loop->num_nodes -= num_nodes;
7044 for (struct loop *outer = loop_outer (loop);
7045 outer; outer = loop_outer (outer))
7046 outer->num_nodes -= num_nodes;
7047 loop0->num_nodes -= bbs.length () - num_nodes;
7048
7049 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7050 {
7051 struct loop *aloop;
7052 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7053 if (aloop != NULL)
7054 {
7055 if (aloop->simduid)
7056 {
7057 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7058 d.to_context);
7059 dest_cfun->has_simduid_loops = true;
7060 }
7061 if (aloop->force_vectorize)
7062 dest_cfun->has_force_vectorize_loops = true;
7063 }
7064 }
7065
7066 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7067 if (orig_block)
7068 {
7069 tree block;
7070 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7071 == NULL_TREE);
7072 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7073 = BLOCK_SUBBLOCKS (orig_block);
7074 for (block = BLOCK_SUBBLOCKS (orig_block);
7075 block; block = BLOCK_CHAIN (block))
7076 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7077 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7078 }
7079
7080 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7081 &vars_map, dest_cfun->decl);
7082
7083 if (new_label_map)
7084 htab_delete (new_label_map);
7085 if (eh_map)
7086 delete eh_map;
7087
7088 /* Rewire the entry and exit blocks. The successor to the entry
7089 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7090 the child function. Similarly, the predecessor of DEST_FN's
7091 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7092 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7093 various CFG manipulation functions get to the right CFG.
7094
7095 FIXME, this is silly. The CFG ought to become a parameter to
7096 these helpers. */
7097 push_cfun (dest_cfun);
7098 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7099 if (exit_bb)
7100 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7101 pop_cfun ();
7102
7103 /* Back in the original function, the SESE region has disappeared,
7104 create a new basic block in its place. */
7105 bb = create_empty_bb (entry_pred[0]);
7106 if (current_loops)
7107 add_bb_to_loop (bb, loop);
7108 for (i = 0; i < num_entry_edges; i++)
7109 {
7110 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7111 e->probability = entry_prob[i];
7112 }
7113
7114 for (i = 0; i < num_exit_edges; i++)
7115 {
7116 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7117 e->probability = exit_prob[i];
7118 }
7119
7120 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7121 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7122 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7123 dom_bbs.release ();
7124
7125 if (exit_bb)
7126 {
7127 free (exit_prob);
7128 free (exit_flag);
7129 free (exit_succ);
7130 }
7131 free (entry_prob);
7132 free (entry_flag);
7133 free (entry_pred);
7134 bbs.release ();
7135
7136 return bb;
7137 }
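/* A hedged usage sketch: an outliner such as the OpenMP expansion in
   omp-low.c invokes the function above roughly as

     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   where CHILD_CFUN, ENTRY_BB, EXIT_BB and BLOCK are prepared by the pass.
   On return, NEW_BB is the single block that replaces the region in the
   parent function, ready to receive a call to the outlined child.  */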
7138
7139
7140 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7141 dumpfile.h). */
7142
7143 void
7144 dump_function_to_file (tree fndecl, FILE *file, int flags)
7145 {
7146 tree arg, var, old_current_fndecl = current_function_decl;
7147 struct function *dsf;
7148 bool ignore_topmost_bind = false, any_var = false;
7149 basic_block bb;
7150 tree chain;
7151 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7152 && decl_is_tm_clone (fndecl));
7153 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7154
7155 current_function_decl = fndecl;
7156 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7157
7158 arg = DECL_ARGUMENTS (fndecl);
7159 while (arg)
7160 {
7161 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7162 fprintf (file, " ");
7163 print_generic_expr (file, arg, dump_flags);
7164 if (flags & TDF_VERBOSE)
7165 print_node (file, "", arg, 4);
7166 if (DECL_CHAIN (arg))
7167 fprintf (file, ", ");
7168 arg = DECL_CHAIN (arg);
7169 }
7170 fprintf (file, ")\n");
7171
7172 if (flags & TDF_VERBOSE)
7173 print_node (file, "", fndecl, 2);
7174
7175 dsf = DECL_STRUCT_FUNCTION (fndecl);
7176 if (dsf && (flags & TDF_EH))
7177 dump_eh_tree (file, dsf);
7178
7179 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7180 {
7181 dump_node (fndecl, TDF_SLIM | flags, file);
7182 current_function_decl = old_current_fndecl;
7183 return;
7184 }
7185
7186 /* When GIMPLE is lowered, the variables are no longer available in
7187 BIND_EXPRs, so display them separately. */
7188 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7189 {
7190 unsigned ix;
7191 ignore_topmost_bind = true;
7192
7193 fprintf (file, "{\n");
7194 if (!vec_safe_is_empty (fun->local_decls))
7195 FOR_EACH_LOCAL_DECL (fun, ix, var)
7196 {
7197 print_generic_decl (file, var, flags);
7198 if (flags & TDF_VERBOSE)
7199 print_node (file, "", var, 4);
7200 fprintf (file, "\n");
7201
7202 any_var = true;
7203 }
7204 if (gimple_in_ssa_p (cfun))
7205 for (ix = 1; ix < num_ssa_names; ++ix)
7206 {
7207 tree name = ssa_name (ix);
7208 if (name && !SSA_NAME_VAR (name))
7209 {
7210 fprintf (file, " ");
7211 print_generic_expr (file, TREE_TYPE (name), flags);
7212 fprintf (file, " ");
7213 print_generic_expr (file, name, flags);
7214 fprintf (file, ";\n");
7215
7216 any_var = true;
7217 }
7218 }
7219 }
7220
7221 if (fun && fun->decl == fndecl
7222 && fun->cfg
7223 && basic_block_info_for_fn (fun))
7224 {
7225 /* If the CFG has been built, emit a CFG-based dump. */
7226 if (!ignore_topmost_bind)
7227 fprintf (file, "{\n");
7228
7229 if (any_var && n_basic_blocks_for_fn (fun))
7230 fprintf (file, "\n");
7231
7232 FOR_EACH_BB_FN (bb, fun)
7233 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7234
7235 fprintf (file, "}\n");
7236 }
7237 else if (DECL_SAVED_TREE (fndecl) == NULL)
7238 {
7239 /* The function is now in GIMPLE form but the CFG has not been
7240 built yet. Emit the single sequence of GIMPLE statements
7241 that make up its body. */
7242 gimple_seq body = gimple_body (fndecl);
7243
7244 if (gimple_seq_first_stmt (body)
7245 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7246 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7247 print_gimple_seq (file, body, 0, flags);
7248 else
7249 {
7250 if (!ignore_topmost_bind)
7251 fprintf (file, "{\n");
7252
7253 if (any_var)
7254 fprintf (file, "\n");
7255
7256 print_gimple_seq (file, body, 2, flags);
7257 fprintf (file, "}\n");
7258 }
7259 }
7260 else
7261 {
7262 int indent;
7263
7264 /* Make a tree based dump. */
7265 chain = DECL_SAVED_TREE (fndecl);
7266 if (chain && TREE_CODE (chain) == BIND_EXPR)
7267 {
7268 if (ignore_topmost_bind)
7269 {
7270 chain = BIND_EXPR_BODY (chain);
7271 indent = 2;
7272 }
7273 else
7274 indent = 0;
7275 }
7276 else
7277 {
7278 if (!ignore_topmost_bind)
7279 fprintf (file, "{\n");
7280 indent = 2;
7281 }
7282
7283 if (any_var)
7284 fprintf (file, "\n");
7285
7286 print_generic_stmt_indented (file, chain, flags, indent);
7287 if (ignore_topmost_bind)
7288 fprintf (file, "}\n");
7289 }
7290
7291 if (flags & TDF_ENUMERATE_LOCALS)
7292 dump_enumerated_decls (file, flags);
7293 fprintf (file, "\n\n");
7294
7295 current_function_decl = old_current_fndecl;
7296 }
7297
7298 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h) */
7299
7300 DEBUG_FUNCTION void
7301 debug_function (tree fn, int flags)
7302 {
7303 dump_function_to_file (fn, stderr, flags);
7304 }
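/* Typical interactive use from a debugger session (illustrative):

     (gdb) call debug_function (current_function_decl, TDF_VOPS)

   dumps the current function, including virtual operands, to stderr.  */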
7305
7306
7307 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7308
7309 static void
7310 print_pred_bbs (FILE *file, basic_block bb)
7311 {
7312 edge e;
7313 edge_iterator ei;
7314
7315 FOR_EACH_EDGE (e, ei, bb->preds)
7316 fprintf (file, "bb_%d ", e->src->index);
7317 }
7318
7319
7320 /* Print on FILE the indexes for the successors of basic_block BB. */
7321
7322 static void
7323 print_succ_bbs (FILE *file, basic_block bb)
7324 {
7325 edge e;
7326 edge_iterator ei;
7327
7328 FOR_EACH_EDGE (e, ei, bb->succs)
7329 fprintf (file, "bb_%d ", e->dest->index);
7330 }
7331
7332 /* Print to FILE the basic block BB, with detail given by the VERBOSITY level. */
7333
7334 void
7335 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7336 {
7337 char *s_indent = (char *) alloca ((size_t) indent + 1);
7338 memset ((void *) s_indent, ' ', (size_t) indent);
7339 s_indent[indent] = '\0';
7340
7341 /* Print basic_block's header. */
7342 if (verbosity >= 2)
7343 {
7344 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7345 print_pred_bbs (file, bb);
7346 fprintf (file, "}, succs = {");
7347 print_succ_bbs (file, bb);
7348 fprintf (file, "})\n");
7349 }
7350
7351 /* Print basic_block's body. */
7352 if (verbosity >= 3)
7353 {
7354 fprintf (file, "%s {\n", s_indent);
7355 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7356 fprintf (file, "%s }\n", s_indent);
7357 }
7358 }
7359
7360 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7361
7362 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7363 VERBOSITY level this outputs the contents of the loop, or just its
7364 structure. */
7365
7366 static void
7367 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7368 {
7369 char *s_indent;
7370 basic_block bb;
7371
7372 if (loop == NULL)
7373 return;
7374
7375 s_indent = (char *) alloca ((size_t) indent + 1);
7376 memset ((void *) s_indent, ' ', (size_t) indent);
7377 s_indent[indent] = '\0';
7378
7379 /* Print loop's header. */
7380 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7381 if (loop->header)
7382 fprintf (file, "header = %d", loop->header->index);
7383 else
7384 {
7385 fprintf (file, "deleted)\n");
7386 return;
7387 }
7388 if (loop->latch)
7389 fprintf (file, ", latch = %d", loop->latch->index);
7390 else
7391 fprintf (file, ", multiple latches");
7392 fprintf (file, ", niter = ");
7393 print_generic_expr (file, loop->nb_iterations, 0);
7394
7395 if (loop->any_upper_bound)
7396 {
7397 fprintf (file, ", upper_bound = ");
7398 print_decu (loop->nb_iterations_upper_bound, file);
7399 }
7400
7401 if (loop->any_estimate)
7402 {
7403 fprintf (file, ", estimate = ");
7404 print_decu (loop->nb_iterations_estimate, file);
7405 }
7406 fprintf (file, ")\n");
7407
7408 /* Print loop's body. */
7409 if (verbosity >= 1)
7410 {
7411 fprintf (file, "%s{\n", s_indent);
7412 FOR_EACH_BB_FN (bb, cfun)
7413 if (bb->loop_father == loop)
7414 print_loops_bb (file, bb, indent, verbosity);
7415
7416 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7417 fprintf (file, "%s}\n", s_indent);
7418 }
7419 }
7420
7421 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7422 spaces. Depending on the VERBOSITY level this outputs the contents of the
7423 loop, or just its structure. */
7424
7425 static void
7426 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7427 int verbosity)
7428 {
7429 if (loop == NULL)
7430 return;
7431
7432 print_loop (file, loop, indent, verbosity);
7433 print_loop_and_siblings (file, loop->next, indent, verbosity);
7434 }
7435
7436 /* Follow a CFG edge from the entry point of the program, and on entry
7437 of a loop, pretty print the loop structure on FILE. */
7438
7439 void
7440 print_loops (FILE *file, int verbosity)
7441 {
7442 basic_block bb;
7443
7444 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7445 if (bb && bb->loop_father)
7446 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7447 }
7448
7449 /* Dump a loop. */
7450
7451 DEBUG_FUNCTION void
7452 debug (struct loop &ref)
7453 {
7454 print_loop (stderr, &ref, 0, /*verbosity*/0);
7455 }
7456
7457 DEBUG_FUNCTION void
7458 debug (struct loop *ptr)
7459 {
7460 if (ptr)
7461 debug (*ptr);
7462 else
7463 fprintf (stderr, "<nil>\n");
7464 }
7465
7466 /* Dump a loop verbosely. */
7467
7468 DEBUG_FUNCTION void
7469 debug_verbose (struct loop &ref)
7470 {
7471 print_loop (stderr, &ref, 0, /*verbosity*/3);
7472 }
7473
7474 DEBUG_FUNCTION void
7475 debug_verbose (struct loop *ptr)
7476 {
7477 if (ptr)
7478 debug_verbose (*ptr);
7479 else
7480 fprintf (stderr, "<nil>\n");
7481 }
7482
7483
7484 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7485
7486 DEBUG_FUNCTION void
7487 debug_loops (int verbosity)
7488 {
7489 print_loops (stderr, verbosity);
7490 }
7491
7492 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7493
7494 DEBUG_FUNCTION void
7495 debug_loop (struct loop *loop, int verbosity)
7496 {
7497 print_loop (stderr, loop, 0, verbosity);
7498 }
7499
7500 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7501 level. */
7502
7503 DEBUG_FUNCTION void
7504 debug_loop_num (unsigned num, int verbosity)
7505 {
7506 debug_loop (get_loop (cfun, num), verbosity);
7507 }
7508
7509 /* Return true if BB ends with a call, possibly followed by some
7510 instructions that must stay with the call. Return false
7511 otherwise. */
7512
7513 static bool
7514 gimple_block_ends_with_call_p (basic_block bb)
7515 {
7516 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7517 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7518 }
7519
7520
7521 /* Return true if BB ends with a conditional branch. Return false
7522 otherwise. */
7523
7524 static bool
7525 gimple_block_ends_with_condjump_p (const_basic_block bb)
7526 {
7527 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7528 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7529 }
7530
7531
7532 /* Return true if we need to add a fake edge to the exit at statement T.
7533 Helper function for gimple_flow_call_edges_add. */
7534
7535 static bool
7536 need_fake_edge_p (gimple t)
7537 {
7538 tree fndecl = NULL_TREE;
7539 int call_flags = 0;
7540
7541 /* NORETURN and LONGJMP calls already have an edge to exit.
7542 CONST and PURE calls do not need one.
7543 We don't currently check for CONST and PURE here, although
7544 it would be a good idea, because those attributes are
7545 figured out from the RTL in mark_constant_function, and
7546 the counter incrementation code from -fprofile-arcs
7547 leads to different results from -fbranch-probabilities. */
7548 if (is_gimple_call (t))
7549 {
7550 fndecl = gimple_call_fndecl (t);
7551 call_flags = gimple_call_flags (t);
7552 }
7553
7554 if (is_gimple_call (t)
7555 && fndecl
7556 && DECL_BUILT_IN (fndecl)
7557 && (call_flags & ECF_NOTHROW)
7558 && !(call_flags & ECF_RETURNS_TWICE)
7559 /* fork() doesn't really return twice, but the effect of
7560 wrapping it in __gcov_fork() which calls __gcov_flush()
7561 and clears the counters before forking has the same
7562 effect as returning twice. Force a fake edge. */
7563 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7564 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7565 return false;
7566
7567 if (is_gimple_call (t))
7568 {
7569 edge_iterator ei;
7570 edge e;
7571 basic_block bb;
7572
7573 if (!(call_flags & ECF_NORETURN))
7574 return true;
7575
7576 bb = gimple_bb (t);
7577 FOR_EACH_EDGE (e, ei, bb->succs)
7578 if ((e->flags & EDGE_FAKE) == 0)
7579 return true;
7580 }
7581
7582 if (gimple_code (t) == GIMPLE_ASM
7583 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7584 return true;
7585
7586 return false;
7587 }
7588
7589
7590 /* Add fake edges to the function exit for any non-constant and
7591 non-noreturn calls (or noreturn calls with EH/abnormal edges), and for
7592 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7593 or in the whole CFG if BLOCKS is zero. Return the number of blocks
7594 that were split.
7595
7596 The goal is to expose cases in which entering a basic block does
7597 not imply that all subsequent instructions must be executed. */
7598
7599 static int
7600 gimple_flow_call_edges_add (sbitmap blocks)
7601 {
7602 int i;
7603 int blocks_split = 0;
7604 int last_bb = last_basic_block_for_fn (cfun);
7605 bool check_last_block = false;
7606
7607 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7608 return 0;
7609
7610 if (! blocks)
7611 check_last_block = true;
7612 else
7613 check_last_block = bitmap_bit_p (blocks,
7614 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7615
7616 /* In the last basic block, before epilogue generation, there will be
7617 a fallthru edge to EXIT. Special care is required if the last insn
7618 of the last basic block is a call because make_edge folds duplicate
7619 edges, which would result in the fallthru edge also being marked
7620 fake, which would result in the fallthru edge being removed by
7621 remove_fake_edges, which would result in an invalid CFG.
7622
7623 Moreover, we can't elide the outgoing fake edge, since the block
7624 profiler needs to take this into account in order to solve the minimal
7625 spanning tree in the case that the call doesn't return.
7626
7627 Handle this by adding a dummy instruction in a new last basic block. */
7628 if (check_last_block)
7629 {
7630 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7631 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7632 gimple t = NULL;
7633
7634 if (!gsi_end_p (gsi))
7635 t = gsi_stmt (gsi);
7636
7637 if (t && need_fake_edge_p (t))
7638 {
7639 edge e;
7640
7641 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7642 if (e)
7643 {
7644 gsi_insert_on_edge (e, gimple_build_nop ());
7645 gsi_commit_edge_inserts ();
7646 }
7647 }
7648 }
7649
7650 /* Now add fake edges to the function exit for any non-constant
7651 calls since there is no way that we can determine if they will
7652 return or not... */
7653 for (i = 0; i < last_bb; i++)
7654 {
7655 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7656 gimple_stmt_iterator gsi;
7657 gimple stmt, last_stmt;
7658
7659 if (!bb)
7660 continue;
7661
7662 if (blocks && !bitmap_bit_p (blocks, i))
7663 continue;
7664
7665 gsi = gsi_last_nondebug_bb (bb);
7666 if (!gsi_end_p (gsi))
7667 {
7668 last_stmt = gsi_stmt (gsi);
7669 do
7670 {
7671 stmt = gsi_stmt (gsi);
7672 if (need_fake_edge_p (stmt))
7673 {
7674 edge e;
7675
7676 /* The handling above of the final block before the
7677 epilogue should be enough to verify that there is
7678 no edge to the exit block in CFG already.
7679 Calling make_edge in such case would cause us to
7680 mark that edge as fake and remove it later. */
7681 #ifdef ENABLE_CHECKING
7682 if (stmt == last_stmt)
7683 {
7684 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7685 gcc_assert (e == NULL);
7686 }
7687 #endif
7688
7689 /* Note that the following may create a new basic block
7690 and renumber the existing basic blocks. */
7691 if (stmt != last_stmt)
7692 {
7693 e = split_block (bb, stmt);
7694 if (e)
7695 blocks_split++;
7696 }
7697 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7698 }
7699 gsi_prev (&gsi);
7700 }
7701 while (!gsi_end_p (gsi));
7702 }
7703 }
7704
7705 if (blocks_split)
7706 verify_flow_info ();
7707
7708 return blocks_split;
7709 }
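/* Schematic effect of the code above: for a block whose call might not
   return,

     bb_5: foo (); x = x + 1;

   becomes

     bb_5: foo ();        [+ fake edge bb_5 -> EXIT]
     bb_N: x = x + 1;

   i.e. the block is split after the call and a fake edge to the exit
   block is added, so the profiler's spanning tree accounts for paths on
   which the statements after the call never execute.  */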
7710
7711 /* Removes edge E and all the blocks dominated by it, and updates dominance
7712 information. The IL in E->src needs to be updated separately.
7713 If dominance info is not available, only the edge E is removed. */
7714
7715 void
7716 remove_edge_and_dominated_blocks (edge e)
7717 {
7718 vec<basic_block> bbs_to_remove = vNULL;
7719 vec<basic_block> bbs_to_fix_dom = vNULL;
7720 bitmap df, df_idom;
7721 edge f;
7722 edge_iterator ei;
7723 bool none_removed = false;
7724 unsigned i;
7725 basic_block bb, dbb;
7726 bitmap_iterator bi;
7727
7728 if (!dom_info_available_p (CDI_DOMINATORS))
7729 {
7730 remove_edge (e);
7731 return;
7732 }
7733
7734 /* No updating is needed for edges to exit. */
7735 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7736 {
7737 if (cfgcleanup_altered_bbs)
7738 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7739 remove_edge (e);
7740 return;
7741 }
7742
7743 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7744 that is not dominated by E->dest, then this set is empty. Otherwise,
7745 all the basic blocks dominated by E->dest are removed.
7746
7747 Also, to DF_IDOM we store the immediate dominators of the blocks in
7748 the dominance frontier of E (i.e., of the successors of the
7749 removed blocks, if there are any, and of E->dest otherwise). */
7750 FOR_EACH_EDGE (f, ei, e->dest->preds)
7751 {
7752 if (f == e)
7753 continue;
7754
7755 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7756 {
7757 none_removed = true;
7758 break;
7759 }
7760 }
7761
7762 df = BITMAP_ALLOC (NULL);
7763 df_idom = BITMAP_ALLOC (NULL);
7764
7765 if (none_removed)
7766 bitmap_set_bit (df_idom,
7767 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7768 else
7769 {
7770 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7771 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7772 {
7773 FOR_EACH_EDGE (f, ei, bb->succs)
7774 {
7775 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7776 bitmap_set_bit (df, f->dest->index);
7777 }
7778 }
7779 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7780 bitmap_clear_bit (df, bb->index);
7781
7782 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7783 {
7784 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7785 bitmap_set_bit (df_idom,
7786 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7787 }
7788 }
7789
7790 if (cfgcleanup_altered_bbs)
7791 {
7792 /* Record the set of the altered basic blocks. */
7793 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7794 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7795 }
7796
7797 /* Remove E and the cancelled blocks. */
7798 if (none_removed)
7799 remove_edge (e);
7800 else
7801 {
7802 /* Walk backwards so as to get a chance to substitute all
7803 released DEFs into debug stmts. See
7804 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7805 details. */
7806 for (i = bbs_to_remove.length (); i-- > 0; )
7807 delete_basic_block (bbs_to_remove[i]);
7808 }
7809
7810 /* Update the dominance information. The immediate dominator may change only
7811 for blocks whose immediate dominator belongs to DF_IDOM:
7812
7813 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7814 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7815 Z dominates X after the removal. Before removal, there exists a path P
7816 from Y to X that avoids Z. Let F be the last edge on P that is
7817 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7818 dominates W, and because of P, Z does not dominate W), and W belongs to
7819 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7820 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7821 {
7822 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7823 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7824 dbb;
7825 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7826 bbs_to_fix_dom.safe_push (dbb);
7827 }
7828
7829 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7830
7831 BITMAP_FREE (df);
7832 BITMAP_FREE (df_idom);
7833 bbs_to_remove.release ();
7834 bbs_to_fix_dom.release ();
7835 }
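/* A small worked instance of the argument above: in the diamond
   A -> {B, C} -> D, removing the edge A->B (B having no other
   predecessor) deletes B; D lands in the dominance frontier of the
   removed set, so DF_IDOM = { idom (D) } = { A }.  The dominator sons
   of A are pushed to BBS_TO_FIX_DOM, and iterate_fix_dominators then
   recomputes idom (D) = C.  */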
7836
7837 /* Purge dead EH edges from basic block BB. */
7838
7839 bool
7840 gimple_purge_dead_eh_edges (basic_block bb)
7841 {
7842 bool changed = false;
7843 edge e;
7844 edge_iterator ei;
7845 gimple stmt = last_stmt (bb);
7846
7847 if (stmt && stmt_can_throw_internal (stmt))
7848 return false;
7849
7850 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7851 {
7852 if (e->flags & EDGE_EH)
7853 {
7854 remove_edge_and_dominated_blocks (e);
7855 changed = true;
7856 }
7857 else
7858 ei_next (&ei);
7859 }
7860
7861 return changed;
7862 }
7863
7864 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7865
7866 bool
7867 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7868 {
7869 bool changed = false;
7870 unsigned i;
7871 bitmap_iterator bi;
7872
7873 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7874 {
7875 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7876
7877 /* Earlier gimple_purge_dead_eh_edges could have removed
7878 this basic block already. */
7879 gcc_assert (bb || changed);
7880 if (bb != NULL)
7881 changed |= gimple_purge_dead_eh_edges (bb);
7882 }
7883
7884 return changed;
7885 }
7886
7887 /* Purge dead abnormal call edges from basic block BB. */
7888
7889 bool
7890 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7891 {
7892 bool changed = false;
7893 edge e;
7894 edge_iterator ei;
7895 gimple stmt = last_stmt (bb);
7896
7897 if (!cfun->has_nonlocal_label
7898 && !cfun->calls_setjmp)
7899 return false;
7900
7901 if (stmt && stmt_can_make_abnormal_goto (stmt))
7902 return false;
7903
7904 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7905 {
7906 if (e->flags & EDGE_ABNORMAL)
7907 {
7908 if (e->flags & EDGE_FALLTHRU)
7909 e->flags &= ~EDGE_ABNORMAL;
7910 else
7911 remove_edge_and_dominated_blocks (e);
7912 changed = true;
7913 }
7914 else
7915 ei_next (&ei);
7916 }
7917
7918 return changed;
7919 }
7920
7921 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7922
7923 bool
7924 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7925 {
7926 bool changed = false;
7927 unsigned i;
7928 bitmap_iterator bi;
7929
7930 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7931 {
7932 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7933
7934 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7935 this basic block already. */
7936 gcc_assert (bb || changed);
7937 if (bb != NULL)
7938 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7939 }
7940
7941 return changed;
7942 }
7943
7944 /* This function is called whenever a new edge is created or
7945 redirected. */
7946
7947 static void
7948 gimple_execute_on_growing_pred (edge e)
7949 {
7950 basic_block bb = e->dest;
7951
7952 if (!gimple_seq_empty_p (phi_nodes (bb)))
7953 reserve_phi_args_for_new_edge (bb);
7954 }
7955
7956 /* This function is called immediately before edge E is removed from
7957 the edge vector E->dest->preds. */
7958
7959 static void
7960 gimple_execute_on_shrinking_pred (edge e)
7961 {
7962 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7963 remove_phi_args (e);
7964 }
7965
7966 /*---------------------------------------------------------------------------
7967 Helper functions for Loop versioning
7968 ---------------------------------------------------------------------------*/
7969
7970 /* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
7971    of 'first'.  Both of them are dominated by 'new_head' basic block.  When
7972    'new_head' was created by splitting 'second's incoming edge, the edge
7973    from 'new_head' to 'second' received phi arguments via split_edge ().
7974    Later, an additional edge 'e' was created to connect 'new_head' and
7975    'first'.  This routine now adds, on that additional edge 'e', the phi
7976    args that the 'new_head' to 'second' edge received during splitting.  */
7977
7978 static void
7979 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7980 basic_block new_head, edge e)
7981 {
7982 gimple phi1, phi2;
7983 gimple_stmt_iterator psi1, psi2;
7984 tree def;
7985 edge e2 = find_edge (new_head, second);
7986
7987 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7988 edge, we should always have an edge from NEW_HEAD to SECOND. */
7989 gcc_assert (e2 != NULL);
7990
7991   /* Walk the phi nodes of 'second' and add matching phi args to edge
7992      'e' into 'first'; the phis always appear in corresponding order.  */
7993
7994 for (psi2 = gsi_start_phis (second),
7995 psi1 = gsi_start_phis (first);
7996 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7997 gsi_next (&psi2), gsi_next (&psi1))
7998 {
7999 phi1 = gsi_stmt (psi1);
8000 phi2 = gsi_stmt (psi2);
8001 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8002 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8003 }
8004 }
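
/* For illustration, the CFG shape handled above looks roughly like
   this (a sketch, not taken from an actual dump):

	       new_head
		/     \
	    e  /       \  e2 = find_edge (new_head, second)
	      v         v
	    first     second

   split_edge () already gave the phis of SECOND an argument for E2;
   the loop above copies that argument onto the phis of FIRST for the
   newly created edge E.  */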
8005
8006
8007 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8008    SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
8009    the destination of the ELSE part.  */
8010
8011 static void
8012 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8013 basic_block second_head ATTRIBUTE_UNUSED,
8014 basic_block cond_bb, void *cond_e)
8015 {
8016 gimple_stmt_iterator gsi;
8017 gimple new_cond_expr;
8018 tree cond_expr = (tree) cond_e;
8019 edge e0;
8020
8021   /* Build the new conditional expression.  */
8022 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8023 NULL_TREE, NULL_TREE);
8024
8025   /* Add the new cond to COND_BB.  */
8026 gsi = gsi_last_bb (cond_bb);
8027 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8028
8029 /* Adjust edges appropriately to connect new head with first head
8030 as well as second head. */
8031 e0 = single_succ_edge (cond_bb);
8032 e0->flags &= ~EDGE_FALLTHRU;
8033 e0->flags |= EDGE_FALSE_VALUE;
8034 }
8035
8036
8037 /* Do book-keeping of basic block BB for the profile consistency checker.
8038    If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
8039    do post-pass accounting.  Store the counts in RECORD.  */
8040 static void
8041 gimple_account_profile_record (basic_block bb, int after_pass,
8042 struct profile_record *record)
8043 {
8044 gimple_stmt_iterator i;
8045 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8046 {
8047 record->size[after_pass]
8048 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8049 if (profile_status_for_fn (cfun) == PROFILE_READ)
8050 record->time[after_pass]
8051 += estimate_num_insns (gsi_stmt (i),
8052 &eni_time_weights) * bb->count;
8053 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8054 record->time[after_pass]
8055 += estimate_num_insns (gsi_stmt (i),
8056 &eni_time_weights) * bb->frequency;
8057 }
8058 }
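
/* A worked example of the accounting above (numbers are hypothetical):
   under PROFILE_READ, a block with count 1000 whose statements sum to
   5 size units and 8 time units adds 5 to size[AFTER_PASS] and 8000 to
   time[AFTER_PASS]; under PROFILE_GUESSED the block frequency is used
   as the weight instead of the profile count.  */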
8059
8060 struct cfg_hooks gimple_cfg_hooks = {
8061 "gimple",
8062 gimple_verify_flow_info,
8063 gimple_dump_bb, /* dump_bb */
8064 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8065 create_bb, /* create_basic_block */
8066 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8067 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8068 gimple_can_remove_branch_p, /* can_remove_branch_p */
8069 remove_bb, /* delete_basic_block */
8070 gimple_split_block, /* split_block */
8071 gimple_move_block_after, /* move_block_after */
8072 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8073 gimple_merge_blocks, /* merge_blocks */
8074 gimple_predict_edge, /* predict_edge */
8075 gimple_predicted_by_p, /* predicted_by_p */
8076 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8077 gimple_duplicate_bb, /* duplicate_block */
8078 gimple_split_edge, /* split_edge */
8079   gimple_make_forwarder_block, /* make_forwarder_block */
8080 NULL, /* tidy_fallthru_edge */
8081 NULL, /* force_nonfallthru */
8082 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8083 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8084 gimple_flow_call_edges_add, /* flow_call_edges_add */
8085 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8086 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8087 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8088 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8089   gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8090 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8091 flush_pending_stmts, /* flush_pending_stmts */
8092 gimple_empty_block_p, /* block_empty_p */
8093 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8094 gimple_account_profile_record,
8095 };
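
/* These hooks are not usually called directly; IR-independent code in
   cfghooks.c dispatches through the table above.  For instance
   (an illustrative summary), a generic call such as

     split_edge (e);

   ends up in gimple_split_edge whenever the current IR is GIMPLE, so
   passes can manipulate the CFG without knowing which IR is in use.  */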
8096
8097
8098 /* Split all critical edges. */
8099
8100 unsigned int
8101 split_critical_edges (void)
8102 {
8103 basic_block bb;
8104 edge e;
8105 edge_iterator ei;
8106
8107 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8108 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8109 mappings around the calls to split_edge. */
8110 start_recording_case_labels ();
8111 FOR_ALL_BB_FN (bb, cfun)
8112 {
8113 FOR_EACH_EDGE (e, ei, bb->succs)
8114 {
8115 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8116 split_edge (e);
8117 	  /* PRE inserts statements on edges and expects that,
8118 	     since split_critical_edges was run beforehand, committing edge
8119 	     insertions will not split more edges.  Besides critical edges we
8120 	     must therefore also split edges whose destination has multiple
8121 	     predecessors (or phis, or is the exit block) and whose source
8122 	     ends in a control flow statement other than a return, e.g. RESX.
8123 	     This matches the logic in gimple_find_edge_insert_loc.  */
8124 else if ((!single_pred_p (e->dest)
8125 || !gimple_seq_empty_p (phi_nodes (e->dest))
8126 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8127 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8128 && !(e->flags & EDGE_ABNORMAL))
8129 {
8130 gimple_stmt_iterator gsi;
8131
8132 gsi = gsi_last_bb (e->src);
8133 if (!gsi_end_p (gsi)
8134 && stmt_ends_bb_p (gsi_stmt (gsi))
8135 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8136 && !gimple_call_builtin_p (gsi_stmt (gsi),
8137 BUILT_IN_RETURN)))
8138 split_edge (e);
8139 }
8140 }
8141 }
8142 end_recording_case_labels ();
8143 return 0;
8144 }
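
/* To make "critical" concrete (an illustrative sketch): in the diamond

	  A
	 / \
	B   |
	 \  |
	  v v
	   C

   the edge A->C is critical: A has multiple successors and C has
   multiple predecessors, so no statement can be inserted on A->C
   without first splitting it into A->new_bb->C.  */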
8145
8146 namespace {
8147
8148 const pass_data pass_data_split_crit_edges =
8149 {
8150 GIMPLE_PASS, /* type */
8151 "crited", /* name */
8152 OPTGROUP_NONE, /* optinfo_flags */
8153 TV_TREE_SPLIT_EDGES, /* tv_id */
8154 PROP_cfg, /* properties_required */
8155 PROP_no_crit_edges, /* properties_provided */
8156 0, /* properties_destroyed */
8157 0, /* todo_flags_start */
8158 0, /* todo_flags_finish */
8159 };
8160
8161 class pass_split_crit_edges : public gimple_opt_pass
8162 {
8163 public:
8164 pass_split_crit_edges (gcc::context *ctxt)
8165 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8166 {}
8167
8168 /* opt_pass methods: */
8169 virtual unsigned int execute (function *) { return split_critical_edges (); }
8170
8171 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8172 }; // class pass_split_crit_edges
8173
8174 } // anon namespace
8175
8176 gimple_opt_pass *
8177 make_pass_split_crit_edges (gcc::context *ctxt)
8178 {
8179 return new pass_split_crit_edges (ctxt);
8180 }
8181
8182
8183 /* Build a ternary operation and gimplify it. Emit code before GSI.
8184 Return the gimple_val holding the result. */
8185
8186 tree
8187 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8188 tree type, tree a, tree b, tree c)
8189 {
8190 tree ret;
8191 location_t loc = gimple_location (gsi_stmt (*gsi));
8192
8193 ret = fold_build3_loc (loc, code, type, a, b, c);
8194 STRIP_NOPS (ret);
8195
8196 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8197 GSI_SAME_STMT);
8198 }
8199
8200 /* Build a binary operation and gimplify it. Emit code before GSI.
8201 Return the gimple_val holding the result. */
8202
8203 tree
8204 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8205 tree type, tree a, tree b)
8206 {
8207 tree ret;
8208
8209 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8210 STRIP_NOPS (ret);
8211
8212 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8213 GSI_SAME_STMT);
8214 }
8215
8216 /* Build a unary operation and gimplify it. Emit code before GSI.
8217 Return the gimple_val holding the result. */
8218
8219 tree
8220 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8221 tree a)
8222 {
8223 tree ret;
8224
8225 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8226 STRIP_NOPS (ret);
8227
8228 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8229 GSI_SAME_STMT);
8230 }
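
/* Illustrative use of the gimplify_build* family above (a sketch; the
   variables are hypothetical): to emit "t = a + b" and "u = -t"
   before GSI:

     tree t = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree u = gimplify_build1 (&gsi, NEGATE_EXPR, type, t);

   Each call folds the expression, gimplifies it, inserts any needed
   statements before GSI and returns a gimple value.  */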
8231
8232
8233 \f
8234 /* Given a basic block B which ends with a conditional and has
8235 precisely two successors, determine which of the edges is taken if
8236 the conditional is true and which is taken if the conditional is
8237 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8238
8239 void
8240 extract_true_false_edges_from_block (basic_block b,
8241 edge *true_edge,
8242 edge *false_edge)
8243 {
8244 edge e = EDGE_SUCC (b, 0);
8245
8246 if (e->flags & EDGE_TRUE_VALUE)
8247 {
8248 *true_edge = e;
8249 *false_edge = EDGE_SUCC (b, 1);
8250 }
8251 else
8252 {
8253 *false_edge = e;
8254 *true_edge = EDGE_SUCC (b, 1);
8255 }
8256 }
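
/* Typical usage (a sketch): given a block BB that ends in a
   GIMPLE_COND,

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   after which true_edge->dest is the block reached when the condition
   evaluates to true.  */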
8257
8258 /* Emit return warnings. */
8259
8260 namespace {
8261
8262 const pass_data pass_data_warn_function_return =
8263 {
8264 GIMPLE_PASS, /* type */
8265 "*warn_function_return", /* name */
8266 OPTGROUP_NONE, /* optinfo_flags */
8267 TV_NONE, /* tv_id */
8268 PROP_cfg, /* properties_required */
8269 0, /* properties_provided */
8270 0, /* properties_destroyed */
8271 0, /* todo_flags_start */
8272 0, /* todo_flags_finish */
8273 };
8274
8275 class pass_warn_function_return : public gimple_opt_pass
8276 {
8277 public:
8278 pass_warn_function_return (gcc::context *ctxt)
8279 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8280 {}
8281
8282 /* opt_pass methods: */
8283 virtual unsigned int execute (function *);
8284
8285 }; // class pass_warn_function_return
8286
8287 unsigned int
8288 pass_warn_function_return::execute (function *fun)
8289 {
8290 source_location location;
8291 gimple last;
8292 edge e;
8293 edge_iterator ei;
8294
8295 if (!targetm.warn_func_return (fun->decl))
8296 return 0;
8297
8298 /* If we have a path to EXIT, then we do return. */
8299 if (TREE_THIS_VOLATILE (fun->decl)
8300 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8301 {
8302 location = UNKNOWN_LOCATION;
8303 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8304 {
8305 last = last_stmt (e->src);
8306 if ((gimple_code (last) == GIMPLE_RETURN
8307 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8308 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8309 break;
8310 }
8311 if (location == UNKNOWN_LOCATION)
8312 location = cfun->function_end_locus;
8313 warning_at (location, 0, "%<noreturn%> function does return");
8314 }
8315
8316 /* If we see "return;" in some basic block, then we do reach the end
8317 without returning a value. */
8318 else if (warn_return_type
8319 && !TREE_NO_WARNING (fun->decl)
8320 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8321 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8322 {
8323 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8324 {
8325 gimple last = last_stmt (e->src);
8326 if (gimple_code (last) == GIMPLE_RETURN
8327 && gimple_return_retval (last) == NULL
8328 && !gimple_no_warning_p (last))
8329 {
8330 location = gimple_location (last);
8331 if (location == UNKNOWN_LOCATION)
8332 location = fun->function_end_locus;
8333 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8334 TREE_NO_WARNING (fun->decl) = 1;
8335 break;
8336 }
8337 }
8338 }
8339 return 0;
8340 }
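
/* As an illustration (hypothetical test cases, not from the sources),
   the first warning above fires for

     __attribute__ ((noreturn)) int f (void) { return 1; }

   while the second, controlled by -Wreturn-type, fires for

     int g (int x) { if (x) return 1; }

   where control can reach the end of g without returning a value.  */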
8341
8342 } // anon namespace
8343
8344 gimple_opt_pass *
8345 make_pass_warn_function_return (gcc::context *ctxt)
8346 {
8347 return new pass_warn_function_return (ctxt);
8348 }
8349
8350 /* Walk a gimplified function and warn about calls whose return value
8351    is ignored and whose callee has attribute((warn_unused_result)) set.
8352    This is done before inlining, so we don't have to worry about that.  */
8353
8354 static void
8355 do_warn_unused_result (gimple_seq seq)
8356 {
8357 tree fdecl, ftype;
8358 gimple_stmt_iterator i;
8359
8360 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8361 {
8362 gimple g = gsi_stmt (i);
8363
8364 switch (gimple_code (g))
8365 {
8366 case GIMPLE_BIND:
8367 do_warn_unused_result (gimple_bind_body (g));
8368 break;
8369 case GIMPLE_TRY:
8370 do_warn_unused_result (gimple_try_eval (g));
8371 do_warn_unused_result (gimple_try_cleanup (g));
8372 break;
8373 case GIMPLE_CATCH:
8374 do_warn_unused_result (gimple_catch_handler (g));
8375 break;
8376 case GIMPLE_EH_FILTER:
8377 do_warn_unused_result (gimple_eh_filter_failure (g));
8378 break;
8379
8380 case GIMPLE_CALL:
8381 if (gimple_call_lhs (g))
8382 break;
8383 if (gimple_call_internal_p (g))
8384 break;
8385
8386 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8387 LHS. All calls whose value is ignored should be
8388 represented like this. Look for the attribute. */
8389 fdecl = gimple_call_fndecl (g);
8390 ftype = gimple_call_fntype (g);
8391
8392 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8393 {
8394 location_t loc = gimple_location (g);
8395
8396 if (fdecl)
8397 warning_at (loc, OPT_Wunused_result,
8398 "ignoring return value of %qD, "
8399 "declared with attribute warn_unused_result",
8400 fdecl);
8401 else
8402 warning_at (loc, OPT_Wunused_result,
8403 "ignoring return value of function "
8404 "declared with attribute warn_unused_result");
8405 }
8406 break;
8407
8408 default:
8409 /* Not a container, not a call, or a call whose value is used. */
8410 break;
8411 }
8412 }
8413 }
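
/* For example (a hypothetical snippet), given

     __attribute__ ((warn_unused_result)) int f (void);

   a naked call "f ();" is diagnosed by the walk above, whereas
   "x = f ();" has an LHS and is skipped, as are internal calls.  */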
8414
8415 namespace {
8416
8417 const pass_data pass_data_warn_unused_result =
8418 {
8419 GIMPLE_PASS, /* type */
8420 "*warn_unused_result", /* name */
8421 OPTGROUP_NONE, /* optinfo_flags */
8422 TV_NONE, /* tv_id */
8423 PROP_gimple_any, /* properties_required */
8424 0, /* properties_provided */
8425 0, /* properties_destroyed */
8426 0, /* todo_flags_start */
8427 0, /* todo_flags_finish */
8428 };
8429
8430 class pass_warn_unused_result : public gimple_opt_pass
8431 {
8432 public:
8433 pass_warn_unused_result (gcc::context *ctxt)
8434 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8435 {}
8436
8437 /* opt_pass methods: */
8438 virtual bool gate (function *) { return flag_warn_unused_result; }
8439 virtual unsigned int execute (function *)
8440 {
8441 do_warn_unused_result (gimple_body (current_function_decl));
8442 return 0;
8443 }
8444
8445 }; // class pass_warn_unused_result
8446
8447 } // anon namespace
8448
8449 gimple_opt_pass *
8450 make_pass_warn_unused_result (gcc::context *ctxt)
8451 {
8452 return new pass_warn_unused_result (ctxt);
8453 }
8454
8455 /* IPA passes, compilation of earlier functions or inlining
8456    might have changed some properties, such as marking functions nothrow,
8457    pure, const or noreturn.
8458    Remove redundant edges and basic blocks, and create new ones if necessary.
8459 
8460    This pass can't be executed as a standalone pass from the pass manager,
8461    because between inlining and this fixup verify_flow_info would fail.  */
8462
8463 unsigned int
8464 execute_fixup_cfg (void)
8465 {
8466 basic_block bb;
8467 gimple_stmt_iterator gsi;
8468 int todo = 0;
8469 gcov_type count_scale;
8470 edge e;
8471 edge_iterator ei;
8472
8473 count_scale
8474 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8475 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8476
8477 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8478 cgraph_node::get (current_function_decl)->count;
8479 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8480 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8481 count_scale);
8482
8483 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8484 e->count = apply_scale (e->count, count_scale);
8485
8486 FOR_EACH_BB_FN (bb, cfun)
8487 {
8488 bb->count = apply_scale (bb->count, count_scale);
8489 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8490 {
8491 gimple stmt = gsi_stmt (gsi);
8492 tree decl = is_gimple_call (stmt)
8493 ? gimple_call_fndecl (stmt)
8494 : NULL;
8495 if (decl)
8496 {
8497 int flags = gimple_call_flags (stmt);
8498 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8499 {
8500 if (gimple_purge_dead_abnormal_call_edges (bb))
8501 todo |= TODO_cleanup_cfg;
8502
8503 if (gimple_in_ssa_p (cfun))
8504 {
8505 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8506 update_stmt (stmt);
8507 }
8508 }
8509
8510 if (flags & ECF_NORETURN
8511 && fixup_noreturn_call (stmt))
8512 todo |= TODO_cleanup_cfg;
8513 }
8514
8515 	  /* Remove stores to variables we marked write-only.
8516 	     Keep the access when the store has a side effect, i.e. when
8517 	     the source is volatile.  */
8518 if (gimple_store_p (stmt)
8519 && !gimple_has_side_effects (stmt))
8520 {
8521 tree lhs = get_base_address (gimple_get_lhs (stmt));
8522
8523 if (TREE_CODE (lhs) == VAR_DECL
8524 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8525 && varpool_node::get (lhs)->writeonly)
8526 {
8527 unlink_stmt_vdef (stmt);
8528 gsi_remove (&gsi, true);
8529 release_defs (stmt);
8530 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8531 continue;
8532 }
8533 }
8534 	  /* For calls we can simply remove the LHS when it is known
8535 	     to be write-only.  */
8536 if (is_gimple_call (stmt)
8537 && gimple_get_lhs (stmt))
8538 {
8539 tree lhs = get_base_address (gimple_get_lhs (stmt));
8540
8541 if (TREE_CODE (lhs) == VAR_DECL
8542 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8543 && varpool_node::get (lhs)->writeonly)
8544 {
8545 gimple_call_set_lhs (stmt, NULL);
8546 update_stmt (stmt);
8547 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8548 }
8549 }
8550
8551 if (maybe_clean_eh_stmt (stmt)
8552 && gimple_purge_dead_eh_edges (bb))
8553 todo |= TODO_cleanup_cfg;
8554 gsi_next (&gsi);
8555 }
8556
8557 FOR_EACH_EDGE (e, ei, bb->succs)
8558 e->count = apply_scale (e->count, count_scale);
8559
8560 	 /* If we have a basic block with no successors that does not
8561 	    end with a control statement or a noreturn call, end it with
8562 	    a call to __builtin_unreachable.  This situation can occur
8563 	    when inlining a noreturn call that does in fact return.  */
8564 if (EDGE_COUNT (bb->succs) == 0)
8565 {
8566 gimple stmt = last_stmt (bb);
8567 if (!stmt
8568 || (!is_ctrl_stmt (stmt)
8569 && (!is_gimple_call (stmt)
8570 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8571 {
8572 if (stmt && is_gimple_call (stmt))
8573 gimple_call_set_ctrl_altering (stmt, false);
8574 stmt = gimple_build_call
8575 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8576 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8577 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8578 }
8579 }
8580 }
8581 if (count_scale != REG_BR_PROB_BASE)
8582 compute_function_frequency ();
8583
8584 /* Dump a textual representation of the flowgraph. */
8585 if (dump_file)
8586 gimple_dump_cfg (dump_file, dump_flags);
8587
8588 if (current_loops
8589 && (todo & TODO_cleanup_cfg))
8590 loops_state_set (LOOPS_NEED_FIXUP);
8591
8592 return todo;
8593 }
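
/* To illustrate the scaling arithmetic above (hypothetical numbers):
   if the cgraph node records count 500 for this function while the
   entry block still carries count 1000, count_scale becomes
   REG_BR_PROB_BASE / 2 and apply_scale halves every other block and
   edge count, keeping the profile internally consistent.  */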
8594
8595 namespace {
8596
8597 const pass_data pass_data_fixup_cfg =
8598 {
8599 GIMPLE_PASS, /* type */
8600 "*free_cfg_annotations", /* name */
8601 OPTGROUP_NONE, /* optinfo_flags */
8602 TV_NONE, /* tv_id */
8603 PROP_cfg, /* properties_required */
8604 0, /* properties_provided */
8605 0, /* properties_destroyed */
8606 0, /* todo_flags_start */
8607 0, /* todo_flags_finish */
8608 };
8609
8610 class pass_fixup_cfg : public gimple_opt_pass
8611 {
8612 public:
8613 pass_fixup_cfg (gcc::context *ctxt)
8614 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8615 {}
8616
8617 /* opt_pass methods: */
8618 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8619 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8620
8621 }; // class pass_fixup_cfg
8622
8623 } // anon namespace
8624
8625 gimple_opt_pass *
8626 make_pass_fixup_cfg (gcc::context *ctxt)
8627 {
8628 return new pass_fixup_cfg (ctxt);
8629 }
8630
8631 /* Garbage collection support for edge_def. */
8632
8633 extern void gt_ggc_mx (tree&);
8634 extern void gt_ggc_mx (gimple&);
8635 extern void gt_ggc_mx (rtx&);
8636 extern void gt_ggc_mx (basic_block&);
8637
8638 static void
8639 gt_ggc_mx (rtx_insn *& x)
8640 {
8641 if (x)
8642 gt_ggc_mx_rtx_def ((void *) x);
8643 }
8644
8645 void
8646 gt_ggc_mx (edge_def *e)
8647 {
8648 tree block = LOCATION_BLOCK (e->goto_locus);
8649 gt_ggc_mx (e->src);
8650 gt_ggc_mx (e->dest);
8651 if (current_ir_type () == IR_GIMPLE)
8652 gt_ggc_mx (e->insns.g);
8653 else
8654 gt_ggc_mx (e->insns.r);
8655 gt_ggc_mx (block);
8656 }
8657
8658 /* PCH support for edge_def. */
8659
8660 extern void gt_pch_nx (tree&);
8661 extern void gt_pch_nx (gimple&);
8662 extern void gt_pch_nx (rtx&);
8663 extern void gt_pch_nx (basic_block&);
8664
8665 static void
8666 gt_pch_nx (rtx_insn *& x)
8667 {
8668 if (x)
8669 gt_pch_nx_rtx_def ((void *) x);
8670 }
8671
8672 void
8673 gt_pch_nx (edge_def *e)
8674 {
8675 tree block = LOCATION_BLOCK (e->goto_locus);
8676 gt_pch_nx (e->src);
8677 gt_pch_nx (e->dest);
8678 if (current_ir_type () == IR_GIMPLE)
8679 gt_pch_nx (e->insns.g);
8680 else
8681 gt_pch_nx (e->insns.r);
8682 gt_pch_nx (block);
8683 }
8684
8685 void
8686 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8687 {
8688 tree block = LOCATION_BLOCK (e->goto_locus);
8689 op (&(e->src), cookie);
8690 op (&(e->dest), cookie);
8691 if (current_ir_type () == IR_GIMPLE)
8692 op (&(e->insns.g), cookie);
8693 else
8694 op (&(e->insns.r), cookie);
8695 op (&(block), cookie);
8696 }