/* Control flow functions for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "gimple-pretty-print.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;
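
/* Illustrative note (an assumption about typical input, not taken from
   the original sources): for a statement such as

     switch (x) { case 1: case 2: goto L; default: break; }

   the CASE_LABEL_EXPRs for 1 and 2 both reference the single edge from
   the switch block to the block holding L, chained through their
   CASE_CHAIN fields, so redirecting that edge lets us update every
   affected case at once.  */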

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table <locus_discrim_hasher> discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}
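
/* Illustrative note (not from the original sources): NUM_FIXED_BLOCKS
   accounts for the two blocks every flowgraph starts with, ENTRY_BLOCK
   and EXIT_BLOCK; the code above allocates both and links them as
   adjacent neighbors in the block chain before any real block exists.  */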

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                             Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (basic_block_info->length () < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}


/* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
   it and set loop->safelen to INT_MAX.  We assume that the annotation
   comes immediately before the condition.  */

static void
replace_loop_annotate ()
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          gsi_prev_nondebug (&gsi);
          if (gsi_end_p (gsi))
            continue;
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;
          if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
              != annot_expr_ivdep_kind)
            continue;
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
          loop->safelen = INT_MAX;
        }
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB (bb)
    {
      gsi = gsi_last_bb (bb);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        gsi_prev_nondebug (&gsi);
      if (gsi_end_p (gsi))
        continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        continue;
      if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
          != annot_expr_ivdep_kind)
        continue;
      warning_at (gimple_location (stmt), 0, "ignoring %<GCC ivdep%> "
                  "annotation");
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
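
/* Illustrative sketch (an assumption about typical input, not taken
   from the original sources): the IFN_ANNOTATE calls handled above are
   produced for loops annotated with the ivdep pragma, e.g.

     #pragma GCC ivdep
     for (i = 0; i < n; i++)
       a[i] = b[i] + c[i];

   where the gimplifier wraps the value of the loop condition in an
   internal ANNOTATE call whose second argument is
   annot_expr_ivdep_kind.  The call is rewritten into a plain copy and
   the hint is recorded by setting loop->safelen to INT_MAX.  */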


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
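
/* Illustrative note (not from the original sources): computed gotos
   come from the GNU C labels-as-values extension, e.g.

     void *dest = &&out;
     goto *dest;

   Here the destination of the GIMPLE_GOTO is a pointer value rather
   than a LABEL_DECL, which is exactly what computed_goto_p tests;
   a plain "goto out;" has a LABEL_DECL destination and is not
   considered computed.  */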

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          if (is_gimple_debug (stmt))
            {
              gsi_next_nondebug (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}
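
/* Illustrative sketch (an assumption about the shape of the transform,
   not taken from the original sources): after factoring, blocks that
   used to end in

     goto *x_1;                    goto *x_2;

   instead store their destination and jump to one shared site:

     gotovar = x_1;                gotovar = x_2;
     goto <factored_label>;        goto <factored_label>;

     <factored_label>:
     goto *gotovar;

   so only the single factored block needs abnormal out-edges to every
   potential label target while the edge list is being built.  */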


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
                                  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            CASE_GIMPLE_OMP:
              fallthru = make_gimple_omp_edges (bb, &cur_region);
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
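
/* Illustrative note (an assumption for exposition, not taken from the
   original sources): given a one-line source statement such as

     if (p) x = 1; else x = 2;

   several distinct basic blocks share the same line number, and a
   sample-based profiler could not tell them apart.  Handing each such
   block a different discriminator keeps their profile counts separate.  */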

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple label_stmt = gsi_stmt (gsi);
          tree target;

          if (gimple_code (label_stmt) != GIMPLE_LABEL)
            break;

          target = gimple_label_label (label_stmt);

          /* Make an edge to every label block that has been marked as a
             potential target for a computed goto or a non-local goto.  */
          if ((FORCED_LABEL (target) && !for_call)
              || (DECL_NONLOCAL (target) && for_call))
            {
              make_edge (bb, target_bb, EDGE_ABNORMAL);
              break;
            }
        }
      if (!gsi_end_p (gsi)
          && is_gimple_debug (gsi_stmt (gsi)))
        gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
        {
          /* Make an edge to every setjmp-like call.  */
          gimple call_stmt = gsi_stmt (gsi);
          if (is_gimple_call (call_stmt)
              && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
            make_edge (bb, target_bb, EDGE_ABNORMAL);
        }
    }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                              Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it anymore, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
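
/* Illustrative example (an assumption for exposition, not taken from
   the original sources): for

     switch (x) { case 1: case 2: case 3: foo (); break; default: bar (); }

   the three consecutive entries all branch to the same block, so they
   collapse into the single range entry 1..3; a case whose destination
   equals the default destination is simply dropped, since the default
   edge already covers it.  */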

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change something from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* T is CALL_EXPR.  Set current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (dump_file, bb, 0, dump_flags);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label; if the argument is
         not a label, then we cannot construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe.  */
1958 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1959 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1960 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1961 return NULL;
1962 }
1963
1964 gcc_unreachable ();
1965 }
1966
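/* Usage sketch (illustrative only, not a GCC API contract): a pass
   that has folded the controlling predicate of BB to a constant VAL
   can ask which successor remains live:

     edge taken = find_taken_edge (bb, val);
     if (taken)
       {
         // Every other outgoing edge of BB is unreachable.
       }

   A NULL result must be handled gracefully; it means VAL did not
   select a unique edge, e.g. because VAL is not gimple-invariant.  */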
1967 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1968 statement, determine which of the outgoing edges will be taken out of the
1969 block. Return NULL if the taken edge cannot be determined. */
1970
1971 static edge
1972 find_taken_edge_computed_goto (basic_block bb, tree val)
1973 {
1974 basic_block dest;
1975 edge e = NULL;
1976
1977 dest = label_to_block (val);
1978 if (dest)
1979 {
1980 e = find_edge (bb, dest);
1981 gcc_assert (e != NULL);
1982 }
1983
1984 return e;
1985 }
1986
1987 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1988 statement, determine which of the two edges will be taken out of the
1989 block. Return NULL if either edge may be taken. */
1990
1991 static edge
1992 find_taken_edge_cond_expr (basic_block bb, tree val)
1993 {
1994 edge true_edge, false_edge;
1995
1996 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1997
1998 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1999 return (integer_zerop (val) ? false_edge : true_edge);
2000 }
2001
2002 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2003 statement, determine which edge will be taken out of the block. Return
2004 NULL if any edge may be taken. */
2005
2006 static edge
2007 find_taken_edge_switch_expr (basic_block bb, tree val)
2008 {
2009 basic_block dest_bb;
2010 edge e;
2011 gimple switch_stmt;
2012 tree taken_case;
2013
2014 switch_stmt = last_stmt (bb);
2015 taken_case = find_case_label_for_value (switch_stmt, val);
2016 dest_bb = label_to_block (CASE_LABEL (taken_case));
2017
2018 e = find_edge (bb, dest_bb);
2019 gcc_assert (e);
2020 return e;
2021 }
2022
2023
2024 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2025 We can make optimal use here of the fact that the case labels are
2026 sorted: We can do a binary search for a case matching VAL. */
2027
2028 static tree
2029 find_case_label_for_value (gimple switch_stmt, tree val)
2030 {
2031 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2032 tree default_case = gimple_switch_default_label (switch_stmt);
2033
2034 for (low = 0, high = n; high - low > 1; )
2035 {
2036 size_t i = (high + low) / 2;
2037 tree t = gimple_switch_label (switch_stmt, i);
2038 int cmp;
2039
2040 /* Cache the result of comparing CASE_LOW and val. */
2041 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2042
2043 if (cmp > 0)
2044 high = i;
2045 else
2046 low = i;
2047
2048 if (CASE_HIGH (t) == NULL)
2049 {
2050 /* A single-valued case label. */
2051 if (cmp == 0)
2052 return t;
2053 }
2054 else
2055 {
2056 /* A case range. We can only handle integer ranges. */
2057 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2058 return t;
2059 }
2060 }
2061
2062 return default_case;
2063 }
2064
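/* Illustrative sketch, not part of GCC: the same idea as the loop
   above -- binary search over sorted, disjoint case ranges -- written
   as standalone C (assumes <stddef.h>; types and names are invented):

     struct case_range { long low, high; };  // high == low for single values.

     // Return the index of the range containing VAL, or -1 (default).
     static int
     find_case (const struct case_range *cases, size_t n, long val)
     {
       size_t lo = 0, hi = n;
       while (lo < hi)
         {
           size_t i = lo + (hi - lo) / 2;
           if (val < cases[i].low)
             hi = i;
           else if (val > cases[i].high)
             lo = i + 1;
           else
             return (int) i;   // cases[i].low <= val <= cases[i].high.
         }
       return -1;
     }

   find_case_label_for_value differs in detail (label index 0 holds the
   default case and the loop narrows to a single candidate), but the
   sortedness invariant it exploits is the same.  */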
2065
2066 /* Dump a basic block on stderr. */
2067
2068 void
2069 gimple_debug_bb (basic_block bb)
2070 {
2071 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2072 }
2073
2074
2075 /* Dump basic block with index N on stderr. */
2076
2077 basic_block
2078 gimple_debug_bb_n (int n)
2079 {
2080 gimple_debug_bb (BASIC_BLOCK (n));
2081 return BASIC_BLOCK (n);
2082 }
2083
2084
2085 /* Dump the CFG on stderr.
2086
2087 FLAGS are the same as those used by the tree dumping functions
2088 (see TDF_* in dumpfile.h). */
2089
2090 void
2091 gimple_debug_cfg (int flags)
2092 {
2093 gimple_dump_cfg (stderr, flags);
2094 }
2095
2096
2097 /* Dump the program showing basic block boundaries on the given FILE.
2098
2099 FLAGS are the same as those used by the tree dumping functions (see
2100 TDF_* in dumpfile.h). */
2101
2102 void
2103 gimple_dump_cfg (FILE *file, int flags)
2104 {
2105 if (flags & TDF_DETAILS)
2106 {
2107 dump_function_header (file, current_function_decl, flags);
2108 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2109 n_basic_blocks_for_fn (cfun), n_edges, last_basic_block);
2110
2111 brief_dump_cfg (file, flags | TDF_COMMENT);
2112 fprintf (file, "\n");
2113 }
2114
2115 if (flags & TDF_STATS)
2116 dump_cfg_stats (file);
2117
2118 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2119 }
2120
2121
2122 /* Dump CFG statistics on FILE. */
2123
2124 void
2125 dump_cfg_stats (FILE *file)
2126 {
2127 static long max_num_merged_labels = 0;
2128 unsigned long size, total = 0;
2129 long num_edges;
2130 basic_block bb;
2131 const char * const fmt_str = "%-30s%-13s%12s\n";
2132 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2133 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2134 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2135 const char *funcname = current_function_name ();
2136
2137 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2138
2139 fprintf (file, "---------------------------------------------------------\n");
2140 fprintf (file, fmt_str, "", " Number of ", "Memory");
2141 fprintf (file, fmt_str, "", " instances ", "used ");
2142 fprintf (file, "---------------------------------------------------------\n");
2143
2144 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2145 total += size;
2146 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2147 SCALE (size), LABEL (size));
2148
2149 num_edges = 0;
2150 FOR_EACH_BB (bb)
2151 num_edges += EDGE_COUNT (bb->succs);
2152 size = num_edges * sizeof (struct edge_def);
2153 total += size;
2154 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2155
2156 fprintf (file, "---------------------------------------------------------\n");
2157 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2158 LABEL (total));
2159 fprintf (file, "---------------------------------------------------------\n");
2160 fprintf (file, "\n");
2161
2162 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2163 max_num_merged_labels = cfg_stats.num_merged_labels;
2164
2165 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2166 cfg_stats.num_merged_labels, max_num_merged_labels);
2167
2168 fprintf (file, "\n");
2169 }
2170
2171
2172 /* Dump CFG statistics on stderr. Keep extern so that it's always
2173 linked in the final executable. */
2174
2175 DEBUG_FUNCTION void
2176 debug_cfg_stats (void)
2177 {
2178 dump_cfg_stats (stderr);
2179 }
2180
2181 /*---------------------------------------------------------------------------
2182 Miscellaneous helpers
2183 ---------------------------------------------------------------------------*/
2184
2185 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2186 flow. Transfers of control flow associated with EH are excluded. */
2187
2188 static bool
2189 call_can_make_abnormal_goto (gimple t)
2190 {
2191 /* If the function has no non-local labels and does not call setjmp,
2192 then a call cannot make an abnormal transfer of control. */
2193 if (!cfun->has_nonlocal_label
2194 && !cfun->calls_setjmp)
2195 return false;
2196
2197 /* Likewise if the call has no side effects. */
2198 if (!gimple_has_side_effects (t))
2199 return false;
2200
2201 /* Likewise if the called function is leaf. */
2202 if (gimple_call_flags (t) & ECF_LEAF)
2203 return false;
2204
2205 return true;
2206 }
2207
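/* Source-level illustration, not GCC code: in a translation unit such
   as

     #include <setjmp.h>

     static jmp_buf env;
     extern void do_work (void);   // Assumed: may call longjmp (env, 1).

     int
     f (void)
     {
       if (setjmp (env))
         return 1;        // Reached again if a callee longjmps.
       do_work ();
       return 0;
     }

   cfun->calls_setjmp is set for f, so the call to do_work can make an
   abnormal transfer of control and receives an abnormal edge.  The
   side-effect and ECF_LEAF checks above prune calls that provably
   cannot reach a longjmp or nonlocal label.  */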
2208
2209 /* Return true if T can make an abnormal transfer of control flow.
2210 Transfers of control flow associated with EH are excluded. */
2211
2212 bool
2213 stmt_can_make_abnormal_goto (gimple t)
2214 {
2215 if (computed_goto_p (t))
2216 return true;
2217 if (is_gimple_call (t))
2218 return call_can_make_abnormal_goto (t);
2219 return false;
2220 }
2221
2222
2223 /* Return true if T represents a stmt that always transfers control. */
2224
2225 bool
2226 is_ctrl_stmt (gimple t)
2227 {
2228 switch (gimple_code (t))
2229 {
2230 case GIMPLE_COND:
2231 case GIMPLE_SWITCH:
2232 case GIMPLE_GOTO:
2233 case GIMPLE_RETURN:
2234 case GIMPLE_RESX:
2235 return true;
2236 default:
2237 return false;
2238 }
2239 }
2240
2241
2242 /* Return true if T is a statement that may alter the flow of control
2243 (e.g., a call to a non-returning function). */
2244
2245 bool
2246 is_ctrl_altering_stmt (gimple t)
2247 {
2248 gcc_assert (t);
2249
2250 switch (gimple_code (t))
2251 {
2252 case GIMPLE_CALL:
2253 {
2254 int flags = gimple_call_flags (t);
2255
2256 /* A call alters control flow if it can make an abnormal goto. */
2257 if (call_can_make_abnormal_goto (t))
2258 return true;
2259
2260 /* A call also alters control flow if it does not return. */
2261 if (flags & ECF_NORETURN)
2262 return true;
2263
2264 /* TM ending statements have backedges out of the transaction.
2265 Return true so we split the basic block containing them.
2266 Note that the TM_BUILTIN test is merely an optimization. */
2267 if ((flags & ECF_TM_BUILTIN)
2268 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2269 return true;
2270
2271 /* A BUILT_IN_RETURN call is the same as a return statement. */
2272 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2273 return true;
2274 }
2275 break;
2276
2277 case GIMPLE_EH_DISPATCH:
2278 /* EH_DISPATCH branches to the individual catch handlers at
2279 this level of a try or allowed-exceptions region. It can
2280 fallthru to the next statement as well. */
2281 return true;
2282
2283 case GIMPLE_ASM:
2284 if (gimple_asm_nlabels (t) > 0)
2285 return true;
2286 break;
2287
2288 CASE_GIMPLE_OMP:
2289 /* OpenMP directives alter control flow. */
2290 return true;
2291
2292 case GIMPLE_TRANSACTION:
2293 /* A transaction start alters control flow. */
2294 return true;
2295
2296 default:
2297 break;
2298 }
2299
2300 /* If a statement can throw, it alters control flow. */
2301 return stmt_can_throw_internal (t);
2302 }
2303
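/* Illustration, not GCC code: a noreturn call alters control flow even
   though it is not a branch.  In

     if (bad)
       abort ();   // ECF_NORETURN: control never falls through.
     x = 1;

   the call to abort must end its basic block, so this predicate
   returns true for it and the CFG builder splits after the call.  */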
2304
2305 /* Return true if T is a simple local goto. */
2306
2307 bool
2308 simple_goto_p (gimple t)
2309 {
2310 return (gimple_code (t) == GIMPLE_GOTO
2311 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2312 }
2313
2314
2315 /* Return true if STMT should start a new basic block. PREV_STMT is
2316 the statement preceding STMT. It is used when STMT is a label or a
2317 case label. Labels should only start a new basic block if their
2318 previous statement wasn't a label. Otherwise, a sequence of labels
2319 would generate unnecessary basic blocks that contain nothing but a
2320 single label. */
2321
2322 static inline bool
2323 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2324 {
2325 if (stmt == NULL)
2326 return false;
2327
2328 /* Labels start a new basic block only if the preceding statement
2329 wasn't a label of the same type. This prevents the creation of
2330 consecutive blocks that have nothing but a single label. */
2331 if (gimple_code (stmt) == GIMPLE_LABEL)
2332 {
2333 /* Nonlocal and computed GOTO targets always start a new block. */
2334 if (DECL_NONLOCAL (gimple_label_label (stmt))
2335 || FORCED_LABEL (gimple_label_label (stmt)))
2336 return true;
2337
2338 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2339 {
2340 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2341 return true;
2342
2343 cfg_stats.num_merged_labels++;
2344 return false;
2345 }
2346 else
2347 return true;
2348 }
2349 else if (gimple_code (stmt) == GIMPLE_CALL
2350 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2351 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2352 start a new block. */
2353 return true;
2354
2355 return false;
2356 }
2357
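/* Illustration, not GCC code: for source like

     a: b: c:
       x = 1;

   only the first label opens a new basic block; "b" and "c" are merged
   into it (counted in cfg_stats.num_merged_labels).  A nonlocal or
   forced label in the sequence would still force its own block.  */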
2358
2359 /* Return true if T should end a basic block. */
2360
2361 bool
2362 stmt_ends_bb_p (gimple t)
2363 {
2364 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2365 }
2366
2367 /* Remove block annotations and other data structures. */
2368
2369 void
2370 delete_tree_cfg_annotations (void)
2371 {
2372 vec_free (label_to_block_map);
2373 }
2374
2375
2376 /* Return the first statement in basic block BB. */
2377
2378 gimple
2379 first_stmt (basic_block bb)
2380 {
2381 gimple_stmt_iterator i = gsi_start_bb (bb);
2382 gimple stmt = NULL;
2383
2384 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2385 {
2386 gsi_next (&i);
2387 stmt = NULL;
2388 }
2389 return stmt;
2390 }
2391
2392 /* Return the first non-label statement in basic block BB. */
2393
2394 static gimple
2395 first_non_label_stmt (basic_block bb)
2396 {
2397 gimple_stmt_iterator i = gsi_start_bb (bb);
2398 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2399 gsi_next (&i);
2400 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2401 }
2402
2403 /* Return the last statement in basic block BB. */
2404
2405 gimple
2406 last_stmt (basic_block bb)
2407 {
2408 gimple_stmt_iterator i = gsi_last_bb (bb);
2409 gimple stmt = NULL;
2410
2411 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2412 {
2413 gsi_prev (&i);
2414 stmt = NULL;
2415 }
2416 return stmt;
2417 }
2418
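/* Note on first_stmt and last_stmt above: debug statements are
   skipped, so in a dump such as

     x_1 = 1;
     # DEBUG y => x_1

   last_stmt returns the assignment, not the debug bind.  This keeps
   compiling with -g from changing which statement a pass treats as
   the block terminator.  */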
2419 /* Return the last statement of an otherwise empty block. Return NULL
2420 if the block is totally empty, or if it contains more than one
2421 statement. */
2422
2423 gimple
2424 last_and_only_stmt (basic_block bb)
2425 {
2426 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2427 gimple last, prev;
2428
2429 if (gsi_end_p (i))
2430 return NULL;
2431
2432 last = gsi_stmt (i);
2433 gsi_prev_nondebug (&i);
2434 if (gsi_end_p (i))
2435 return last;
2436
2437 /* Empty statements should no longer appear in the instruction stream.
2438 Everything that might have appeared before should be deleted by
2439 remove_useless_stmts, and the optimizers should just gsi_remove
2440 instead of smashing with build_empty_stmt.
2441
2442 Thus the only thing that should appear here in a block containing
2443 one executable statement is a label. */
2444 prev = gsi_stmt (i);
2445 if (gimple_code (prev) == GIMPLE_LABEL)
2446 return last;
2447 else
2448 return NULL;
2449 }
2450
2451 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2452
2453 static void
2454 reinstall_phi_args (edge new_edge, edge old_edge)
2455 {
2456 edge_var_map_vector *v;
2457 edge_var_map *vm;
2458 int i;
2459 gimple_stmt_iterator phis;
2460
2461 v = redirect_edge_var_map_vector (old_edge);
2462 if (!v)
2463 return;
2464
2465 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2466 v->iterate (i, &vm) && !gsi_end_p (phis);
2467 i++, gsi_next (&phis))
2468 {
2469 gimple phi = gsi_stmt (phis);
2470 tree result = redirect_edge_var_map_result (vm);
2471 tree arg = redirect_edge_var_map_def (vm);
2472
2473 gcc_assert (result == gimple_phi_result (phi));
2474
2475 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2476 }
2477
2478 redirect_edge_var_map_clear (old_edge);
2479 }
2480
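/* Illustration, not GCC code: when an edge E is redirected, the PHI
   arguments that flowed along E are queued in a var map; once the
   replacement edge exists they are replayed here.  Schematically:

     // before redirect:   x_3 = PHI <x_1(E), x_2(other)>
     // after reinstall:   x_3 = PHI <x_1(NEW_EDGE), x_2(other)>

   The assert checks that the queued results still line up, in order,
   with the PHI nodes of the destination block.  */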
2481 /* Returns the basic block after which the new basic block created
2482 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2483 near its "logical" location. This is of most help to humans looking
2484 at debugging dumps. */
2485
2486 static basic_block
2487 split_edge_bb_loc (edge edge_in)
2488 {
2489 basic_block dest = edge_in->dest;
2490 basic_block dest_prev = dest->prev_bb;
2491
2492 if (dest_prev)
2493 {
2494 edge e = find_edge (dest_prev, dest);
2495 if (e && !(e->flags & EDGE_COMPLEX))
2496 return edge_in->src;
2497 }
2498 return dest_prev;
2499 }
2500
2501 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2502 Abort on abnormal edges. */
2503
2504 static basic_block
2505 gimple_split_edge (edge edge_in)
2506 {
2507 basic_block new_bb, after_bb, dest;
2508 edge new_edge, e;
2509
2510 /* Abnormal edges cannot be split. */
2511 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2512
2513 dest = edge_in->dest;
2514
2515 after_bb = split_edge_bb_loc (edge_in);
2516
2517 new_bb = create_empty_bb (after_bb);
2518 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2519 new_bb->count = edge_in->count;
2520 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2521 new_edge->probability = REG_BR_PROB_BASE;
2522 new_edge->count = edge_in->count;
2523
2524 e = redirect_edge_and_branch (edge_in, new_bb);
2525 gcc_assert (e == edge_in);
2526 reinstall_phi_args (new_edge, e);
2527
2528 return new_bb;
2529 }
2530
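/* Illustrative sketch, not part of GCC: edge splitting in its generic
   form.  Given a critical edge A->B (A has several successors, B has
   several predecessors), a fresh block N is inserted so code can be
   placed on the edge without affecting other paths:

     before:  A ---> B        after:  A ---> N ---> B

   Over a hypothetical minimal CFG representation:

     static struct block *
     split_edge_generic (struct edge *e)
     {
       struct block *dest = e->dest;
       struct block *n = new_empty_block ();   // Hypothetical helpers.
       redirect_edge_dest (e, n);              // A now branches to N.
       add_fallthru_edge (n, dest);            // N falls through to B.
       return n;
     }

   gimple_split_edge above additionally copies profile data (count and
   frequency) onto the new block and replays queued PHI arguments onto
   the new fallthru edge.  */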
2531
2532 /* Verify properties of the address expression T with base object BASE. */
2533
2534 static tree
2535 verify_address (tree t, tree base)
2536 {
2537 bool old_constant;
2538 bool old_side_effects;
2539 bool new_constant;
2540 bool new_side_effects;
2541
2542 old_constant = TREE_CONSTANT (t);
2543 old_side_effects = TREE_SIDE_EFFECTS (t);
2544
2545 recompute_tree_invariant_for_addr_expr (t);
2546 new_side_effects = TREE_SIDE_EFFECTS (t);
2547 new_constant = TREE_CONSTANT (t);
2548
2549 if (old_constant != new_constant)
2550 {
2551 error ("constant not recomputed when ADDR_EXPR changed");
2552 return t;
2553 }
2554 if (old_side_effects != new_side_effects)
2555 {
2556 error ("side effects not recomputed when ADDR_EXPR changed");
2557 return t;
2558 }
2559
2560 if (!(TREE_CODE (base) == VAR_DECL
2561 || TREE_CODE (base) == PARM_DECL
2562 || TREE_CODE (base) == RESULT_DECL))
2563 return NULL_TREE;
2564
2565 if (DECL_GIMPLE_REG_P (base))
2566 {
2567 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2568 return base;
2569 }
2570
2571 return NULL_TREE;
2572 }
2573
2574 /* Callback for walk_tree; check that all elements with address taken
2575 are properly noticed as such. The DATA is an int* that is 1 if TP
2576 was seen inside a PHI node. */
2577
2578 static tree
2579 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2580 {
2581 tree t = *tp, x;
2582
2583 if (TYPE_P (t))
2584 *walk_subtrees = 0;
2585
2586 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2587 #define CHECK_OP(N, MSG) \
2588 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2589 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2590
2591 switch (TREE_CODE (t))
2592 {
2593 case SSA_NAME:
2594 if (SSA_NAME_IN_FREE_LIST (t))
2595 {
2596 error ("SSA name in freelist but still referenced");
2597 return *tp;
2598 }
2599 break;
2600
2601 case INDIRECT_REF:
2602 error ("INDIRECT_REF in gimple IL");
2603 return t;
2604
2605 case MEM_REF:
2606 x = TREE_OPERAND (t, 0);
2607 if (!POINTER_TYPE_P (TREE_TYPE (x))
2608 || !is_gimple_mem_ref_addr (x))
2609 {
2610 error ("invalid first operand of MEM_REF");
2611 return x;
2612 }
2613 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2614 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2615 {
2616 error ("invalid offset operand of MEM_REF");
2617 return TREE_OPERAND (t, 1);
2618 }
2619 if (TREE_CODE (x) == ADDR_EXPR
2620 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2621 return x;
2622 *walk_subtrees = 0;
2623 break;
2624
2625 case ASSERT_EXPR:
2626 x = fold (ASSERT_EXPR_COND (t));
2627 if (x == boolean_false_node)
2628 {
2629 error ("ASSERT_EXPR with an always-false condition");
2630 return *tp;
2631 }
2632 break;
2633
2634 case MODIFY_EXPR:
2635 error ("MODIFY_EXPR not expected while having tuples");
2636 return *tp;
2637
2638 case ADDR_EXPR:
2639 {
2640 tree tem;
2641
2642 gcc_assert (is_gimple_address (t));
2643
2644 /* Skip any references (they will be checked when we recurse down the
2645 tree) and ensure that any variable used as a prefix is marked
2646 addressable. */
2647 for (x = TREE_OPERAND (t, 0);
2648 handled_component_p (x);
2649 x = TREE_OPERAND (x, 0))
2650 ;
2651
2652 if ((tem = verify_address (t, x)))
2653 return tem;
2654
2655 if (!(TREE_CODE (x) == VAR_DECL
2656 || TREE_CODE (x) == PARM_DECL
2657 || TREE_CODE (x) == RESULT_DECL))
2658 return NULL;
2659
2660 if (!TREE_ADDRESSABLE (x))
2661 {
2662 error ("address taken, but ADDRESSABLE bit not set");
2663 return x;
2664 }
2665
2666 break;
2667 }
2668
2669 case COND_EXPR:
2670 x = COND_EXPR_COND (t);
2671 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2672 {
2673 error ("non-integral used in condition");
2674 return x;
2675 }
2676 if (!is_gimple_condexpr (x))
2677 {
2678 error ("invalid conditional operand");
2679 return x;
2680 }
2681 break;
2682
2683 case NON_LVALUE_EXPR:
2684 case TRUTH_NOT_EXPR:
2685 gcc_unreachable ();
2686
2687 CASE_CONVERT:
2688 case FIX_TRUNC_EXPR:
2689 case FLOAT_EXPR:
2690 case NEGATE_EXPR:
2691 case ABS_EXPR:
2692 case BIT_NOT_EXPR:
2693 CHECK_OP (0, "invalid operand to unary operator");
2694 break;
2695
2696 case REALPART_EXPR:
2697 case IMAGPART_EXPR:
2698 case BIT_FIELD_REF:
2699 if (!is_gimple_reg_type (TREE_TYPE (t)))
2700 {
2701 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2702 return t;
2703 }
2704
2705 if (TREE_CODE (t) == BIT_FIELD_REF)
2706 {
2707 if (!tree_fits_uhwi_p (TREE_OPERAND (t, 1))
2708 || !tree_fits_uhwi_p (TREE_OPERAND (t, 2)))
2709 {
2710 error ("invalid position or size operand to BIT_FIELD_REF");
2711 return t;
2712 }
2713 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2714 && (TYPE_PRECISION (TREE_TYPE (t))
2715 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2716 {
2717 error ("integral result type precision does not match "
2718 "field size of BIT_FIELD_REF");
2719 return t;
2720 }
2721 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2722 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2723 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2724 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2725 {
2726 error ("mode precision of non-integral result does not "
2727 "match field size of BIT_FIELD_REF");
2728 return t;
2729 }
2730 }
2731 t = TREE_OPERAND (t, 0);
2732
2733 /* Fall-through. */
2734 case COMPONENT_REF:
2735 case ARRAY_REF:
2736 case ARRAY_RANGE_REF:
2737 case VIEW_CONVERT_EXPR:
2738 /* We have a nest of references. Verify that each of the operands
2739 that determine where to reference is either a constant or a variable,
2740 verify that the base is valid, and then show we've already checked
2741 the subtrees. */
2742 while (handled_component_p (t))
2743 {
2744 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2745 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2746 else if (TREE_CODE (t) == ARRAY_REF
2747 || TREE_CODE (t) == ARRAY_RANGE_REF)
2748 {
2749 CHECK_OP (1, "invalid array index");
2750 if (TREE_OPERAND (t, 2))
2751 CHECK_OP (2, "invalid array lower bound");
2752 if (TREE_OPERAND (t, 3))
2753 CHECK_OP (3, "invalid array stride");
2754 }
2755 else if (TREE_CODE (t) == BIT_FIELD_REF
2756 || TREE_CODE (t) == REALPART_EXPR
2757 || TREE_CODE (t) == IMAGPART_EXPR)
2758 {
2759 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2760 "REALPART_EXPR");
2761 return t;
2762 }
2763
2764 t = TREE_OPERAND (t, 0);
2765 }
2766
2767 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2768 {
2769 error ("invalid reference prefix");
2770 return t;
2771 }
2772 *walk_subtrees = 0;
2773 break;
2774 case PLUS_EXPR:
2775 case MINUS_EXPR:
2776 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2777 must be done using POINTER_PLUS_EXPR. */
2778 if (POINTER_TYPE_P (TREE_TYPE (t)))
2779 {
2780 error ("invalid operand to plus/minus, type is a pointer");
2781 return t;
2782 }
2783 CHECK_OP (0, "invalid operand to binary operator");
2784 CHECK_OP (1, "invalid operand to binary operator");
2785 break;
2786
2787 case POINTER_PLUS_EXPR:
2788 /* Check to make sure the first operand is a pointer or reference type. */
2789 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2790 {
2791 error ("invalid operand to pointer plus, first operand is not a pointer");
2792 return t;
2793 }
2794 /* Check to make sure the second operand is a ptrofftype. */
2795 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2796 {
2797 error ("invalid operand to pointer plus, second operand is not an "
2798 "integer type of appropriate width");
2799 return t;
2800 }
2801 /* FALLTHROUGH */
2802 case LT_EXPR:
2803 case LE_EXPR:
2804 case GT_EXPR:
2805 case GE_EXPR:
2806 case EQ_EXPR:
2807 case NE_EXPR:
2808 case UNORDERED_EXPR:
2809 case ORDERED_EXPR:
2810 case UNLT_EXPR:
2811 case UNLE_EXPR:
2812 case UNGT_EXPR:
2813 case UNGE_EXPR:
2814 case UNEQ_EXPR:
2815 case LTGT_EXPR:
2816 case MULT_EXPR:
2817 case TRUNC_DIV_EXPR:
2818 case CEIL_DIV_EXPR:
2819 case FLOOR_DIV_EXPR:
2820 case ROUND_DIV_EXPR:
2821 case TRUNC_MOD_EXPR:
2822 case CEIL_MOD_EXPR:
2823 case FLOOR_MOD_EXPR:
2824 case ROUND_MOD_EXPR:
2825 case RDIV_EXPR:
2826 case EXACT_DIV_EXPR:
2827 case MIN_EXPR:
2828 case MAX_EXPR:
2829 case LSHIFT_EXPR:
2830 case RSHIFT_EXPR:
2831 case LROTATE_EXPR:
2832 case RROTATE_EXPR:
2833 case BIT_IOR_EXPR:
2834 case BIT_XOR_EXPR:
2835 case BIT_AND_EXPR:
2836 CHECK_OP (0, "invalid operand to binary operator");
2837 CHECK_OP (1, "invalid operand to binary operator");
2838 break;
2839
2840 case CONSTRUCTOR:
2841 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2842 *walk_subtrees = 0;
2843 break;
2844
2845 case CASE_LABEL_EXPR:
2846 if (CASE_CHAIN (t))
2847 {
2848 error ("invalid CASE_CHAIN");
2849 return t;
2850 }
2851 break;
2852
2853 default:
2854 break;
2855 }
2856 return NULL;
2857
2858 #undef CHECK_OP
2859 }
2860
2861
2862 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2863 Returns true if there is an error, otherwise false. */
2864
2865 static bool
2866 verify_types_in_gimple_min_lval (tree expr)
2867 {
2868 tree op;
2869
2870 if (is_gimple_id (expr))
2871 return false;
2872
2873 if (TREE_CODE (expr) != TARGET_MEM_REF
2874 && TREE_CODE (expr) != MEM_REF)
2875 {
2876 error ("invalid expression for min lvalue");
2877 return true;
2878 }
2879
2880 /* TARGET_MEM_REFs are strange beasts. */
2881 if (TREE_CODE (expr) == TARGET_MEM_REF)
2882 return false;
2883
2884 op = TREE_OPERAND (expr, 0);
2885 if (!is_gimple_val (op))
2886 {
2887 error ("invalid operand in indirect reference");
2888 debug_generic_stmt (op);
2889 return true;
2890 }
2891 /* Memory references now generally can involve a value conversion. */
2892
2893 return false;
2894 }
2895
2896 /* Verify if EXPR is a valid GIMPLE reference expression. If
2897 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2898 if there is an error, otherwise false. */
2899
2900 static bool
2901 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2902 {
2903 while (handled_component_p (expr))
2904 {
2905 tree op = TREE_OPERAND (expr, 0);
2906
2907 if (TREE_CODE (expr) == ARRAY_REF
2908 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2909 {
2910 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2911 || (TREE_OPERAND (expr, 2)
2912 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2913 || (TREE_OPERAND (expr, 3)
2914 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2915 {
2916 error ("invalid operands to array reference");
2917 debug_generic_stmt (expr);
2918 return true;
2919 }
2920 }
2921
2922 /* Verify if the reference array element types are compatible. */
2923 if (TREE_CODE (expr) == ARRAY_REF
2924 && !useless_type_conversion_p (TREE_TYPE (expr),
2925 TREE_TYPE (TREE_TYPE (op))))
2926 {
2927 error ("type mismatch in array reference");
2928 debug_generic_stmt (TREE_TYPE (expr));
2929 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2930 return true;
2931 }
2932 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2933 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2934 TREE_TYPE (TREE_TYPE (op))))
2935 {
2936 error ("type mismatch in array range reference");
2937 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2938 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2939 return true;
2940 }
2941
2942 if ((TREE_CODE (expr) == REALPART_EXPR
2943 || TREE_CODE (expr) == IMAGPART_EXPR)
2944 && !useless_type_conversion_p (TREE_TYPE (expr),
2945 TREE_TYPE (TREE_TYPE (op))))
2946 {
2947 error ("type mismatch in real/imagpart reference");
2948 debug_generic_stmt (TREE_TYPE (expr));
2949 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2950 return true;
2951 }
2952
2953 if (TREE_CODE (expr) == COMPONENT_REF
2954 && !useless_type_conversion_p (TREE_TYPE (expr),
2955 TREE_TYPE (TREE_OPERAND (expr, 1))))
2956 {
2957 error ("type mismatch in component reference");
2958 debug_generic_stmt (TREE_TYPE (expr));
2959 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2960 return true;
2961 }
2962
2963 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2964 {
2965 /* For VIEW_CONVERT_EXPRs, which are allowed here too, we only check
2966 that their operand is not an SSA name or an invariant when
2967 requiring an lvalue (this usually means there is an SRA or IPA-SRA
2968 bug). Otherwise there is nothing to verify; gross mismatches at
2969 most invoke undefined behavior. */
2970 if (require_lvalue
2971 && (TREE_CODE (op) == SSA_NAME
2972 || is_gimple_min_invariant (op)))
2973 {
2974 error ("conversion of an SSA_NAME on the left hand side");
2975 debug_generic_stmt (expr);
2976 return true;
2977 }
2978 else if (TREE_CODE (op) == SSA_NAME
2979 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2980 {
2981 error ("conversion of register to a different size");
2982 debug_generic_stmt (expr);
2983 return true;
2984 }
2985 else if (!handled_component_p (op))
2986 return false;
2987 }
2988
2989 expr = op;
2990 }
2991
2992 if (TREE_CODE (expr) == MEM_REF)
2993 {
2994 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
2995 {
2996 error ("invalid address operand in MEM_REF");
2997 debug_generic_stmt (expr);
2998 return true;
2999 }
3000 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3001 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3002 {
3003 error ("invalid offset operand in MEM_REF");
3004 debug_generic_stmt (expr);
3005 return true;
3006 }
3007 }
3008 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3009 {
3010 if (!TMR_BASE (expr)
3011 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3012 {
3013 error ("invalid address operand in TARGET_MEM_REF");
3014 return true;
3015 }
3016 if (!TMR_OFFSET (expr)
3017 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3018 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3019 {
3020 error ("invalid offset operand in TARGET_MEM_REF");
3021 debug_generic_stmt (expr);
3022 return true;
3023 }
3024 }
3025
3026 return ((require_lvalue || !is_gimple_min_invariant (expr))
3027 && verify_types_in_gimple_min_lval (expr));
3028 }
3029
3030 /* Returns true if the TYPE_POINTER_TO (SRC_OBJ) list of pointer-to types
3031 is empty or contains a type trivially convertible to DEST. */
3032
3033 static bool
3034 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3035 {
3036 tree src;
3037
3038 if (!TYPE_POINTER_TO (src_obj))
3039 return true;
3040
3041 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3042 if (useless_type_conversion_p (dest, src))
3043 return true;
3044
3045 return false;
3046 }
3047
3048 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3049 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3050
3051 static bool
3052 valid_fixed_convert_types_p (tree type1, tree type2)
3053 {
3054 return (FIXED_POINT_TYPE_P (type1)
3055 && (INTEGRAL_TYPE_P (type2)
3056 || SCALAR_FLOAT_TYPE_P (type2)
3057 || FIXED_POINT_TYPE_P (type2)));
3058 }
3059
3060 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3061 is a problem, otherwise false. */
3062
3063 static bool
3064 verify_gimple_call (gimple stmt)
3065 {
3066 tree fn = gimple_call_fn (stmt);
3067 tree fntype, fndecl;
3068 unsigned i;
3069
3070 if (gimple_call_internal_p (stmt))
3071 {
3072 if (fn)
3073 {
3074 error ("gimple call has two targets");
3075 debug_generic_stmt (fn);
3076 return true;
3077 }
3078 }
3079 else
3080 {
3081 if (!fn)
3082 {
3083 error ("gimple call has no target");
3084 return true;
3085 }
3086 }
3087
3088 if (fn && !is_gimple_call_addr (fn))
3089 {
3090 error ("invalid function in gimple call");
3091 debug_generic_stmt (fn);
3092 return true;
3093 }
3094
3095 if (fn
3096 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3097 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3098 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3099 {
3100 error ("non-function in gimple call");
3101 return true;
3102 }
3103
3104 fndecl = gimple_call_fndecl (stmt);
3105 if (fndecl
3106 && TREE_CODE (fndecl) == FUNCTION_DECL
3107 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3108 && !DECL_PURE_P (fndecl)
3109 && !TREE_READONLY (fndecl))
3110 {
3111 error ("invalid pure const state for function");
3112 return true;
3113 }
3114
3115 if (gimple_call_lhs (stmt)
3116 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3117 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3118 {
3119 error ("invalid LHS in gimple call");
3120 return true;
3121 }
3122
3123 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3124 {
3125 error ("LHS in noreturn call");
3126 return true;
3127 }
3128
3129 fntype = gimple_call_fntype (stmt);
3130 if (fntype
3131 && gimple_call_lhs (stmt)
3132 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3133 TREE_TYPE (fntype))
3134 /* ??? At least C++ misses conversions at assignments from
3135 void * call results.
3136 ??? Java is completely off. Especially with functions
3137 returning java.lang.Object.
3138 For now simply allow arbitrary pointer type conversions. */
3139 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3140 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3141 {
3142 error ("invalid conversion in gimple call");
3143 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3144 debug_generic_stmt (TREE_TYPE (fntype));
3145 return true;
3146 }
3147
3148 if (gimple_call_chain (stmt)
3149 && !is_gimple_val (gimple_call_chain (stmt)))
3150 {
3151 error ("invalid static chain in gimple call");
3152 debug_generic_stmt (gimple_call_chain (stmt));
3153 return true;
3154 }
3155
3156 /* If there is a static chain argument, this should not be an indirect
3157 call, and the decl should have DECL_STATIC_CHAIN set. */
3158 if (gimple_call_chain (stmt))
3159 {
3160 if (!gimple_call_fndecl (stmt))
3161 {
3162 error ("static chain in indirect gimple call");
3163 return true;
3164 }
3165 fn = TREE_OPERAND (fn, 0);
3166
3167 if (!DECL_STATIC_CHAIN (fn))
3168 {
3169 error ("static chain with function that doesn%'t use one");
3170 return true;
3171 }
3172 }
3173
3174 /* ??? The C frontend passes unpromoted arguments in case it
3175 didn't see a function declaration before the call. So for now
3176 leave the call arguments mostly unverified. Once we gimplify
3177 unit-at-a-time we have a chance to fix this. */
3178
3179 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3180 {
3181 tree arg = gimple_call_arg (stmt, i);
3182 if ((is_gimple_reg_type (TREE_TYPE (arg))
3183 && !is_gimple_val (arg))
3184 || (!is_gimple_reg_type (TREE_TYPE (arg))
3185 && !is_gimple_lvalue (arg)))
3186 {
3187 error ("invalid argument to gimple call");
3188 debug_generic_expr (arg);
3189 return true;
3190 }
3191 }
3192
3193 return false;
3194 }
3195
3196 /* Verifies the gimple comparison with the result type TYPE and
3197 the operands OP0 and OP1. */
3198
3199 static bool
3200 verify_gimple_comparison (tree type, tree op0, tree op1)
3201 {
3202 tree op0_type = TREE_TYPE (op0);
3203 tree op1_type = TREE_TYPE (op1);
3204
3205 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3206 {
3207 error ("invalid operands in gimple comparison");
3208 return true;
3209 }
3210
3211 /* For comparisons we do not have the operations type as the
3212 effective type the comparison is carried out in. Instead
3213 we require that either the first operand is trivially
3214 convertible into the second, or the other way around.
3215 Because we special-case pointers to void we allow
3216 comparisons of pointers with the same mode as well. */
3217 if (!useless_type_conversion_p (op0_type, op1_type)
3218 && !useless_type_conversion_p (op1_type, op0_type)
3219 && (!POINTER_TYPE_P (op0_type)
3220 || !POINTER_TYPE_P (op1_type)
3221 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3222 {
3223 error ("mismatching comparison operand types");
3224 debug_generic_expr (op0_type);
3225 debug_generic_expr (op1_type);
3226 return true;
3227 }
3228
3229 /* The resulting type of a comparison may be an effective boolean type. */
3230 if (INTEGRAL_TYPE_P (type)
3231 && (TREE_CODE (type) == BOOLEAN_TYPE
3232 || TYPE_PRECISION (type) == 1))
3233 {
3234 if (TREE_CODE (op0_type) == VECTOR_TYPE
3235 || TREE_CODE (op1_type) == VECTOR_TYPE)
3236 {
3237 error ("vector comparison returning a boolean");
3238 debug_generic_expr (op0_type);
3239 debug_generic_expr (op1_type);
3240 return true;
3241 }
3242 }
3243 /* Or an integer vector type with the same size and element count
3244 as the comparison operand types. */
3245 else if (TREE_CODE (type) == VECTOR_TYPE
3246 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3247 {
3248 if (TREE_CODE (op0_type) != VECTOR_TYPE
3249 || TREE_CODE (op1_type) != VECTOR_TYPE)
3250 {
3251 error ("non-vector operands in vector comparison");
3252 debug_generic_expr (op0_type);
3253 debug_generic_expr (op1_type);
3254 return true;
3255 }
3256
3257 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3258 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3259 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3260 /* The result of a vector comparison is of signed
3261 integral type. */
3262 || TYPE_UNSIGNED (TREE_TYPE (type)))
3263 {
3264 error ("invalid vector comparison resulting type");
3265 debug_generic_expr (type);
3266 return true;
3267 }
3268 }
3269 else
3270 {
3271 error ("bogus comparison result type");
3272 debug_generic_expr (type);
3273 return true;
3274 }
3275
3276 return false;
3277 }
3278
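/* Example of the rules above (illustrative GNU C, not GCC internals):
   comparing two vectors must yield a signed integer vector with the
   same element count and element size as the operands, e.g.

     typedef float v4sf __attribute__ ((vector_size (16)));
     typedef int v4si __attribute__ ((vector_size (16)));

     v4si f (v4sf a, v4sf b) { return a < b; }  // Each lane all-ones/zeros.

   whereas a plain boolean result for a vector comparison, or an
   unsigned element type for the mask, is rejected.  */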
3279 /* Verify a gimple assignment statement STMT with a unary rhs.
3280 Returns true if anything is wrong. */
3281
3282 static bool
3283 verify_gimple_assign_unary (gimple stmt)
3284 {
3285 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3286 tree lhs = gimple_assign_lhs (stmt);
3287 tree lhs_type = TREE_TYPE (lhs);
3288 tree rhs1 = gimple_assign_rhs1 (stmt);
3289 tree rhs1_type = TREE_TYPE (rhs1);
3290
3291 if (!is_gimple_reg (lhs))
3292 {
3293 error ("non-register as LHS of unary operation");
3294 return true;
3295 }
3296
3297 if (!is_gimple_val (rhs1))
3298 {
3299 error ("invalid operand in unary operation");
3300 return true;
3301 }
3302
3303 /* First handle conversions. */
3304 switch (rhs_code)
3305 {
3306 CASE_CONVERT:
3307 {
3308 /* Allow conversions from pointer type to integral type only if
3309 there is no sign or zero extension involved.
3310 For targets were the precision of ptrofftype doesn't match that
3311 of pointers we need to allow arbitrary conversions to ptrofftype. */
3312 if ((POINTER_TYPE_P (lhs_type)
3313 && INTEGRAL_TYPE_P (rhs1_type))
3314 || (POINTER_TYPE_P (rhs1_type)
3315 && INTEGRAL_TYPE_P (lhs_type)
3316 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3317 || ptrofftype_p (sizetype))))
3318 return false;
3319
3320 /* Allow conversion from integral to offset type and vice versa. */
3321 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3322 && INTEGRAL_TYPE_P (rhs1_type))
3323 || (INTEGRAL_TYPE_P (lhs_type)
3324 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3325 return false;
3326
3327 /* Otherwise assert we are converting between types of the
3328 same kind. */
3329 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3330 {
3331 error ("invalid types in nop conversion");
3332 debug_generic_expr (lhs_type);
3333 debug_generic_expr (rhs1_type);
3334 return true;
3335 }
3336
3337 return false;
3338 }
3339
3340 case ADDR_SPACE_CONVERT_EXPR:
3341 {
3342 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3343 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3344 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3345 {
3346 error ("invalid types in address space conversion");
3347 debug_generic_expr (lhs_type);
3348 debug_generic_expr (rhs1_type);
3349 return true;
3350 }
3351
3352 return false;
3353 }
3354
3355 case FIXED_CONVERT_EXPR:
3356 {
3357 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3358 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3359 {
3360 error ("invalid types in fixed-point conversion");
3361 debug_generic_expr (lhs_type);
3362 debug_generic_expr (rhs1_type);
3363 return true;
3364 }
3365
3366 return false;
3367 }
3368
3369 case FLOAT_EXPR:
3370 {
3371 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3372 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3373 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3374 {
3375 error ("invalid types in conversion to floating point");
3376 debug_generic_expr (lhs_type);
3377 debug_generic_expr (rhs1_type);
3378 return true;
3379 }
3380
3381 return false;
3382 }
3383
3384 case FIX_TRUNC_EXPR:
3385 {
3386 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3387 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3388 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3389 {
3390 error ("invalid types in conversion to integer");
3391 debug_generic_expr (lhs_type);
3392 debug_generic_expr (rhs1_type);
3393 return true;
3394 }
3395
3396 return false;
3397 }
3398
3399 case VEC_UNPACK_HI_EXPR:
3400 case VEC_UNPACK_LO_EXPR:
3401 case REDUC_MAX_EXPR:
3402 case REDUC_MIN_EXPR:
3403 case REDUC_PLUS_EXPR:
3404 case VEC_UNPACK_FLOAT_HI_EXPR:
3405 case VEC_UNPACK_FLOAT_LO_EXPR:
3406 /* FIXME. */
3407 return false;
3408
3409 case NEGATE_EXPR:
3410 case ABS_EXPR:
3411 case BIT_NOT_EXPR:
3412 case PAREN_EXPR:
3413 case NON_LVALUE_EXPR:
3414 case CONJ_EXPR:
3415 break;
3416
3417 default:
3418 gcc_unreachable ();
3419 }
3420
3421 /* For the remaining codes assert there is no conversion involved. */
3422 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3423 {
3424 error ("non-trivial conversion in unary operation");
3425 debug_generic_expr (lhs_type);
3426 debug_generic_expr (rhs1_type);
3427 return true;
3428 }
3429
3430 return false;
3431 }
3432
3433 /* Verify a gimple assignment statement STMT with a binary rhs.
3434 Returns true if anything is wrong. */
3435
3436 static bool
3437 verify_gimple_assign_binary (gimple stmt)
3438 {
3439 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3440 tree lhs = gimple_assign_lhs (stmt);
3441 tree lhs_type = TREE_TYPE (lhs);
3442 tree rhs1 = gimple_assign_rhs1 (stmt);
3443 tree rhs1_type = TREE_TYPE (rhs1);
3444 tree rhs2 = gimple_assign_rhs2 (stmt);
3445 tree rhs2_type = TREE_TYPE (rhs2);
3446
3447 if (!is_gimple_reg (lhs))
3448 {
3449 error ("non-register as LHS of binary operation");
3450 return true;
3451 }
3452
3453 if (!is_gimple_val (rhs1)
3454 || !is_gimple_val (rhs2))
3455 {
3456 error ("invalid operands in binary operation");
3457 return true;
3458 }
3459
3460 /* First handle operations that involve different types. */
3461 switch (rhs_code)
3462 {
3463 case COMPLEX_EXPR:
3464 {
3465 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3466 || !(INTEGRAL_TYPE_P (rhs1_type)
3467 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3468 || !(INTEGRAL_TYPE_P (rhs2_type)
3469 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3470 {
3471 error ("type mismatch in complex expression");
3472 debug_generic_expr (lhs_type);
3473 debug_generic_expr (rhs1_type);
3474 debug_generic_expr (rhs2_type);
3475 return true;
3476 }
3477
3478 return false;
3479 }
3480
3481 case LSHIFT_EXPR:
3482 case RSHIFT_EXPR:
3483 case LROTATE_EXPR:
3484 case RROTATE_EXPR:
3485 {
3486 /* Shifts and rotates are ok on integral types, fixed point
3487 types and integer vector types. */
3488 if ((!INTEGRAL_TYPE_P (rhs1_type)
3489 && !FIXED_POINT_TYPE_P (rhs1_type)
3490 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3491 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3492 || (!INTEGRAL_TYPE_P (rhs2_type)
3493 /* Vector shifts of vectors are also ok. */
3494 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3495 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3496 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3497 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3498 || !useless_type_conversion_p (lhs_type, rhs1_type))
3499 {
3500 error ("type mismatch in shift expression");
3501 debug_generic_expr (lhs_type);
3502 debug_generic_expr (rhs1_type);
3503 debug_generic_expr (rhs2_type);
3504 return true;
3505 }
3506
3507 return false;
3508 }
3509
3510 case VEC_LSHIFT_EXPR:
3511 case VEC_RSHIFT_EXPR:
3512 {
3513 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3514 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3515 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3516 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3517 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3518 || (!INTEGRAL_TYPE_P (rhs2_type)
3519 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3520 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3521 || !useless_type_conversion_p (lhs_type, rhs1_type))
3522 {
3523 error ("type mismatch in vector shift expression");
3524 debug_generic_expr (lhs_type);
3525 debug_generic_expr (rhs1_type);
3526 debug_generic_expr (rhs2_type);
3527 return true;
3528 }
3529 /* For shifting a vector of non-integral components we
3530 only allow shifting by a constant multiple of the element size. */
3531 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3532 && (TREE_CODE (rhs2) != INTEGER_CST
3533 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3534 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3535 {
3536 error ("non-element sized vector shift of floating point vector");
3537 return true;
3538 }
3539
3540 return false;
3541 }
3542
3543 case WIDEN_LSHIFT_EXPR:
3544 {
3545 if (!INTEGRAL_TYPE_P (lhs_type)
3546 || !INTEGRAL_TYPE_P (rhs1_type)
3547 || TREE_CODE (rhs2) != INTEGER_CST
3548 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3549 {
3550 error ("type mismatch in widening vector shift expression");
3551 debug_generic_expr (lhs_type);
3552 debug_generic_expr (rhs1_type);
3553 debug_generic_expr (rhs2_type);
3554 return true;
3555 }
3556
3557 return false;
3558 }
3559
3560 case VEC_WIDEN_LSHIFT_HI_EXPR:
3561 case VEC_WIDEN_LSHIFT_LO_EXPR:
3562 {
3563 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3564 || TREE_CODE (lhs_type) != VECTOR_TYPE
3565 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3566 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3567 || TREE_CODE (rhs2) != INTEGER_CST
3568 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3569 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3570 {
3571 error ("type mismatch in widening vector shift expression");
3572 debug_generic_expr (lhs_type);
3573 debug_generic_expr (rhs1_type);
3574 debug_generic_expr (rhs2_type);
3575 return true;
3576 }
3577
3578 return false;
3579 }
3580
3581 case PLUS_EXPR:
3582 case MINUS_EXPR:
3583 {
3584 tree lhs_etype = lhs_type;
3585 tree rhs1_etype = rhs1_type;
3586 tree rhs2_etype = rhs2_type;
3587 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3588 {
3589 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3590 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3591 {
3592 error ("invalid non-vector operands to vector valued plus");
3593 return true;
3594 }
3595 lhs_etype = TREE_TYPE (lhs_type);
3596 rhs1_etype = TREE_TYPE (rhs1_type);
3597 rhs2_etype = TREE_TYPE (rhs2_type);
3598 }
3599 if (POINTER_TYPE_P (lhs_etype)
3600 || POINTER_TYPE_P (rhs1_etype)
3601 || POINTER_TYPE_P (rhs2_etype))
3602 {
3603 error ("invalid (pointer) operands to plus/minus");
3604 return true;
3605 }
3606
3607 /* Continue with generic binary expression handling. */
3608 break;
3609 }
3610
3611 case POINTER_PLUS_EXPR:
3612 {
3613 if (!POINTER_TYPE_P (rhs1_type)
3614 || !useless_type_conversion_p (lhs_type, rhs1_type)
3615 || !ptrofftype_p (rhs2_type))
3616 {
3617 error ("type mismatch in pointer plus expression");
3618 debug_generic_stmt (lhs_type);
3619 debug_generic_stmt (rhs1_type);
3620 debug_generic_stmt (rhs2_type);
3621 return true;
3622 }
3623
3624 return false;
3625 }
3626
3627 case TRUTH_ANDIF_EXPR:
3628 case TRUTH_ORIF_EXPR:
3629 case TRUTH_AND_EXPR:
3630 case TRUTH_OR_EXPR:
3631 case TRUTH_XOR_EXPR:
3632
3633 gcc_unreachable ();
3634
3635 case LT_EXPR:
3636 case LE_EXPR:
3637 case GT_EXPR:
3638 case GE_EXPR:
3639 case EQ_EXPR:
3640 case NE_EXPR:
3641 case UNORDERED_EXPR:
3642 case ORDERED_EXPR:
3643 case UNLT_EXPR:
3644 case UNLE_EXPR:
3645 case UNGT_EXPR:
3646 case UNGE_EXPR:
3647 case UNEQ_EXPR:
3648 case LTGT_EXPR:
3649 /* Comparisons are also binary, but the result type is not
3650 connected to the operand types. */
3651 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3652
3653 case WIDEN_MULT_EXPR:
3654 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3655 return true;
3656 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3657 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3658
3659 case WIDEN_SUM_EXPR:
3660 case VEC_WIDEN_MULT_HI_EXPR:
3661 case VEC_WIDEN_MULT_LO_EXPR:
3662 case VEC_WIDEN_MULT_EVEN_EXPR:
3663 case VEC_WIDEN_MULT_ODD_EXPR:
3664 case VEC_PACK_TRUNC_EXPR:
3665 case VEC_PACK_SAT_EXPR:
3666 case VEC_PACK_FIX_TRUNC_EXPR:
3667 /* FIXME. */
3668 return false;
3669
3670 case MULT_EXPR:
3671 case MULT_HIGHPART_EXPR:
3672 case TRUNC_DIV_EXPR:
3673 case CEIL_DIV_EXPR:
3674 case FLOOR_DIV_EXPR:
3675 case ROUND_DIV_EXPR:
3676 case TRUNC_MOD_EXPR:
3677 case CEIL_MOD_EXPR:
3678 case FLOOR_MOD_EXPR:
3679 case ROUND_MOD_EXPR:
3680 case RDIV_EXPR:
3681 case EXACT_DIV_EXPR:
3682 case MIN_EXPR:
3683 case MAX_EXPR:
3684 case BIT_IOR_EXPR:
3685 case BIT_XOR_EXPR:
3686 case BIT_AND_EXPR:
3687 /* Continue with generic binary expression handling. */
3688 break;
3689
3690 default:
3691 gcc_unreachable ();
3692 }
3693
3694 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3695 || !useless_type_conversion_p (lhs_type, rhs2_type))
3696 {
3697 error ("type mismatch in binary expression");
3698 debug_generic_stmt (lhs_type);
3699 debug_generic_stmt (rhs1_type);
3700 debug_generic_stmt (rhs2_type);
3701 return true;
3702 }
3703
3704 return false;
3705 }
3706
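/* Example of the shift rule above (illustrative GNU C): unlike the
   generic binary case, the shift count need not match the type of the
   shifted value; only the shifted value and the result must agree:

     long x; int n;
     x = x << n;     // OK: lhs and rhs1 are long, rhs2 merely integral.

   A statement computing a long result from shifting an int would
   instead fail the useless_type_conversion_p (lhs_type, rhs1_type)
   check.  */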
3707 /* Verify a gimple assignment statement STMT with a ternary rhs.
3708 Returns true if anything is wrong. */
3709
3710 static bool
3711 verify_gimple_assign_ternary (gimple stmt)
3712 {
3713 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3714 tree lhs = gimple_assign_lhs (stmt);
3715 tree lhs_type = TREE_TYPE (lhs);
3716 tree rhs1 = gimple_assign_rhs1 (stmt);
3717 tree rhs1_type = TREE_TYPE (rhs1);
3718 tree rhs2 = gimple_assign_rhs2 (stmt);
3719 tree rhs2_type = TREE_TYPE (rhs2);
3720 tree rhs3 = gimple_assign_rhs3 (stmt);
3721 tree rhs3_type = TREE_TYPE (rhs3);
3722
3723 if (!is_gimple_reg (lhs))
3724 {
3725 error ("non-register as LHS of ternary operation");
3726 return true;
3727 }
3728
3729 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3730 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3731 || !is_gimple_val (rhs2)
3732 || !is_gimple_val (rhs3))
3733 {
3734 error ("invalid operands in ternary operation");
3735 return true;
3736 }
3737
3738 /* First handle operations that involve different types. */
3739 switch (rhs_code)
3740 {
3741 case WIDEN_MULT_PLUS_EXPR:
3742 case WIDEN_MULT_MINUS_EXPR:
3743 if ((!INTEGRAL_TYPE_P (rhs1_type)
3744 && !FIXED_POINT_TYPE_P (rhs1_type))
3745 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3746 || !useless_type_conversion_p (lhs_type, rhs3_type)
3747 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3748 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3749 {
3750 error ("type mismatch in widening multiply-accumulate expression");
3751 debug_generic_expr (lhs_type);
3752 debug_generic_expr (rhs1_type);
3753 debug_generic_expr (rhs2_type);
3754 debug_generic_expr (rhs3_type);
3755 return true;
3756 }
3757 break;
3758
3759 case FMA_EXPR:
3760 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3761 || !useless_type_conversion_p (lhs_type, rhs2_type)
3762 || !useless_type_conversion_p (lhs_type, rhs3_type))
3763 {
3764 error ("type mismatch in fused multiply-add expression");
3765 debug_generic_expr (lhs_type);
3766 debug_generic_expr (rhs1_type);
3767 debug_generic_expr (rhs2_type);
3768 debug_generic_expr (rhs3_type);
3769 return true;
3770 }
3771 break;
3772
3773 case COND_EXPR:
3774 case VEC_COND_EXPR:
3775 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3776 || !useless_type_conversion_p (lhs_type, rhs3_type))
3777 {
3778 error ("type mismatch in conditional expression");
3779 debug_generic_expr (lhs_type);
3780 debug_generic_expr (rhs2_type);
3781 debug_generic_expr (rhs3_type);
3782 return true;
3783 }
3784 break;
3785
3786 case VEC_PERM_EXPR:
3787 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3788 || !useless_type_conversion_p (lhs_type, rhs2_type))
3789 {
3790 error ("type mismatch in vector permute expression");
3791 debug_generic_expr (lhs_type);
3792 debug_generic_expr (rhs1_type);
3793 debug_generic_expr (rhs2_type);
3794 debug_generic_expr (rhs3_type);
3795 return true;
3796 }
3797
3798 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3799 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3800 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3801 {
3802 error ("vector types expected in vector permute expression");
3803 debug_generic_expr (lhs_type);
3804 debug_generic_expr (rhs1_type);
3805 debug_generic_expr (rhs2_type);
3806 debug_generic_expr (rhs3_type);
3807 return true;
3808 }
3809
3810 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3811 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3812 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3813 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3814 != TYPE_VECTOR_SUBPARTS (lhs_type))
3815 {
3816 error ("vectors with different element number found "
3817 "in vector permute expression");
3818 debug_generic_expr (lhs_type);
3819 debug_generic_expr (rhs1_type);
3820 debug_generic_expr (rhs2_type);
3821 debug_generic_expr (rhs3_type);
3822 return true;
3823 }
3824
3825 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3826 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3827 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3828 {
3829 error ("invalid mask type in vector permute expression");
3830 debug_generic_expr (lhs_type);
3831 debug_generic_expr (rhs1_type);
3832 debug_generic_expr (rhs2_type);
3833 debug_generic_expr (rhs3_type);
3834 return true;
3835 }
3836
3837 return false;
3838
3839 case DOT_PROD_EXPR:
3840 case REALIGN_LOAD_EXPR:
3841 /* FIXME. */
3842 return false;
3843
3844 default:
3845 gcc_unreachable ();
3846 }
3847 return false;
3848 }
3849
3850 /* Verify a gimple assignment statement STMT with a single rhs.
3851 Returns true if anything is wrong. */
3852
3853 static bool
3854 verify_gimple_assign_single (gimple stmt)
3855 {
3856 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3857 tree lhs = gimple_assign_lhs (stmt);
3858 tree lhs_type = TREE_TYPE (lhs);
3859 tree rhs1 = gimple_assign_rhs1 (stmt);
3860 tree rhs1_type = TREE_TYPE (rhs1);
3861 bool res = false;
3862
3863 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3864 {
3865 error ("non-trivial conversion at assignment");
3866 debug_generic_expr (lhs_type);
3867 debug_generic_expr (rhs1_type);
3868 return true;
3869 }
3870
3871 if (gimple_clobber_p (stmt)
3872 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3873 {
3874 error ("non-decl/MEM_REF LHS in clobber statement");
3875 debug_generic_expr (lhs);
3876 return true;
3877 }
3878
3879 if (handled_component_p (lhs))
3880 res |= verify_types_in_gimple_reference (lhs, true);
3881
3882 /* Special codes we cannot handle via their class. */
3883 switch (rhs_code)
3884 {
3885 case ADDR_EXPR:
3886 {
3887 tree op = TREE_OPERAND (rhs1, 0);
3888 if (!is_gimple_addressable (op))
3889 {
3890 error ("invalid operand in unary expression");
3891 return true;
3892 }
3893
3894 /* Technically there is no longer a need for matching types, but
3895 gimple hygiene asks for this check. In LTO we can end up
3896 combining incompatible units and thus see addresses of globals
3897 that change their type to a common one. */
3898 if (!in_lto_p
3899 && !types_compatible_p (TREE_TYPE (op),
3900 TREE_TYPE (TREE_TYPE (rhs1)))
3901 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3902 TREE_TYPE (op)))
3903 {
3904 error ("type mismatch in address expression");
3905 debug_generic_stmt (TREE_TYPE (rhs1));
3906 debug_generic_stmt (TREE_TYPE (op));
3907 return true;
3908 }
3909
3910 return verify_types_in_gimple_reference (op, true);
3911 }
3912
3913 /* tcc_reference */
3914 case INDIRECT_REF:
3915 error ("INDIRECT_REF in gimple IL");
3916 return true;
3917
3918 case COMPONENT_REF:
3919 case BIT_FIELD_REF:
3920 case ARRAY_REF:
3921 case ARRAY_RANGE_REF:
3922 case VIEW_CONVERT_EXPR:
3923 case REALPART_EXPR:
3924 case IMAGPART_EXPR:
3925 case TARGET_MEM_REF:
3926 case MEM_REF:
3927 if (!is_gimple_reg (lhs)
3928 && is_gimple_reg_type (TREE_TYPE (lhs)))
3929 {
3930 error ("invalid rhs for gimple memory store");
3931 debug_generic_stmt (lhs);
3932 debug_generic_stmt (rhs1);
3933 return true;
3934 }
3935 return res || verify_types_in_gimple_reference (rhs1, false);
3936
3937 /* tcc_constant */
3938 case SSA_NAME:
3939 case INTEGER_CST:
3940 case REAL_CST:
3941 case FIXED_CST:
3942 case COMPLEX_CST:
3943 case VECTOR_CST:
3944 case STRING_CST:
3945 return res;
3946
3947 /* tcc_declaration */
3948 case CONST_DECL:
3949 return res;
3950 case VAR_DECL:
3951 case PARM_DECL:
3952 if (!is_gimple_reg (lhs)
3953 && !is_gimple_reg (rhs1)
3954 && is_gimple_reg_type (TREE_TYPE (lhs)))
3955 {
3956 error ("invalid rhs for gimple memory store");
3957 debug_generic_stmt (lhs);
3958 debug_generic_stmt (rhs1);
3959 return true;
3960 }
3961 return res;
3962
3963 case CONSTRUCTOR:
3964 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3965 {
3966 unsigned int i;
3967 tree elt_i, elt_v, elt_t = NULL_TREE;
3968
3969 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3970 return res;
3971 /* For vector CONSTRUCTORs we require that either it is an empty
3972 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3973 (then the element count must be correct to cover the whole
3974 outer vector and the index must be NULL on all elements), or it
3975 is a CONSTRUCTOR of scalar elements, where, as an exception, we
3976 allow a smaller number of elements (assuming zero filling) and
3977 consecutive indexes instead of NULL indexes (such
3978 CONSTRUCTORs can appear in the IL from FEs). */
3979 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3980 {
3981 if (elt_t == NULL_TREE)
3982 {
3983 elt_t = TREE_TYPE (elt_v);
3984 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3985 {
3987 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3988 TREE_TYPE (elt_t)))
3989 {
3990 error ("incorrect type of vector CONSTRUCTOR"
3991 " elements");
3992 debug_generic_stmt (rhs1);
3993 return true;
3994 }
3995 else if (CONSTRUCTOR_NELTS (rhs1)
3996 * TYPE_VECTOR_SUBPARTS (elt_t)
3997 != TYPE_VECTOR_SUBPARTS (rhs1_type))
3998 {
3999 error ("incorrect number of vector CONSTRUCTOR"
4000 " elements");
4001 debug_generic_stmt (rhs1);
4002 return true;
4003 }
4004 }
4005 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4006 elt_t))
4007 {
4008 error ("incorrect type of vector CONSTRUCTOR elements");
4009 debug_generic_stmt (rhs1);
4010 return true;
4011 }
4012 else if (CONSTRUCTOR_NELTS (rhs1)
4013 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4014 {
4015 error ("incorrect number of vector CONSTRUCTOR elements");
4016 debug_generic_stmt (rhs1);
4017 return true;
4018 }
4019 }
4020 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4021 {
4022 error ("incorrect type of vector CONSTRUCTOR elements");
4023 debug_generic_stmt (rhs1);
4024 return true;
4025 }
4026 if (elt_i != NULL_TREE
4027 && (TREE_CODE (elt_t) == VECTOR_TYPE
4028 || TREE_CODE (elt_i) != INTEGER_CST
4029 || compare_tree_int (elt_i, i) != 0))
4030 {
4031 error ("vector CONSTRUCTOR with non-NULL element index");
4032 debug_generic_stmt (rhs1);
4033 return true;
4034 }
4035 }
4036 }
4037 return res;
4038 case OBJ_TYPE_REF:
4039 case ASSERT_EXPR:
4040 case WITH_SIZE_EXPR:
4041 /* FIXME. */
4042 return res;
4043
4044 default:;
4045 }
4046
4047 return res;
4048 }
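/* For illustration: forms of vector CONSTRUCTOR accepted by the checks
   above, for a V4SI left-hand side (schematically):

     { v2si_1, v2si_2 }        two V2SI elements covering all four lanes
     { i_1, i_2, i_3, i_4 }    four scalar elements with NULL indexes
     { i_1, i_2 }              fewer scalars, implicitly zero filled

   Mixing vector and scalar elements, a vector-element CONSTRUCTOR that
   does not exactly cover the destination, and explicit non-consecutive
   indexes are all diagnosed.  */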
4049
4050 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4051 is a problem, otherwise false. */
4052
4053 static bool
4054 verify_gimple_assign (gimple stmt)
4055 {
4056 switch (gimple_assign_rhs_class (stmt))
4057 {
4058 case GIMPLE_SINGLE_RHS:
4059 return verify_gimple_assign_single (stmt);
4060
4061 case GIMPLE_UNARY_RHS:
4062 return verify_gimple_assign_unary (stmt);
4063
4064 case GIMPLE_BINARY_RHS:
4065 return verify_gimple_assign_binary (stmt);
4066
4067 case GIMPLE_TERNARY_RHS:
4068 return verify_gimple_assign_ternary (stmt);
4069
4070 default:
4071 gcc_unreachable ();
4072 }
4073 }
4074
4075 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4076 is a problem, otherwise false. */
4077
4078 static bool
4079 verify_gimple_return (gimple stmt)
4080 {
4081 tree op = gimple_return_retval (stmt);
4082 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4083
4084 /* We cannot test for present return values as we do not fix up missing
4085 return values from the original source. */
4086 if (op == NULL)
4087 return false;
4088
4089 if (!is_gimple_val (op)
4090 && TREE_CODE (op) != RESULT_DECL)
4091 {
4092 error ("invalid operand in return statement");
4093 debug_generic_stmt (op);
4094 return true;
4095 }
4096
4097 if ((TREE_CODE (op) == RESULT_DECL
4098 && DECL_BY_REFERENCE (op))
4099 || (TREE_CODE (op) == SSA_NAME
4100 && SSA_NAME_VAR (op)
4101 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4102 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4103 op = TREE_TYPE (op);
4104
4105 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4106 {
4107 error ("invalid conversion in return statement");
4108 debug_generic_stmt (restype);
4109 debug_generic_stmt (TREE_TYPE (op));
4110 return true;
4111 }
4112
4113 return false;
4114 }
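/* For illustration: for a function returning a large aggregate by
   invisible reference, e.g.

     struct big { char buf[4096]; };
     struct big make_big (void);

   the RESULT_DECL is DECL_BY_REFERENCE and has pointer type, which is
   why the code above replaces OP by its type, so that the final
   comparison is against the pointed-to type rather than the
   pointer.  */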
4115
4116
4117 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4118 is a problem, otherwise false. */
4119
4120 static bool
4121 verify_gimple_goto (gimple stmt)
4122 {
4123 tree dest = gimple_goto_dest (stmt);
4124
4125 /* ??? We have two canonical forms of direct goto destinations, a
4126 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4127 if (TREE_CODE (dest) != LABEL_DECL
4128 && (!is_gimple_val (dest)
4129 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4130 {
4131 error ("goto destination is neither a label nor a pointer");
4132 return true;
4133 }
4134
4135 return false;
4136 }
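/* For illustration: the two destination forms correspond to user-level

     goto out;           direct goto, bare LABEL_DECL destination
     goto *targets[i];   GNU C computed goto, pointer-valued destination

   After CFG construction simple direct gotos are normally represented
   implicitly by edges, so an explicit GIMPLE_GOTO usually carries the
   computed form.  */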
4137
4138 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4139 is a problem, otherwise false. */
4140
4141 static bool
4142 verify_gimple_switch (gimple stmt)
4143 {
4144 unsigned int i, n;
4145 tree elt, prev_upper_bound = NULL_TREE;
4146 tree index_type, elt_type = NULL_TREE;
4147
4148 if (!is_gimple_val (gimple_switch_index (stmt)))
4149 {
4150 error ("invalid operand to switch statement");
4151 debug_generic_stmt (gimple_switch_index (stmt));
4152 return true;
4153 }
4154
4155 index_type = TREE_TYPE (gimple_switch_index (stmt));
4156 if (! INTEGRAL_TYPE_P (index_type))
4157 {
4158 error ("non-integral type switch statement");
4159 debug_generic_expr (index_type);
4160 return true;
4161 }
4162
4163 elt = gimple_switch_label (stmt, 0);
4164 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4165 {
4166 error ("invalid default case label in switch statement");
4167 debug_generic_expr (elt);
4168 return true;
4169 }
4170
4171 n = gimple_switch_num_labels (stmt);
4172 for (i = 1; i < n; i++)
4173 {
4174 elt = gimple_switch_label (stmt, i);
4175
4176 if (! CASE_LOW (elt))
4177 {
4178 error ("invalid case label in switch statement");
4179 debug_generic_expr (elt);
4180 return true;
4181 }
4182 if (CASE_HIGH (elt)
4183 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4184 {
4185 error ("invalid case range in switch statement");
4186 debug_generic_expr (elt);
4187 return true;
4188 }
4189
4190 if (elt_type)
4191 {
4192 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4193 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4194 {
4195 error ("type mismatch for case label in switch statement");
4196 debug_generic_expr (elt);
4197 return true;
4198 }
4199 }
4200 else
4201 {
4202 elt_type = TREE_TYPE (CASE_LOW (elt));
4203 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4204 {
4205 error ("type precision mismatch in switch statement");
4206 return true;
4207 }
4208 }
4209
4210 if (prev_upper_bound)
4211 {
4212 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4213 {
4214 error ("case labels not sorted in switch statement");
4215 return true;
4216 }
4217 }
4218
4219 prev_upper_bound = CASE_HIGH (elt);
4220 if (! prev_upper_bound)
4221 prev_upper_bound = CASE_LOW (elt);
4222 }
4223
4224 return false;
4225 }
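/* For illustration: the shape verified above, schematically in GIMPLE
   dump syntax, has the default label first and the remaining labels
   sorted by CASE_LOW:

     switch (x_1) <default: <L3>, case 1: <L0>,
                   case 5 ... 7: <L1>, case 9: <L2>>

   Unsorted labels, overlapping ranges and a non-integral index type
   are all diagnosed.  */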
4226
4227 /* Verify a gimple debug statement STMT.
4228 Returns true if anything is wrong. */
4229
4230 static bool
4231 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4232 {
4233 /* There isn't much that could be wrong in a gimple debug stmt. A
4234 gimple debug bind stmt, for example, maps a tree (usually a
4235 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4236 or member of an aggregate type) to another tree that can be an
4237 arbitrary expression. These stmts expand into debug insns and
4238 are converted to debug notes by var-tracking.c. */
4239 return false;
4240 }
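/* For illustration: a debug bind as it appears in GIMPLE dumps,

     # DEBUG x => x_3 + 1

   maps the user variable x to an arbitrary value expression, so there
   is nothing structural to verify here.  */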
4241
4242 /* Verify a gimple label statement STMT.
4243 Returns true if anything is wrong. */
4244
4245 static bool
4246 verify_gimple_label (gimple stmt)
4247 {
4248 tree decl = gimple_label_label (stmt);
4249 int uid;
4250 bool err = false;
4251
4252 if (TREE_CODE (decl) != LABEL_DECL)
4253 return true;
4254 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4255 && DECL_CONTEXT (decl) != current_function_decl)
4256 {
4257 error ("label's context is not the current function decl");
4258 err |= true;
4259 }
4260
4261 uid = LABEL_DECL_UID (decl);
4262 if (cfun->cfg
4263 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4264 {
4265 error ("incorrect entry in label_to_block_map");
4266 err |= true;
4267 }
4268
4269 uid = EH_LANDING_PAD_NR (decl);
4270 if (uid)
4271 {
4272 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4273 if (decl != lp->post_landing_pad)
4274 {
4275 error ("incorrect setting of landing pad number");
4276 err |= true;
4277 }
4278 }
4279
4280 return err;
4281 }
4282
4283 /* Verify the GIMPLE statement STMT. Returns true if there is an
4284 error, otherwise false. */
4285
4286 static bool
4287 verify_gimple_stmt (gimple stmt)
4288 {
4289 switch (gimple_code (stmt))
4290 {
4291 case GIMPLE_ASSIGN:
4292 return verify_gimple_assign (stmt);
4293
4294 case GIMPLE_LABEL:
4295 return verify_gimple_label (stmt);
4296
4297 case GIMPLE_CALL:
4298 return verify_gimple_call (stmt);
4299
4300 case GIMPLE_COND:
4301 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4302 {
4303 error ("invalid comparison code in gimple cond");
4304 return true;
4305 }
4306 if (!(!gimple_cond_true_label (stmt)
4307 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4308 || !(!gimple_cond_false_label (stmt)
4309 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4310 {
4311 error ("invalid labels in gimple cond");
4312 return true;
4313 }
4314
4315 return verify_gimple_comparison (boolean_type_node,
4316 gimple_cond_lhs (stmt),
4317 gimple_cond_rhs (stmt));
4318
4319 case GIMPLE_GOTO:
4320 return verify_gimple_goto (stmt);
4321
4322 case GIMPLE_SWITCH:
4323 return verify_gimple_switch (stmt);
4324
4325 case GIMPLE_RETURN:
4326 return verify_gimple_return (stmt);
4327
4328 case GIMPLE_ASM:
4329 return false;
4330
4331 case GIMPLE_TRANSACTION:
4332 return verify_gimple_transaction (stmt);
4333
4334 /* Tuples that do not have tree operands. */
4335 case GIMPLE_NOP:
4336 case GIMPLE_PREDICT:
4337 case GIMPLE_RESX:
4338 case GIMPLE_EH_DISPATCH:
4339 case GIMPLE_EH_MUST_NOT_THROW:
4340 return false;
4341
4342 CASE_GIMPLE_OMP:
4343 /* OpenMP directives are validated by the FE and never operated
4344 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4345 non-gimple expressions when the main index variable has had
4346 its address taken. This does not affect the loop itself
4347 because the header of a GIMPLE_OMP_FOR is merely used to determine
4348 how to set up the parallel iteration. */
4349 return false;
4350
4351 case GIMPLE_DEBUG:
4352 return verify_gimple_debug (stmt);
4353
4354 default:
4355 gcc_unreachable ();
4356 }
4357 }
4358
4359 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4360 and false otherwise. */
4361
4362 static bool
4363 verify_gimple_phi (gimple phi)
4364 {
4365 bool err = false;
4366 unsigned i;
4367 tree phi_result = gimple_phi_result (phi);
4368 bool virtual_p;
4369
4370 if (!phi_result)
4371 {
4372 error ("invalid PHI result");
4373 return true;
4374 }
4375
4376 virtual_p = virtual_operand_p (phi_result);
4377 if (TREE_CODE (phi_result) != SSA_NAME
4378 || (virtual_p
4379 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4380 {
4381 error ("invalid PHI result");
4382 err = true;
4383 }
4384
4385 for (i = 0; i < gimple_phi_num_args (phi); i++)
4386 {
4387 tree t = gimple_phi_arg_def (phi, i);
4388
4389 if (!t)
4390 {
4391 error ("missing PHI def");
4392 err |= true;
4393 continue;
4394 }
4395 /* Addressable variables do have SSA_NAMEs but they
4396 are not considered gimple values. */
4397 else if ((TREE_CODE (t) == SSA_NAME
4398 && virtual_p != virtual_operand_p (t))
4399 || (virtual_p
4400 && (TREE_CODE (t) != SSA_NAME
4401 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4402 || (!virtual_p
4403 && !is_gimple_val (t)))
4404 {
4405 error ("invalid PHI argument");
4406 debug_generic_expr (t);
4407 err |= true;
4408 }
4409 #ifdef ENABLE_TYPES_CHECKING
4410 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4411 {
4412 error ("incompatible types in PHI argument %u", i);
4413 debug_generic_stmt (TREE_TYPE (phi_result));
4414 debug_generic_stmt (TREE_TYPE (t));
4415 err |= true;
4416 }
4417 #endif
4418 }
4419
4420 return err;
4421 }
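/* For illustration: a well-formed PHI, schematically in GIMPLE dump
   syntax,

     # x_4 = PHI <x_2(3), x_3(4)>

   has an SSA_NAME result and one gimple-value argument per incoming
   edge, each trivially convertible to the result type.  A virtual PHI
   instead merges the single virtual operand:

     # .MEM_5 = PHI <.MEM_2(3), .MEM_4(4)>  */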
4422
4423 /* Verify the GIMPLE statements inside the sequence STMTS. */
4424
4425 static bool
4426 verify_gimple_in_seq_2 (gimple_seq stmts)
4427 {
4428 gimple_stmt_iterator ittr;
4429 bool err = false;
4430
4431 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4432 {
4433 gimple stmt = gsi_stmt (ittr);
4434
4435 switch (gimple_code (stmt))
4436 {
4437 case GIMPLE_BIND:
4438 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4439 break;
4440
4441 case GIMPLE_TRY:
4442 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4443 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4444 break;
4445
4446 case GIMPLE_EH_FILTER:
4447 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4448 break;
4449
4450 case GIMPLE_EH_ELSE:
4451 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4452 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4453 break;
4454
4455 case GIMPLE_CATCH:
4456 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4457 break;
4458
4459 case GIMPLE_TRANSACTION:
4460 err |= verify_gimple_transaction (stmt);
4461 break;
4462
4463 default:
4464 {
4465 bool err2 = verify_gimple_stmt (stmt);
4466 if (err2)
4467 debug_gimple_stmt (stmt);
4468 err |= err2;
4469 }
4470 }
4471 }
4472
4473 return err;
4474 }
4475
4476 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4477 is a problem, otherwise false. */
4478
4479 static bool
4480 verify_gimple_transaction (gimple stmt)
4481 {
4482 tree lab = gimple_transaction_label (stmt);
4483 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4484 return true;
4485 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4486 }
4487
4488
4489 /* Verify the GIMPLE statements inside the statement list STMTS. */
4490
4491 DEBUG_FUNCTION void
4492 verify_gimple_in_seq (gimple_seq stmts)
4493 {
4494 timevar_push (TV_TREE_STMT_VERIFY);
4495 if (verify_gimple_in_seq_2 (stmts))
4496 internal_error ("verify_gimple failed");
4497 timevar_pop (TV_TREE_STMT_VERIFY);
4498 }
4499
4500 /* Return true when T can be shared. */
4501
4502 static bool
4503 tree_node_can_be_shared (tree t)
4504 {
4505 if (IS_TYPE_OR_DECL_P (t)
4506 || is_gimple_min_invariant (t)
4507 || TREE_CODE (t) == SSA_NAME
4508 || t == error_mark_node
4509 || TREE_CODE (t) == IDENTIFIER_NODE)
4510 return true;
4511
4512 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4513 return true;
4514
4515 if (DECL_P (t))
4516 return true;
4517
4518 return false;
4519 }
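/* For illustration: leaf trees such as SSA names, decls and constants
   may appear in any number of statements, but structured trees may
   not.  If two statements both reference a[i_1], each must carry its
   own ARRAY_REF node, even though the operands a and i_1 are shared
   between the two copies.  */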
4520
4521 /* Called via walk_tree. Verify tree sharing. */
4522
4523 static tree
4524 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4525 {
4526 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4527
4528 if (tree_node_can_be_shared (*tp))
4529 {
4530 *walk_subtrees = false;
4531 return NULL;
4532 }
4533
4534 if (pointer_set_insert (visited, *tp))
4535 return *tp;
4536
4537 return NULL;
4538 }
4539
4540 /* Called via walk_gimple_stmt. Verify tree sharing. */
4541
4542 static tree
4543 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4544 {
4545 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4546 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4547 }
4548
4549 static bool eh_error_found;
4550 static int
4551 verify_eh_throw_stmt_node (void **slot, void *data)
4552 {
4553 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4554 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4555
4556 if (!pointer_set_contains (visited, node->stmt))
4557 {
4558 error ("dead STMT in EH table");
4559 debug_gimple_stmt (node->stmt);
4560 eh_error_found = true;
4561 }
4562 return 1;
4563 }
4564
4565 /* Verify that the block referenced by location LOC is in BLOCKS. Returns true on error. */
4566
4567 static bool
4568 verify_location (pointer_set_t *blocks, location_t loc)
4569 {
4570 tree block = LOCATION_BLOCK (loc);
4571 if (block != NULL_TREE
4572 && !pointer_set_contains (blocks, block))
4573 {
4574 error ("location references block not in block tree");
4575 return true;
4576 }
4577 if (block != NULL_TREE)
4578 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4579 return false;
4580 }
4581
4582 /* Called via walk_tree. Verify that expressions have no blocks. */
4583
4584 static tree
4585 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4586 {
4587 if (!EXPR_P (*tp))
4588 {
4589 *walk_subtrees = false;
4590 return NULL;
4591 }
4592
4593 location_t loc = EXPR_LOCATION (*tp);
4594 if (LOCATION_BLOCK (loc) != NULL)
4595 return *tp;
4596
4597 return NULL;
4598 }
4599
4600 /* Called via walk_tree. Verify locations of expressions. */
4601
4602 static tree
4603 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4604 {
4605 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4606
4607 if (TREE_CODE (*tp) == VAR_DECL
4608 && DECL_HAS_DEBUG_EXPR_P (*tp))
4609 {
4610 tree t = DECL_DEBUG_EXPR (*tp);
4611 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4612 if (addr)
4613 return addr;
4614 }
4615 if ((TREE_CODE (*tp) == VAR_DECL
4616 || TREE_CODE (*tp) == PARM_DECL
4617 || TREE_CODE (*tp) == RESULT_DECL)
4618 && DECL_HAS_VALUE_EXPR_P (*tp))
4619 {
4620 tree t = DECL_VALUE_EXPR (*tp);
4621 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4622 if (addr)
4623 return addr;
4624 }
4625
4626 if (!EXPR_P (*tp))
4627 {
4628 *walk_subtrees = false;
4629 return NULL;
4630 }
4631
4632 location_t loc = EXPR_LOCATION (*tp);
4633 if (verify_location (blocks, loc))
4634 return *tp;
4635
4636 return NULL;
4637 }
4638
4639 /* Called via walk_gimple_op. Verify locations of expressions. */
4640
4641 static tree
4642 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4643 {
4644 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4645 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4646 }
4647
4648 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4649
4650 static void
4651 collect_subblocks (pointer_set_t *blocks, tree block)
4652 {
4653 tree t;
4654 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4655 {
4656 pointer_set_insert (blocks, t);
4657 collect_subblocks (blocks, t);
4658 }
4659 }
4660
4661 /* Verify the GIMPLE statements in the CFG of FN. */
4662
4663 DEBUG_FUNCTION void
4664 verify_gimple_in_cfg (struct function *fn)
4665 {
4666 basic_block bb;
4667 bool err = false;
4668 struct pointer_set_t *visited, *visited_stmts, *blocks;
4669
4670 timevar_push (TV_TREE_STMT_VERIFY);
4671 visited = pointer_set_create ();
4672 visited_stmts = pointer_set_create ();
4673
4674 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4675 blocks = pointer_set_create ();
4676 if (DECL_INITIAL (fn->decl))
4677 {
4678 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4679 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4680 }
4681
4682 FOR_EACH_BB_FN (bb, fn)
4683 {
4684 gimple_stmt_iterator gsi;
4685
4686 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4687 {
4688 gimple phi = gsi_stmt (gsi);
4689 bool err2 = false;
4690 unsigned i;
4691
4692 pointer_set_insert (visited_stmts, phi);
4693
4694 if (gimple_bb (phi) != bb)
4695 {
4696 error ("gimple_bb (phi) is set to a wrong basic block");
4697 err2 = true;
4698 }
4699
4700 err2 |= verify_gimple_phi (phi);
4701
4702 /* Only PHI arguments have locations. */
4703 if (gimple_location (phi) != UNKNOWN_LOCATION)
4704 {
4705 error ("PHI node with location");
4706 err2 = true;
4707 }
4708
4709 for (i = 0; i < gimple_phi_num_args (phi); i++)
4710 {
4711 tree arg = gimple_phi_arg_def (phi, i);
4712 tree addr = walk_tree (&arg, verify_node_sharing_1,
4713 visited, NULL);
4714 if (addr)
4715 {
4716 error ("incorrect sharing of tree nodes");
4717 debug_generic_expr (addr);
4718 err2 |= true;
4719 }
4720 location_t loc = gimple_phi_arg_location (phi, i);
4721 if (virtual_operand_p (gimple_phi_result (phi))
4722 && loc != UNKNOWN_LOCATION)
4723 {
4724 error ("virtual PHI with argument locations");
4725 err2 = true;
4726 }
4727 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4728 if (addr)
4729 {
4730 debug_generic_expr (addr);
4731 err2 = true;
4732 }
4733 err2 |= verify_location (blocks, loc);
4734 }
4735
4736 if (err2)
4737 debug_gimple_stmt (phi);
4738 err |= err2;
4739 }
4740
4741 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4742 {
4743 gimple stmt = gsi_stmt (gsi);
4744 bool err2 = false;
4745 struct walk_stmt_info wi;
4746 tree addr;
4747 int lp_nr;
4748
4749 pointer_set_insert (visited_stmts, stmt);
4750
4751 if (gimple_bb (stmt) != bb)
4752 {
4753 error ("gimple_bb (stmt) is set to a wrong basic block");
4754 err2 = true;
4755 }
4756
4757 err2 |= verify_gimple_stmt (stmt);
4758 err2 |= verify_location (blocks, gimple_location (stmt));
4759
4760 memset (&wi, 0, sizeof (wi));
4761 wi.info = (void *) visited;
4762 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4763 if (addr)
4764 {
4765 error ("incorrect sharing of tree nodes");
4766 debug_generic_expr (addr);
4767 err2 |= true;
4768 }
4769
4770 memset (&wi, 0, sizeof (wi));
4771 wi.info = (void *) blocks;
4772 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4773 if (addr)
4774 {
4775 debug_generic_expr (addr);
4776 err2 |= true;
4777 }
4778
4779 /* ??? Instead of not checking these stmts at all the walker
4780 should know its context via wi. */
4781 if (!is_gimple_debug (stmt)
4782 && !is_gimple_omp (stmt))
4783 {
4784 memset (&wi, 0, sizeof (wi));
4785 addr = walk_gimple_op (stmt, verify_expr, &wi);
4786 if (addr)
4787 {
4788 debug_generic_expr (addr);
4789 inform (gimple_location (stmt), "in statement");
4790 err2 |= true;
4791 }
4792 }
4793
4794 /* If the statement is marked as part of an EH region, then it is
4795 expected that the statement could throw. Verify that when we
4796 have optimizations that simplify statements such that we prove
4797 that they cannot throw, that we update other data structures
4798 to match. */
4799 lp_nr = lookup_stmt_eh_lp (stmt);
4800 if (lp_nr != 0)
4801 {
4802 if (!stmt_could_throw_p (stmt))
4803 {
4804 error ("statement marked for throw, but doesn%'t");
4805 err2 |= true;
4806 }
4807 else if (lp_nr > 0
4808 && !gsi_one_before_end_p (gsi)
4809 && stmt_can_throw_internal (stmt))
4810 {
4811 error ("statement marked for throw in middle of block");
4812 err2 |= true;
4813 }
4814 }
4815
4816 if (err2)
4817 debug_gimple_stmt (stmt);
4818 err |= err2;
4819 }
4820 }
4821
4822 eh_error_found = false;
4823 if (get_eh_throw_stmt_table (cfun))
4824 htab_traverse (get_eh_throw_stmt_table (cfun),
4825 verify_eh_throw_stmt_node,
4826 visited_stmts);
4827
4828 if (err || eh_error_found)
4829 internal_error ("verify_gimple failed");
4830
4831 pointer_set_destroy (visited);
4832 pointer_set_destroy (visited_stmts);
4833 pointer_set_destroy (blocks);
4834 verify_histograms ();
4835 timevar_pop (TV_TREE_STMT_VERIFY);
4836 }
4837
4838
4839 /* Verifies that the flow information is OK. */
4840
4841 static int
4842 gimple_verify_flow_info (void)
4843 {
4844 int err = 0;
4845 basic_block bb;
4846 gimple_stmt_iterator gsi;
4847 gimple stmt;
4848 edge e;
4849 edge_iterator ei;
4850
4851 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4852 {
4853 error ("ENTRY_BLOCK has IL associated with it");
4854 err = 1;
4855 }
4856
4857 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4858 {
4859 error ("EXIT_BLOCK has IL associated with it");
4860 err = 1;
4861 }
4862
4863 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4864 if (e->flags & EDGE_FALLTHRU)
4865 {
4866 error ("fallthru to exit from bb %d", e->src->index);
4867 err = 1;
4868 }
4869
4870 FOR_EACH_BB (bb)
4871 {
4872 bool found_ctrl_stmt = false;
4873
4874 stmt = NULL;
4875
4876 /* Skip labels on the start of basic block. */
4877 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4878 {
4879 tree label;
4880 gimple prev_stmt = stmt;
4881
4882 stmt = gsi_stmt (gsi);
4883
4884 if (gimple_code (stmt) != GIMPLE_LABEL)
4885 break;
4886
4887 label = gimple_label_label (stmt);
4888 if (prev_stmt && DECL_NONLOCAL (label))
4889 {
4890 error ("nonlocal label ");
4891 print_generic_expr (stderr, label, 0);
4892 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4893 bb->index);
4894 err = 1;
4895 }
4896
4897 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4898 {
4899 error ("EH landing pad label ");
4900 print_generic_expr (stderr, label, 0);
4901 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4902 bb->index);
4903 err = 1;
4904 }
4905
4906 if (label_to_block (label) != bb)
4907 {
4908 error ("label ");
4909 print_generic_expr (stderr, label, 0);
4910 fprintf (stderr, " to block does not match in bb %d",
4911 bb->index);
4912 err = 1;
4913 }
4914
4915 if (decl_function_context (label) != current_function_decl)
4916 {
4917 error ("label ");
4918 print_generic_expr (stderr, label, 0);
4919 fprintf (stderr, " has incorrect context in bb %d",
4920 bb->index);
4921 err = 1;
4922 }
4923 }
4924
4925 /* Verify that body of basic block BB is free of control flow. */
4926 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4927 {
4928 gimple stmt = gsi_stmt (gsi);
4929
4930 if (found_ctrl_stmt)
4931 {
4932 error ("control flow in the middle of basic block %d",
4933 bb->index);
4934 err = 1;
4935 }
4936
4937 if (stmt_ends_bb_p (stmt))
4938 found_ctrl_stmt = true;
4939
4940 if (gimple_code (stmt) == GIMPLE_LABEL)
4941 {
4942 error ("label ");
4943 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4944 fprintf (stderr, " in the middle of basic block %d", bb->index);
4945 err = 1;
4946 }
4947 }
4948
4949 gsi = gsi_last_bb (bb);
4950 if (gsi_end_p (gsi))
4951 continue;
4952
4953 stmt = gsi_stmt (gsi);
4954
4955 if (gimple_code (stmt) == GIMPLE_LABEL)
4956 continue;
4957
4958 err |= verify_eh_edges (stmt);
4959
4960 if (is_ctrl_stmt (stmt))
4961 {
4962 FOR_EACH_EDGE (e, ei, bb->succs)
4963 if (e->flags & EDGE_FALLTHRU)
4964 {
4965 error ("fallthru edge after a control statement in bb %d",
4966 bb->index);
4967 err = 1;
4968 }
4969 }
4970
4971 if (gimple_code (stmt) != GIMPLE_COND)
4972 {
4973 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
4974 after anything other than a GIMPLE_COND. */
4975 FOR_EACH_EDGE (e, ei, bb->succs)
4976 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4977 {
4978 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4979 bb->index);
4980 err = 1;
4981 }
4982 }
4983
4984 switch (gimple_code (stmt))
4985 {
4986 case GIMPLE_COND:
4987 {
4988 edge true_edge;
4989 edge false_edge;
4990
4991 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4992
4993 if (!true_edge
4994 || !false_edge
4995 || !(true_edge->flags & EDGE_TRUE_VALUE)
4996 || !(false_edge->flags & EDGE_FALSE_VALUE)
4997 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4998 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4999 || EDGE_COUNT (bb->succs) >= 3)
5000 {
5001 error ("wrong outgoing edge flags at end of bb %d",
5002 bb->index);
5003 err = 1;
5004 }
5005 }
5006 break;
5007
5008 case GIMPLE_GOTO:
5009 if (simple_goto_p (stmt))
5010 {
5011 error ("explicit goto at end of bb %d", bb->index);
5012 err = 1;
5013 }
5014 else
5015 {
5016 /* FIXME. We should double check that the labels in the
5017 destination blocks have their address taken. */
5018 FOR_EACH_EDGE (e, ei, bb->succs)
5019 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5020 | EDGE_FALSE_VALUE))
5021 || !(e->flags & EDGE_ABNORMAL))
5022 {
5023 error ("wrong outgoing edge flags at end of bb %d",
5024 bb->index);
5025 err = 1;
5026 }
5027 }
5028 break;
5029
5030 case GIMPLE_CALL:
5031 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5032 break;
5033 /* ... fallthru ... */
5034 case GIMPLE_RETURN:
5035 if (!single_succ_p (bb)
5036 || (single_succ_edge (bb)->flags
5037 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5038 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5039 {
5040 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5041 err = 1;
5042 }
5043 if (single_succ (bb) != EXIT_BLOCK_PTR)
5044 {
5045 error ("return edge does not point to exit in bb %d",
5046 bb->index);
5047 err = 1;
5048 }
5049 break;
5050
5051 case GIMPLE_SWITCH:
5052 {
5053 tree prev;
5054 edge e;
5055 size_t i, n;
5056
5057 n = gimple_switch_num_labels (stmt);
5058
5059 /* Mark all the destination basic blocks. */
5060 for (i = 0; i < n; ++i)
5061 {
5062 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5063 basic_block label_bb = label_to_block (lab);
5064 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5065 label_bb->aux = (void *)1;
5066 }
5067
5068 /* Verify that the case labels are sorted. */
5069 prev = gimple_switch_label (stmt, 0);
5070 for (i = 1; i < n; ++i)
5071 {
5072 tree c = gimple_switch_label (stmt, i);
5073 if (!CASE_LOW (c))
5074 {
5075 error ("found default case not at the start of "
5076 "case vector");
5077 err = 1;
5078 continue;
5079 }
5080 if (CASE_LOW (prev)
5081 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5082 {
5083 error ("case labels not sorted: ");
5084 print_generic_expr (stderr, prev, 0);
5085 fprintf (stderr, " is greater than ");
5086 print_generic_expr (stderr, c, 0);
5087 fprintf (stderr, " but comes before it.\n");
5088 err = 1;
5089 }
5090 prev = c;
5091 }
5092 /* VRP will remove the default case if it can prove it will
5093 never be executed. So do not verify there always exists
5094 a default case here. */
5095
5096 FOR_EACH_EDGE (e, ei, bb->succs)
5097 {
5098 if (!e->dest->aux)
5099 {
5100 error ("extra outgoing edge %d->%d",
5101 bb->index, e->dest->index);
5102 err = 1;
5103 }
5104
5105 e->dest->aux = (void *)2;
5106 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5107 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5108 {
5109 error ("wrong outgoing edge flags at end of bb %d",
5110 bb->index);
5111 err = 1;
5112 }
5113 }
5114
5115 /* Check that we have all of them. */
5116 for (i = 0; i < n; ++i)
5117 {
5118 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5119 basic_block label_bb = label_to_block (lab);
5120
5121 if (label_bb->aux != (void *)2)
5122 {
5123 error ("missing edge %i->%i", bb->index, label_bb->index);
5124 err = 1;
5125 }
5126 }
5127
5128 FOR_EACH_EDGE (e, ei, bb->succs)
5129 e->dest->aux = (void *)0;
5130 }
5131 break;
5132
5133 case GIMPLE_EH_DISPATCH:
5134 err |= verify_eh_dispatch_edge (stmt);
5135 break;
5136
5137 default:
5138 break;
5139 }
5140 }
5141
5142 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5143 verify_dominators (CDI_DOMINATORS);
5144
5145 return err;
5146 }
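/* For illustration: the GIMPLE_SWITCH check above uses BB->aux as a
   two-phase mark.  Every case destination is first marked with 1;
   each outgoing edge then promotes its destination to 2 (an unmarked
   destination is an extra edge); finally, every case label whose block
   is not marked 2 reveals a missing edge, after which the marks are
   cleared again.  */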
5147
5148
5149 /* Updates phi nodes after creating a forwarder block joined
5150 by edge FALLTHRU. */
5151
5152 static void
5153 gimple_make_forwarder_block (edge fallthru)
5154 {
5155 edge e;
5156 edge_iterator ei;
5157 basic_block dummy, bb;
5158 tree var;
5159 gimple_stmt_iterator gsi;
5160
5161 dummy = fallthru->src;
5162 bb = fallthru->dest;
5163
5164 if (single_pred_p (bb))
5165 return;
5166
5167 /* If we redirected a branch we must create new PHI nodes at the
5168 start of BB. */
5169 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5170 {
5171 gimple phi, new_phi;
5172
5173 phi = gsi_stmt (gsi);
5174 var = gimple_phi_result (phi);
5175 new_phi = create_phi_node (var, bb);
5176 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5177 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5178 UNKNOWN_LOCATION);
5179 }
5180
5181 /* Add the arguments we have stored on edges. */
5182 FOR_EACH_EDGE (e, ei, bb->preds)
5183 {
5184 if (e == fallthru)
5185 continue;
5186
5187 flush_pending_stmts (e);
5188 }
5189 }
5190
5191
5192 /* Return a non-special label at the head of basic block BB.
5193 Create one if it doesn't exist. */
5194
5195 tree
5196 gimple_block_label (basic_block bb)
5197 {
5198 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5199 bool first = true;
5200 tree label;
5201 gimple stmt;
5202
5203 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5204 {
5205 stmt = gsi_stmt (i);
5206 if (gimple_code (stmt) != GIMPLE_LABEL)
5207 break;
5208 label = gimple_label_label (stmt);
5209 if (!DECL_NONLOCAL (label))
5210 {
5211 if (!first)
5212 gsi_move_before (&i, &s);
5213 return label;
5214 }
5215 }
5216
5217 label = create_artificial_label (UNKNOWN_LOCATION);
5218 stmt = gimple_build_label (label);
5219 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5220 return label;
5221 }
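/* Typical use: passes that need an explicit jump target for a block,
   such as the GIMPLE_SWITCH and GIMPLE_ASM cases of
   gimple_redirect_edge_and_branch below, do

     tree label = gimple_block_label (dest);

   and store LABEL into the relevant CASE_LABEL or asm label
   operand.  */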
5222
5223
5224 /* Attempt to perform edge redirection by replacing a possibly complex
5225 jump instruction by a goto or by removing the jump completely.
5226 This can apply only if all edges now point to the same block. The
5227 parameters and return values are equivalent to
5228 redirect_edge_and_branch. */
5229
5230 static edge
5231 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5232 {
5233 basic_block src = e->src;
5234 gimple_stmt_iterator i;
5235 gimple stmt;
5236
5237 /* We can replace or remove a complex jump only when we have exactly
5238 two edges. */
5239 if (EDGE_COUNT (src->succs) != 2
5240 /* Verify that all targets will be TARGET. Specifically, the
5241 edge that is not E must also go to TARGET. */
5242 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5243 return NULL;
5244
5245 i = gsi_last_bb (src);
5246 if (gsi_end_p (i))
5247 return NULL;
5248
5249 stmt = gsi_stmt (i);
5250
5251 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5252 {
5253 gsi_remove (&i, true);
5254 e = ssa_redirect_edge (e, target);
5255 e->flags = EDGE_FALLTHRU;
5256 return e;
5257 }
5258
5259 return NULL;
5260 }
5261
5262
5263 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5264 edge representing the redirected branch. */
5265
5266 static edge
5267 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5268 {
5269 basic_block bb = e->src;
5270 gimple_stmt_iterator gsi;
5271 edge ret;
5272 gimple stmt;
5273
5274 if (e->flags & EDGE_ABNORMAL)
5275 return NULL;
5276
5277 if (e->dest == dest)
5278 return NULL;
5279
5280 if (e->flags & EDGE_EH)
5281 return redirect_eh_edge (e, dest);
5282
5283 if (e->src != ENTRY_BLOCK_PTR)
5284 {
5285 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5286 if (ret)
5287 return ret;
5288 }
5289
5290 gsi = gsi_last_bb (bb);
5291 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5292
5293 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5294 {
5295 case GIMPLE_COND:
5296 /* For COND_EXPR, we only need to redirect the edge. */
5297 break;
5298
5299 case GIMPLE_GOTO:
5300 /* No non-abnormal edges should lead from a non-simple goto, and
5301 simple ones should be represented implicitly. */
5302 gcc_unreachable ();
5303
5304 case GIMPLE_SWITCH:
5305 {
5306 tree label = gimple_block_label (dest);
5307 tree cases = get_cases_for_edge (e, stmt);
5308
5309 /* If we have a list of cases associated with E, then use it
5310 as it's a lot faster than walking the entire case vector. */
5311 if (cases)
5312 {
5313 edge e2 = find_edge (e->src, dest);
5314 tree last, first;
5315
5316 first = cases;
5317 while (cases)
5318 {
5319 last = cases;
5320 CASE_LABEL (cases) = label;
5321 cases = CASE_CHAIN (cases);
5322 }
5323
5324 /* If there was already an edge in the CFG, then we need
5325 to move all the cases associated with E to E2. */
5326 if (e2)
5327 {
5328 tree cases2 = get_cases_for_edge (e2, stmt);
5329
5330 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5331 CASE_CHAIN (cases2) = first;
5332 }
5333 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5334 }
5335 else
5336 {
5337 size_t i, n = gimple_switch_num_labels (stmt);
5338
5339 for (i = 0; i < n; i++)
5340 {
5341 tree elt = gimple_switch_label (stmt, i);
5342 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5343 CASE_LABEL (elt) = label;
5344 }
5345 }
5346 }
5347 break;
5348
5349 case GIMPLE_ASM:
5350 {
5351 int i, n = gimple_asm_nlabels (stmt);
5352 tree label = NULL;
5353
5354 for (i = 0; i < n; ++i)
5355 {
5356 tree cons = gimple_asm_label_op (stmt, i);
5357 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5358 {
5359 if (!label)
5360 label = gimple_block_label (dest);
5361 TREE_VALUE (cons) = label;
5362 }
5363 }
5364
5365 /* If we didn't find any label matching the former edge in the
5366 asm labels, we must be redirecting the fallthrough
5367 edge. */
5368 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5369 }
5370 break;
5371
5372 case GIMPLE_RETURN:
5373 gsi_remove (&gsi, true);
5374 e->flags |= EDGE_FALLTHRU;
5375 break;
5376
5377 case GIMPLE_OMP_RETURN:
5378 case GIMPLE_OMP_CONTINUE:
5379 case GIMPLE_OMP_SECTIONS_SWITCH:
5380 case GIMPLE_OMP_FOR:
5381 /* The edges from OMP constructs can be simply redirected. */
5382 break;
5383
5384 case GIMPLE_EH_DISPATCH:
5385 if (!(e->flags & EDGE_FALLTHRU))
5386 redirect_eh_dispatch_edge (stmt, e, dest);
5387 break;
5388
5389 case GIMPLE_TRANSACTION:
5390 /* The ABORT edge has a stored label associated with it, otherwise
5391 the edges are simply redirectable. */
5392 if (e->flags == 0)
5393 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5394 break;
5395
5396 default:
5397 /* Otherwise it must be a fallthru edge, and we don't need to
5398 do anything besides redirecting it. */
5399 gcc_assert (e->flags & EDGE_FALLTHRU);
5400 break;
5401 }
5402
5403 /* Update/insert PHI nodes as necessary. */
5404
5405 /* Now update the edges in the CFG. */
5406 e = ssa_redirect_edge (e, dest);
5407
5408 return e;
5409 }
5410
5411 /* Returns true if it is possible to remove edge E by redirecting
5412 it to the destination of the other edge from E->src. */
5413
5414 static bool
5415 gimple_can_remove_branch_p (const_edge e)
5416 {
5417 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5418 return false;
5419
5420 return true;
5421 }
5422
5423 /* Simple wrapper, as we can always redirect fallthru edges. */
5424
5425 static basic_block
5426 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5427 {
5428 e = gimple_redirect_edge_and_branch (e, dest);
5429 gcc_assert (e);
5430
5431 return NULL;
5432 }
5433
5434
5435 /* Splits basic block BB after statement STMT (but at least after the
5436 labels). If STMT is NULL, BB is split just after the labels. */
5437
5438 static basic_block
5439 gimple_split_block (basic_block bb, void *stmt)
5440 {
5441 gimple_stmt_iterator gsi;
5442 gimple_stmt_iterator gsi_tgt;
5443 gimple act;
5444 gimple_seq list;
5445 basic_block new_bb;
5446 edge e;
5447 edge_iterator ei;
5448
5449 new_bb = create_empty_bb (bb);
5450
5451 /* Redirect the outgoing edges. */
5452 new_bb->succs = bb->succs;
5453 bb->succs = NULL;
5454 FOR_EACH_EDGE (e, ei, new_bb->succs)
5455 e->src = new_bb;
5456
5457 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5458 stmt = NULL;
5459
5460 /* Move everything from GSI to the new basic block. */
5461 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5462 {
5463 act = gsi_stmt (gsi);
5464 if (gimple_code (act) == GIMPLE_LABEL)
5465 continue;
5466
5467 if (!stmt)
5468 break;
5469
5470 if (stmt == act)
5471 {
5472 gsi_next (&gsi);
5473 break;
5474 }
5475 }
5476
5477 if (gsi_end_p (gsi))
5478 return new_bb;
5479
5480 /* Split the statement list - avoid re-creating new containers as this
5481 brings ugly quadratic memory consumption in the inliner.
5482 (We are still quadratic since we need to update stmt BB pointers,
5483 sadly.) */
5484 gsi_split_seq_before (&gsi, &list);
5485 set_bb_seq (new_bb, list);
5486 for (gsi_tgt = gsi_start (list);
5487 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5488 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5489
5490 return new_bb;
5491 }
5492
5493
5494 /* Moves basic block BB after block AFTER. */
5495
5496 static bool
5497 gimple_move_block_after (basic_block bb, basic_block after)
5498 {
5499 if (bb->prev_bb == after)
5500 return true;
5501
5502 unlink_block (bb);
5503 link_block (bb, after);
5504
5505 return true;
5506 }
5507
5508
5509 /* Return TRUE if block BB has no executable statements, otherwise return
5510 FALSE. */
5511
5512 static bool
5513 gimple_empty_block_p (basic_block bb)
5514 {
5515 /* BB must have no executable statements. */
5516 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5517 if (phi_nodes (bb))
5518 return false;
5519 if (gsi_end_p (gsi))
5520 return true;
5521 if (is_gimple_debug (gsi_stmt (gsi)))
5522 gsi_next_nondebug (&gsi);
5523 return gsi_end_p (gsi);
5524 }
5525
5526
5527 /* Split a basic block if it ends with a conditional branch and if the
5528 other part of the block is not empty. */
5529
5530 static basic_block
5531 gimple_split_block_before_cond_jump (basic_block bb)
5532 {
5533 gimple last, split_point;
5534 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5535 if (gsi_end_p (gsi))
5536 return NULL;
5537 last = gsi_stmt (gsi);
5538 if (gimple_code (last) != GIMPLE_COND
5539 && gimple_code (last) != GIMPLE_SWITCH)
5540 return NULL;
5541 gsi_prev_nondebug (&gsi);
5542 split_point = gsi_stmt (gsi);
5543 return split_block (bb, split_point)->dest;
5544 }
5545
5546
5547 /* Return true if basic block BB can be duplicated. */
5548
5549 static bool
5550 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5551 {
5552 return true;
5553 }
5554
5555 /* Create a duplicate of the basic block BB. NOTE: This does not
5556 preserve SSA form. */
5557
5558 static basic_block
5559 gimple_duplicate_bb (basic_block bb)
5560 {
5561 basic_block new_bb;
5562 gimple_stmt_iterator gsi, gsi_tgt;
5563 gimple_seq phis = phi_nodes (bb);
5564 gimple phi, stmt, copy;
5565
5566 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5567
5568 /* Copy the PHI nodes. We ignore PHI node arguments here because
5569 the incoming edges have not been setup yet. */
5570 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5571 {
5572 phi = gsi_stmt (gsi);
5573 copy = create_phi_node (NULL_TREE, new_bb);
5574 create_new_def_for (gimple_phi_result (phi), copy,
5575 gimple_phi_result_ptr (copy));
5576 gimple_set_uid (copy, gimple_uid (phi));
5577 }
5578
5579 gsi_tgt = gsi_start_bb (new_bb);
5580 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5581 {
5582 def_operand_p def_p;
5583 ssa_op_iter op_iter;
5584 tree lhs;
5585
5586 stmt = gsi_stmt (gsi);
5587 if (gimple_code (stmt) == GIMPLE_LABEL)
5588 continue;
5589
5590 /* Don't duplicate label debug stmts. */
5591 if (gimple_debug_bind_p (stmt)
5592 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5593 == LABEL_DECL)
5594 continue;
5595
5596 /* Create a new copy of STMT and duplicate STMT's virtual
5597 operands. */
5598 copy = gimple_copy (stmt);
5599 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5600
5601 maybe_duplicate_eh_stmt (copy, stmt);
5602 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5603
5604 /* When copying around a stmt writing into a local non-user
5605 aggregate, make sure it won't share stack slot with other
5606 vars. */
5607 lhs = gimple_get_lhs (stmt);
5608 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5609 {
5610 tree base = get_base_address (lhs);
5611 if (base
5612 && (TREE_CODE (base) == VAR_DECL
5613 || TREE_CODE (base) == RESULT_DECL)
5614 && DECL_IGNORED_P (base)
5615 && !TREE_STATIC (base)
5616 && !DECL_EXTERNAL (base)
5617 && (TREE_CODE (base) != VAR_DECL
5618 || !DECL_HAS_VALUE_EXPR_P (base)))
5619 DECL_NONSHAREABLE (base) = 1;
5620 }
5621
5622 /* Create new names for all the definitions created by COPY and
5623 add replacement mappings for each new name. */
5624 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5625 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5626 }
5627
5628 return new_bb;
5629 }
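/* Note for illustration: the duplicate produced above has PHI nodes
   with fresh results but no arguments, since its incoming edges do not
   exist yet.  Callers are expected to create the edges and then use
   the add_phi_args_after_copy machinery below to fill in the argument
   lists from the originals.  */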
5630
5631 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5632
5633 static void
5634 add_phi_args_after_copy_edge (edge e_copy)
5635 {
5636 basic_block bb, bb_copy = e_copy->src, dest;
5637 edge e;
5638 edge_iterator ei;
5639 gimple phi, phi_copy;
5640 tree def;
5641 gimple_stmt_iterator psi, psi_copy;
5642
5643 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5644 return;
5645
5646 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5647
5648 if (e_copy->dest->flags & BB_DUPLICATED)
5649 dest = get_bb_original (e_copy->dest);
5650 else
5651 dest = e_copy->dest;
5652
5653 e = find_edge (bb, dest);
5654 if (!e)
5655 {
5656 /* During loop unrolling the target of the latch edge is copied.
5657 In this case we are not looking for the edge to DEST, but for
5658 the edge to the duplicated block whose original was DEST. */
5659 FOR_EACH_EDGE (e, ei, bb->succs)
5660 {
5661 if ((e->dest->flags & BB_DUPLICATED)
5662 && get_bb_original (e->dest) == dest)
5663 break;
5664 }
5665
5666 gcc_assert (e != NULL);
5667 }
5668
5669 for (psi = gsi_start_phis (e->dest),
5670 psi_copy = gsi_start_phis (e_copy->dest);
5671 !gsi_end_p (psi);
5672 gsi_next (&psi), gsi_next (&psi_copy))
5673 {
5674 phi = gsi_stmt (psi);
5675 phi_copy = gsi_stmt (psi_copy);
5676 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5677 add_phi_arg (phi_copy, def, e_copy,
5678 gimple_phi_arg_location_from_edge (phi, e));
5679 }
5680 }
5681
5682
5683 /* Basic block BB_COPY was created by code duplication. Add phi node
5684 arguments for edges going out of BB_COPY. The blocks that were
5685 duplicated have BB_DUPLICATED set. */
5686
5687 void
5688 add_phi_args_after_copy_bb (basic_block bb_copy)
5689 {
5690 edge e_copy;
5691 edge_iterator ei;
5692
5693 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5694 {
5695 add_phi_args_after_copy_edge (e_copy);
5696 }
5697 }
5698
5699 /* Blocks in the REGION_COPY array of length N_REGION were created by
5700 duplication of basic blocks. Add phi node arguments for edges
5701 going out of these blocks. If E_COPY is not NULL, also add
5702 phi node arguments for its destination. */
5703
5704 void
5705 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5706 edge e_copy)
5707 {
5708 unsigned i;
5709
5710 for (i = 0; i < n_region; i++)
5711 region_copy[i]->flags |= BB_DUPLICATED;
5712
5713 for (i = 0; i < n_region; i++)
5714 add_phi_args_after_copy_bb (region_copy[i]);
5715 if (e_copy)
5716 add_phi_args_after_copy_edge (e_copy);
5717
5718 for (i = 0; i < n_region; i++)
5719 region_copy[i]->flags &= ~BB_DUPLICATED;
5720 }
5721
5722 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5723 important exit edge EXIT. By important we mean that no SSA name defined
5724 inside the region is live over the other exit edges of the region. All entry
5725 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5726 to the duplicate of the region. Dominance and loop information is
5727 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5728 UPDATE_DOMINANCE is false then we assume that the caller will update the
5729 dominance information after calling this function. The new basic
5730 blocks are stored to REGION_COPY in the same order as they had in REGION,
5731 provided that REGION_COPY is not NULL.
5732 The function returns false if it is unable to copy the region,
5733 true otherwise. */
5734
5735 bool
5736 gimple_duplicate_sese_region (edge entry, edge exit,
5737 basic_block *region, unsigned n_region,
5738 basic_block *region_copy,
5739 bool update_dominance)
5740 {
5741 unsigned i;
5742 bool free_region_copy = false, copying_header = false;
5743 struct loop *loop = entry->dest->loop_father;
5744 edge exit_copy;
5745 vec<basic_block> doms;
5746 edge redirected;
5747 int total_freq = 0, entry_freq = 0;
5748 gcov_type total_count = 0, entry_count = 0;
5749
5750 if (!can_copy_bbs_p (region, n_region))
5751 return false;
5752
5753 /* Some sanity checking. Note that we do not check for all possible
5754 misuses of the function. I.e. if you ask to copy something weird,
5755 it will work, but the state of the structures probably will not be
5756 correct. */
5757 for (i = 0; i < n_region; i++)
5758 {
5759 /* We do not handle subloops, i.e. all the blocks must belong to the
5760 same loop. */
5761 if (region[i]->loop_father != loop)
5762 return false;
5763
5764 if (region[i] != entry->dest
5765 && region[i] == loop->header)
5766 return false;
5767 }
5768
5769 set_loop_copy (loop, loop);
5770
5771 /* In case the function is used for loop header copying (which is the primary
5772 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5773 if (loop->header == entry->dest)
5774 {
5775 copying_header = true;
5776 set_loop_copy (loop, loop_outer (loop));
5777
5778 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5779 return false;
5780
5781 for (i = 0; i < n_region; i++)
5782 if (region[i] != exit->src
5783 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5784 return false;
5785 }
5786
5787 if (!region_copy)
5788 {
5789 region_copy = XNEWVEC (basic_block, n_region);
5790 free_region_copy = true;
5791 }
5792
5793 initialize_original_copy_tables ();
5794
5795 /* Record blocks outside the region that are dominated by something
5796 inside. */
5797 if (update_dominance)
5798 {
5799 doms.create (0);
5800 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5801 }
5802
5803 if (entry->dest->count)
5804 {
5805 total_count = entry->dest->count;
5806 entry_count = entry->count;
5807 /* Fix up corner cases, to avoid division by zero or creation of negative
5808 frequencies. */
5809 if (entry_count > total_count)
5810 entry_count = total_count;
5811 }
5812 else
5813 {
5814 total_freq = entry->dest->frequency;
5815 entry_freq = EDGE_FREQUENCY (entry);
5816 /* Fix up corner cases, to avoid division by zero or creation of negative
5817 frequencies. */
5818 if (total_freq == 0)
5819 total_freq = 1;
5820 else if (entry_freq > total_freq)
5821 entry_freq = total_freq;
5822 }
5823
5824 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5825 split_edge_bb_loc (entry), update_dominance);
5826 if (total_count)
5827 {
5828 scale_bbs_frequencies_gcov_type (region, n_region,
5829 total_count - entry_count,
5830 total_count);
5831 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5832 total_count);
5833 }
5834 else
5835 {
5836 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5837 total_freq);
5838 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5839 }
5840
5841 if (copying_header)
5842 {
5843 loop->header = exit->dest;
5844 loop->latch = exit->src;
5845 }
5846
5847 /* Redirect the entry and add the phi node arguments. */
5848 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5849 gcc_assert (redirected != NULL);
5850 flush_pending_stmts (entry);
5851
5852 /* Concerning updating of dominators: We must recount dominators
5853 for the entry block and its copy. Anything that is outside of the
5854 region, but was dominated by something inside needs recounting as
5855 well. */
5856 if (update_dominance)
5857 {
5858 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5859 doms.safe_push (get_bb_original (entry->dest));
5860 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5861 doms.release ();
5862 }
5863
5864 /* Add the other PHI node arguments. */
5865 add_phi_args_after_copy (region_copy, n_region, NULL);
5866
5867 if (free_region_copy)
5868 free (region_copy);
5869
5870 free_original_copy_tables ();
5871 return true;
5872 }
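/* For illustration: the primary client of the function above is loop
   header copying, which in effect rewrites

     while (cond) body;

   into

     if (cond) { do body; while (cond); }

   by duplicating the header region, so that the copied test guards
   loop entry and the original test becomes the latch test.  */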
5873
5874 /* Checks if BB is part of the region defined by N_REGION BBS. */
5875 static bool
5876 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5877 {
5878 unsigned int n;
5879
5880 for (n = 0; n < n_region; n++)
5881 {
5882 if (bb == bbs[n])
5883 return true;
5884 }
5885 return false;
5886 }
5887
5888 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5889 are stored to REGION_COPY in the same order as they appear
5890 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5891 the region, EXIT an exit from it. The condition guarding EXIT
5892 is moved to ENTRY. Returns true if duplication succeeds, false
5893 otherwise.
5894
5895 For example,
5896
5897 some_code;
5898 if (cond)
5899 A;
5900 else
5901 B;
5902
5903 is transformed to
5904
5905 if (cond)
5906 {
5907 some_code;
5908 A;
5909 }
5910 else
5911 {
5912 some_code;
5913 B;
5914 }
5915 */
5916
5917 bool
5918 gimple_duplicate_sese_tail (edge entry, edge exit,
5919 basic_block *region, unsigned n_region,
5920 basic_block *region_copy)
5921 {
5922 unsigned i;
5923 bool free_region_copy = false;
5924 struct loop *loop = exit->dest->loop_father;
5925 struct loop *orig_loop = entry->dest->loop_father;
5926 basic_block switch_bb, entry_bb, nentry_bb;
5927 vec<basic_block> doms;
5928 int total_freq = 0, exit_freq = 0;
5929 gcov_type total_count = 0, exit_count = 0;
5930 edge exits[2], nexits[2], e;
5931 gimple_stmt_iterator gsi;
5932 gimple cond_stmt;
5933 edge sorig, snew;
5934 basic_block exit_bb;
5935 gimple_stmt_iterator psi;
5936 gimple phi;
5937 tree def;
5938 struct loop *target, *aloop, *cloop;
5939
5940 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5941 exits[0] = exit;
5942 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5943
5944 if (!can_copy_bbs_p (region, n_region))
5945 return false;
5946
5947 initialize_original_copy_tables ();
5948 set_loop_copy (orig_loop, loop);
5949
5950 target = loop;
5951 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5952 {
5953 if (bb_part_of_region_p (aloop->header, region, n_region))
5954 {
5955 cloop = duplicate_loop (aloop, target);
5956 duplicate_subloops (aloop, cloop);
5957 }
5958 }
5959
5960 if (!region_copy)
5961 {
5962 region_copy = XNEWVEC (basic_block, n_region);
5963 free_region_copy = true;
5964 }
5965
5966 gcc_assert (!need_ssa_update_p (cfun));
5967
5968 /* Record blocks outside the region that are dominated by something
5969 inside. */
5970 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5971
5972 if (exit->src->count)
5973 {
5974 total_count = exit->src->count;
5975 exit_count = exit->count;
5976 /* Fix up corner cases, to avoid division by zero or creation of negative
5977 frequencies. */
5978 if (exit_count > total_count)
5979 exit_count = total_count;
5980 }
5981 else
5982 {
5983 total_freq = exit->src->frequency;
5984 exit_freq = EDGE_FREQUENCY (exit);
5985 /* Fix up corner cases, to avoid division by zero or creation of negative
5986 frequencies. */
5987 if (total_freq == 0)
5988 total_freq = 1;
5989 if (exit_freq > total_freq)
5990 exit_freq = total_freq;
5991 }
5992
5993 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5994 split_edge_bb_loc (exit), true);
5995 if (total_count)
5996 {
5997 scale_bbs_frequencies_gcov_type (region, n_region,
5998 total_count - exit_count,
5999 total_count);
6000 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6001 total_count);
6002 }
6003 else
6004 {
6005 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6006 total_freq);
6007 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6008 }
6009
6010 /* Create the switch block, and put the exit condition into it. */
6011 entry_bb = entry->dest;
6012 nentry_bb = get_bb_copy (entry_bb);
6013 if (!last_stmt (entry->src)
6014 || !stmt_ends_bb_p (last_stmt (entry->src)))
6015 switch_bb = entry->src;
6016 else
6017 switch_bb = split_edge (entry);
6018 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6019
6020 gsi = gsi_last_bb (switch_bb);
6021 cond_stmt = last_stmt (exit->src);
6022 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6023 cond_stmt = gimple_copy (cond_stmt);
6024
6025 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6026
6027 sorig = single_succ_edge (switch_bb);
6028 sorig->flags = exits[1]->flags;
6029 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6030
6031 /* Register the new edge from SWITCH_BB in loop exit lists. */
6032 rescan_loop_exit (snew, true, false);
6033
6034 /* Add the PHI node arguments. */
6035 add_phi_args_after_copy (region_copy, n_region, snew);
6036
6037 /* Get rid of now superfluous conditions and associated edges (and phi node
6038 arguments). */
6039 exit_bb = exit->dest;
6040
6041 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6042 PENDING_STMT (e) = NULL;
6043
6044 /* The latch of ORIG_LOOP was copied, and so was the backedge
6045 to the original header. We redirect this backedge to EXIT_BB. */
6046 for (i = 0; i < n_region; i++)
6047 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6048 {
6049 gcc_assert (single_succ_edge (region_copy[i]));
6050 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6051 PENDING_STMT (e) = NULL;
6052 for (psi = gsi_start_phis (exit_bb);
6053 !gsi_end_p (psi);
6054 gsi_next (&psi))
6055 {
6056 phi = gsi_stmt (psi);
6057 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6058 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6059 }
6060 }
6061 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6062 PENDING_STMT (e) = NULL;
6063
6064 /* Anything that is outside of the region, but was dominated by something
6065 inside needs to update dominance info. */
6066 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6067 doms.release ();
6068 /* Update the SSA web. */
6069 update_ssa (TODO_update_ssa);
6070
6071 if (free_region_copy)
6072 free (region_copy);
6073
6074 free_original_copy_tables ();
6075 return true;
6076 }
6077
6078 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6079 adding blocks when the dominator traversal reaches EXIT. This
6080 function silently assumes that ENTRY strictly dominates EXIT. */
6081
6082 void
6083 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6084 vec<basic_block> *bbs_p)
6085 {
6086 basic_block son;
6087
6088 for (son = first_dom_son (CDI_DOMINATORS, entry);
6089 son;
6090 son = next_dom_son (CDI_DOMINATORS, son))
6091 {
6092 bbs_p->safe_push (son);
6093 if (son != exit)
6094 gather_blocks_in_sese_region (son, exit, bbs_p);
6095 }
6096 }
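
/* Example usage (a sketch, mirroring move_sese_region_to_fn below);
   note that ENTRY_BB itself is never pushed by the traversal and must
   be added by the caller:

     vec<basic_block> bbs = vNULL;
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ...
     bbs.release ();  */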
6097
6098 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6099 The duplicates are recorded in VARS_MAP. */
6100
6101 static void
6102 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6103 tree to_context)
6104 {
6105 tree t = *tp, new_t;
6106 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6107 void **loc;
6108
6109 if (DECL_CONTEXT (t) == to_context)
6110 return;
6111
6112 loc = pointer_map_contains (vars_map, t);
6113
6114 if (!loc)
6115 {
6116 loc = pointer_map_insert (vars_map, t);
6117
6118 if (SSA_VAR_P (t))
6119 {
6120 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6121 add_local_decl (f, new_t);
6122 }
6123 else
6124 {
6125 gcc_assert (TREE_CODE (t) == CONST_DECL);
6126 new_t = copy_node (t);
6127 }
6128 DECL_CONTEXT (new_t) = to_context;
6129
6130 *loc = new_t;
6131 }
6132 else
6133 new_t = (tree) *loc;
6134
6135 *tp = new_t;
6136 }
6137
6138
6139 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6140 VARS_MAP maps old ssa names and var_decls to the new ones. */
6141
6142 static tree
6143 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6144 tree to_context)
6145 {
6146 void **loc;
6147 tree new_name;
6148
6149 gcc_assert (!virtual_operand_p (name));
6150
6151 loc = pointer_map_contains (vars_map, name);
6152
6153 if (!loc)
6154 {
6155 tree decl = SSA_NAME_VAR (name);
6156 if (decl)
6157 {
6158 replace_by_duplicate_decl (&decl, vars_map, to_context);
6159 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6160 decl, SSA_NAME_DEF_STMT (name));
6161 if (SSA_NAME_IS_DEFAULT_DEF (name))
6162 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6163 decl, new_name);
6164 }
6165 else
6166 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6167 name, SSA_NAME_DEF_STMT (name));
6168
6169 loc = pointer_map_insert (vars_map, name);
6170 *loc = new_name;
6171 }
6172 else
6173 new_name = (tree) *loc;
6174
6175 return new_name;
6176 }
6177
6178 struct move_stmt_d
6179 {
6180 tree orig_block;
6181 tree new_block;
6182 tree from_context;
6183 tree to_context;
6184 struct pointer_map_t *vars_map;
6185 htab_t new_label_map;
6186 struct pointer_map_t *eh_map;
6187 bool remap_decls_p;
6188 };
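
/* A sketch of how this context is typically set up (compare the use
   in move_sese_region_to_fn below); all unset fields are zeroed:

     struct move_stmt_d d;
     memset (&d, 0, sizeof (d));
     d.orig_block = orig_block;
     d.new_block = DECL_INITIAL (dest_cfun->decl);
     d.from_context = cfun->decl;
     d.to_context = dest_cfun->decl;
     d.vars_map = vars_map;
     d.remap_decls_p = true;

   The walkers below retrieve it through walk_stmt_info's info field. */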
6189
6190 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6191 contained in *TP to NEW_BLOCK if it was ORIG_BLOCK before, and change
6192 the DECL_CONTEXT of every local variable referenced in *TP. */
6193
6194 static tree
6195 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6196 {
6197 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6198 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6199 tree t = *tp;
6200
6201 if (EXPR_P (t))
6202 {
6203 tree block = TREE_BLOCK (t);
6204 if (block == p->orig_block
6205 || (p->orig_block == NULL_TREE
6206 && block != NULL_TREE))
6207 TREE_SET_BLOCK (t, p->new_block);
6208 #ifdef ENABLE_CHECKING
6209 else if (block != NULL_TREE)
6210 {
6211 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6212 block = BLOCK_SUPERCONTEXT (block);
6213 gcc_assert (block == p->orig_block);
6214 }
6215 #endif
6216 }
6217 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6218 {
6219 if (TREE_CODE (t) == SSA_NAME)
6220 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6221 else if (TREE_CODE (t) == LABEL_DECL)
6222 {
6223 if (p->new_label_map)
6224 {
6225 struct tree_map in, *out;
6226 in.base.from = t;
6227 out = (struct tree_map *)
6228 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6229 if (out)
6230 *tp = t = out->to;
6231 }
6232
6233 DECL_CONTEXT (t) = p->to_context;
6234 }
6235 else if (p->remap_decls_p)
6236 {
6237 /* Replace T with its duplicate. T should no longer appear in the
6238 parent function, so this looks wasteful; however, it may appear
6239 in referenced_vars, and more importantly, as virtual operands of
6240 statements, and in alias lists of other variables. It would be
6241 quite difficult to expunge it from all those places. ??? It might
6242 suffice to do this for addressable variables. */
6243 if ((TREE_CODE (t) == VAR_DECL
6244 && !is_global_var (t))
6245 || TREE_CODE (t) == CONST_DECL)
6246 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6247 }
6248 *walk_subtrees = 0;
6249 }
6250 else if (TYPE_P (t))
6251 *walk_subtrees = 0;
6252
6253 return NULL_TREE;
6254 }
6255
6256 /* Helper for move_stmt_r. Given an EH region number for the source
6257 function, map that to the duplicate EH region number in the dest. */
6258
6259 static int
6260 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6261 {
6262 eh_region old_r, new_r;
6263 void **slot;
6264
6265 old_r = get_eh_region_from_number (old_nr);
6266 slot = pointer_map_contains (p->eh_map, old_r);
6267 new_r = (eh_region) *slot;
6268
6269 return new_r->index;
6270 }
6271
6272 /* Similar, but operate on INTEGER_CSTs. */
6273
6274 static tree
6275 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6276 {
6277 int old_nr, new_nr;
6278
6279 old_nr = tree_to_shwi (old_t_nr);
6280 new_nr = move_stmt_eh_region_nr (old_nr, p);
6281
6282 return build_int_cst (integer_type_node, new_nr);
6283 }
6284
6285 /* Like move_stmt_op, but for gimple statements.
6286
6287 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6288 contained in the current statement in *GSI_P and change the
6289 DECL_CONTEXT of every local variable referenced in the current
6290 statement. */
6291
6292 static tree
6293 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6294 struct walk_stmt_info *wi)
6295 {
6296 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6297 gimple stmt = gsi_stmt (*gsi_p);
6298 tree block = gimple_block (stmt);
6299
6300 if (block == p->orig_block
6301 || (p->orig_block == NULL_TREE
6302 && block != NULL_TREE))
6303 gimple_set_block (stmt, p->new_block);
6304
6305 switch (gimple_code (stmt))
6306 {
6307 case GIMPLE_CALL:
6308 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6309 {
6310 tree r, fndecl = gimple_call_fndecl (stmt);
6311 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6312 switch (DECL_FUNCTION_CODE (fndecl))
6313 {
6314 case BUILT_IN_EH_COPY_VALUES:
6315 r = gimple_call_arg (stmt, 1);
6316 r = move_stmt_eh_region_tree_nr (r, p);
6317 gimple_call_set_arg (stmt, 1, r);
6318 /* FALLTHRU */
6319
6320 case BUILT_IN_EH_POINTER:
6321 case BUILT_IN_EH_FILTER:
6322 r = gimple_call_arg (stmt, 0);
6323 r = move_stmt_eh_region_tree_nr (r, p);
6324 gimple_call_set_arg (stmt, 0, r);
6325 break;
6326
6327 default:
6328 break;
6329 }
6330 }
6331 break;
6332
6333 case GIMPLE_RESX:
6334 {
6335 int r = gimple_resx_region (stmt);
6336 r = move_stmt_eh_region_nr (r, p);
6337 gimple_resx_set_region (stmt, r);
6338 }
6339 break;
6340
6341 case GIMPLE_EH_DISPATCH:
6342 {
6343 int r = gimple_eh_dispatch_region (stmt);
6344 r = move_stmt_eh_region_nr (r, p);
6345 gimple_eh_dispatch_set_region (stmt, r);
6346 }
6347 break;
6348
6349 case GIMPLE_OMP_RETURN:
6350 case GIMPLE_OMP_CONTINUE:
6351 break;
6352 default:
6353 if (is_gimple_omp (stmt))
6354 {
6355 /* Do not remap variables inside OMP directives. Variables
6356 referenced in clauses and directive header belong to the
6357 parent function and should not be moved into the child
6358 function. */
6359 bool save_remap_decls_p = p->remap_decls_p;
6360 p->remap_decls_p = false;
6361 *handled_ops_p = true;
6362
6363 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6364 move_stmt_op, wi);
6365
6366 p->remap_decls_p = save_remap_decls_p;
6367 }
6368 break;
6369 }
6370
6371 return NULL_TREE;
6372 }
6373
6374 /* Move basic block BB from function CFUN to function DEST_FN. The
6375 block is moved out of the original linked list and placed after
6376 block AFTER in the new list. Also, the block is removed from the
6377 original array of blocks and placed in DEST_FN's array of blocks.
6378 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6379 updated to reflect the moved edges.
6380
6381 The local variables are remapped to new instances, VARS_MAP is used
6382 to record the mapping. */
6383
6384 static void
6385 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6386 basic_block after, bool update_edge_count_p,
6387 struct move_stmt_d *d)
6388 {
6389 struct control_flow_graph *cfg;
6390 edge_iterator ei;
6391 edge e;
6392 gimple_stmt_iterator si;
6393 unsigned old_len, new_len;
6394
6395 /* Remove BB from dominance structures. */
6396 delete_from_dominance_info (CDI_DOMINATORS, bb);
6397
6398 /* Move BB from its current loop to the copy in the new function. */
6399 if (current_loops)
6400 {
6401 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6402 if (new_loop)
6403 bb->loop_father = new_loop;
6404 }
6405
6406 /* Link BB to the new linked list. */
6407 move_block_after (bb, after);
6408
6409 /* Update the edge count in the corresponding flowgraphs. */
6410 if (update_edge_count_p)
6411 FOR_EACH_EDGE (e, ei, bb->succs)
6412 {
6413 cfun->cfg->x_n_edges--;
6414 dest_cfun->cfg->x_n_edges++;
6415 }
6416
6417 /* Remove BB from the original basic block array. */
6418 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6419 cfun->cfg->x_n_basic_blocks--;
6420
6421 /* Grow DEST_CFUN's basic block array if needed. */
6422 cfg = dest_cfun->cfg;
6423 cfg->x_n_basic_blocks++;
6424 if (bb->index >= cfg->x_last_basic_block)
6425 cfg->x_last_basic_block = bb->index + 1;
6426
6427 old_len = vec_safe_length (cfg->x_basic_block_info);
6428 if ((unsigned) cfg->x_last_basic_block >= old_len)
6429 {
6430 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6431 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6432 }
6433
6434 (*cfg->x_basic_block_info)[bb->index] = bb;
6435
6436 /* Remap the variables in phi nodes. */
6437 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6438 {
6439 gimple phi = gsi_stmt (si);
6440 use_operand_p use;
6441 tree op = PHI_RESULT (phi);
6442 ssa_op_iter oi;
6443 unsigned i;
6444
6445 if (virtual_operand_p (op))
6446 {
6447 /* Remove the phi nodes for virtual operands (alias analysis will be
6448 run for the new function, anyway). */
6449 remove_phi_node (&si, true);
6450 continue;
6451 }
6452
6453 SET_PHI_RESULT (phi,
6454 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6455 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6456 {
6457 op = USE_FROM_PTR (use);
6458 if (TREE_CODE (op) == SSA_NAME)
6459 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6460 }
6461
6462 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6463 {
6464 location_t locus = gimple_phi_arg_location (phi, i);
6465 tree block = LOCATION_BLOCK (locus);
6466
6467 if (locus == UNKNOWN_LOCATION)
6468 continue;
6469 if (d->orig_block == NULL_TREE || block == d->orig_block)
6470 {
6471 if (d->new_block == NULL_TREE)
6472 locus = LOCATION_LOCUS (locus);
6473 else
6474 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6475 gimple_phi_arg_set_location (phi, i, locus);
6476 }
6477 }
6478
6479 gsi_next (&si);
6480 }
6481
6482 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6483 {
6484 gimple stmt = gsi_stmt (si);
6485 struct walk_stmt_info wi;
6486
6487 memset (&wi, 0, sizeof (wi));
6488 wi.info = d;
6489 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6490
6491 if (gimple_code (stmt) == GIMPLE_LABEL)
6492 {
6493 tree label = gimple_label_label (stmt);
6494 int uid = LABEL_DECL_UID (label);
6495
6496 gcc_assert (uid > -1);
6497
6498 old_len = vec_safe_length (cfg->x_label_to_block_map);
6499 if (old_len <= (unsigned) uid)
6500 {
6501 new_len = 3 * uid / 2 + 1;
6502 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6503 }
6504
6505 (*cfg->x_label_to_block_map)[uid] = bb;
6506 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6507
6508 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6509
6510 if (uid >= dest_cfun->cfg->last_label_uid)
6511 dest_cfun->cfg->last_label_uid = uid + 1;
6512 }
6513
6514 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6515 remove_stmt_from_eh_lp_fn (cfun, stmt);
6516
6517 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6518 gimple_remove_stmt_histograms (cfun, stmt);
6519
6520 /* We cannot leave any operands allocated from the operand caches of
6521 the current function. */
6522 free_stmt_operands (stmt);
6523 push_cfun (dest_cfun);
6524 update_stmt (stmt);
6525 pop_cfun ();
6526 }
6527
6528 FOR_EACH_EDGE (e, ei, bb->succs)
6529 if (e->goto_locus != UNKNOWN_LOCATION)
6530 {
6531 tree block = LOCATION_BLOCK (e->goto_locus);
6532 if (d->orig_block == NULL_TREE
6533 || block == d->orig_block)
6534 e->goto_locus = d->new_block ?
6535 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6536 LOCATION_LOCUS (e->goto_locus);
6537 }
6538 }
6539
6540 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6541 the outermost EH region. Use REGION as the incoming base EH region. */
6542
6543 static eh_region
6544 find_outermost_region_in_block (struct function *src_cfun,
6545 basic_block bb, eh_region region)
6546 {
6547 gimple_stmt_iterator si;
6548
6549 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6550 {
6551 gimple stmt = gsi_stmt (si);
6552 eh_region stmt_region;
6553 int lp_nr;
6554
6555 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6556 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6557 if (stmt_region)
6558 {
6559 if (region == NULL)
6560 region = stmt_region;
6561 else if (stmt_region != region)
6562 {
6563 region = eh_region_outermost (src_cfun, stmt_region, region);
6564 gcc_assert (region != NULL);
6565 }
6566 }
6567 }
6568
6569 return region;
6570 }
6571
6572 static tree
6573 new_label_mapper (tree decl, void *data)
6574 {
6575 htab_t hash = (htab_t) data;
6576 struct tree_map *m;
6577 void **slot;
6578
6579 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6580
6581 m = XNEW (struct tree_map);
6582 m->hash = DECL_UID (decl);
6583 m->base.from = decl;
6584 m->to = create_artificial_label (UNKNOWN_LOCATION);
6585 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6586 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6587 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6588
6589 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6590 gcc_assert (*slot == NULL);
6591
6592 *slot = m;
6593
6594 return m->to;
6595 }
6596
6597 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6598 subblocks. */
6599
6600 static void
6601 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6602 tree to_context)
6603 {
6604 tree *tp, t;
6605
6606 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6607 {
6608 t = *tp;
6609 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6610 continue;
6611 replace_by_duplicate_decl (&t, vars_map, to_context);
6612 if (t != *tp)
6613 {
6614 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6615 {
6616 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6617 DECL_HAS_VALUE_EXPR_P (t) = 1;
6618 }
6619 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6620 *tp = t;
6621 }
6622 }
6623
6624 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6625 replace_block_vars_by_duplicates (block, vars_map, to_context);
6626 }
6627
6628 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6629 from FN1 to FN2. */
6630
6631 static void
6632 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6633 struct loop *loop)
6634 {
6635 /* Discard it from the old loop array. */
6636 (*get_loops (fn1))[loop->num] = NULL;
6637
6638 /* Place it in the new loop array, assigning it a new number. */
6639 loop->num = number_of_loops (fn2);
6640 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6641
6642 /* Recurse to children. */
6643 for (loop = loop->inner; loop; loop = loop->next)
6644 fixup_loop_arrays_after_move (fn1, fn2, loop);
6645 }
6646
6647 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6648 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6649 single basic block in the original CFG and the new basic block is
6650 returned. DEST_CFUN must not have a CFG yet.
6651
6652 Note that the region need not be a pure SESE region. Blocks inside
6653 the region may contain calls to abort/exit. The only restriction
6654 is that ENTRY_BB should be the only entry point and it must
6655 dominate EXIT_BB.
6656
6657 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6658 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6659 to the new function.
6660
6661 All local variables referenced in the region are assumed to be in
6662 the corresponding BLOCK_VARS and unexpanded variable lists
6663 associated with DEST_CFUN. */
6664
6665 basic_block
6666 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6667 basic_block exit_bb, tree orig_block)
6668 {
6669 vec<basic_block> bbs, dom_bbs;
6670 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6671 basic_block after, bb, *entry_pred, *exit_succ, abb;
6672 struct function *saved_cfun = cfun;
6673 int *entry_flag, *exit_flag;
6674 unsigned *entry_prob, *exit_prob;
6675 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6676 edge e;
6677 edge_iterator ei;
6678 htab_t new_label_map;
6679 struct pointer_map_t *vars_map, *eh_map;
6680 struct loop *loop = entry_bb->loop_father;
6681 struct loop *loop0 = get_loop (saved_cfun, 0);
6682 struct move_stmt_d d;
6683
6684 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6685 region. */
6686 gcc_assert (entry_bb != exit_bb
6687 && (!exit_bb
6688 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6689
6690 /* Collect all the blocks in the region. Manually add ENTRY_BB
6691 because gather_blocks_in_sese_region won't add it. */
6692 bbs.create (0);
6693 bbs.safe_push (entry_bb);
6694 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6695
6696 /* The blocks that used to be dominated by something in BBS will now be
6697 dominated by the new block. */
6698 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6699 bbs.address (),
6700 bbs.length ());
6701
6702 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6703 the predecessor edges to ENTRY_BB and the successor edges to
6704 EXIT_BB so that we can re-attach them to the new basic block that
6705 will replace the region. */
6706 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6707 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6708 entry_flag = XNEWVEC (int, num_entry_edges);
6709 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6710 i = 0;
6711 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6712 {
6713 entry_prob[i] = e->probability;
6714 entry_flag[i] = e->flags;
6715 entry_pred[i++] = e->src;
6716 remove_edge (e);
6717 }
6718
6719 if (exit_bb)
6720 {
6721 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6722 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6723 exit_flag = XNEWVEC (int, num_exit_edges);
6724 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6725 i = 0;
6726 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6727 {
6728 exit_prob[i] = e->probability;
6729 exit_flag[i] = e->flags;
6730 exit_succ[i++] = e->dest;
6731 remove_edge (e);
6732 }
6733 }
6734 else
6735 {
6736 num_exit_edges = 0;
6737 exit_succ = NULL;
6738 exit_flag = NULL;
6739 exit_prob = NULL;
6740 }
6741
6742 /* Switch context to the child function to initialize DEST_FN's CFG. */
6743 gcc_assert (dest_cfun->cfg == NULL);
6744 push_cfun (dest_cfun);
6745
6746 init_empty_tree_cfg ();
6747
6748 /* Initialize EH information for the new function. */
6749 eh_map = NULL;
6750 new_label_map = NULL;
6751 if (saved_cfun->eh)
6752 {
6753 eh_region region = NULL;
6754
6755 FOR_EACH_VEC_ELT (bbs, i, bb)
6756 region = find_outermost_region_in_block (saved_cfun, bb, region);
6757
6758 init_eh_for_function ();
6759 if (region != NULL)
6760 {
6761 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6762 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6763 new_label_mapper, new_label_map);
6764 }
6765 }
6766
6767 /* Initialize an empty loop tree. */
6768 struct loops *loops = ggc_alloc_cleared_loops ();
6769 init_loops_structure (dest_cfun, loops, 1);
6770 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6771 set_loops_for_fn (dest_cfun, loops);
6772
6773 /* Move the outlined loop tree part. */
6774 num_nodes = bbs.length ();
6775 FOR_EACH_VEC_ELT (bbs, i, bb)
6776 {
6777 if (bb->loop_father->header == bb)
6778 {
6779 struct loop *this_loop = bb->loop_father;
6780 struct loop *outer = loop_outer (this_loop);
6781 if (outer == loop
6782 /* If the SESE region contains some bbs ending with
6783 a noreturn call, those are considered to belong
6784 to the outermost loop in saved_cfun, rather than
6785 the entry_bb's loop_father. */
6786 || outer == loop0)
6787 {
6788 if (outer != loop)
6789 num_nodes -= this_loop->num_nodes;
6790 flow_loop_tree_node_remove (bb->loop_father);
6791 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6792 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6793 }
6794 }
6795 else if (bb->loop_father == loop0 && loop0 != loop)
6796 num_nodes--;
6797
6798 /* Remove loop exits from the outlined region. */
6799 if (loops_for_fn (saved_cfun)->exits)
6800 FOR_EACH_EDGE (e, ei, bb->succs)
6801 {
6802 void **slot = htab_find_slot_with_hash
6803 (loops_for_fn (saved_cfun)->exits, e,
6804 htab_hash_pointer (e), NO_INSERT);
6805 if (slot)
6806 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6807 }
6808 }
6809
6810
6811 /* Adjust the number of blocks in the tree root of the outlined part. */
6812 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6813
6814 /* Setup a mapping to be used by move_block_to_fn. */
6815 loop->aux = current_loops->tree_root;
6816 loop0->aux = current_loops->tree_root;
6817
6818 pop_cfun ();
6819
6820 /* Move blocks from BBS into DEST_CFUN. */
6821 gcc_assert (bbs.length () >= 2);
6822 after = dest_cfun->cfg->x_entry_block_ptr;
6823 vars_map = pointer_map_create ();
6824
6825 memset (&d, 0, sizeof (d));
6826 d.orig_block = orig_block;
6827 d.new_block = DECL_INITIAL (dest_cfun->decl);
6828 d.from_context = cfun->decl;
6829 d.to_context = dest_cfun->decl;
6830 d.vars_map = vars_map;
6831 d.new_label_map = new_label_map;
6832 d.eh_map = eh_map;
6833 d.remap_decls_p = true;
6834
6835 FOR_EACH_VEC_ELT (bbs, i, bb)
6836 {
6837 /* No need to update edge counts on the last block. It has
6838 already been updated earlier when we detached the region from
6839 the original CFG. */
6840 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6841 after = bb;
6842 }
6843
6844 loop->aux = NULL;
6845 loop0->aux = NULL;
6846 /* Loop sizes are no longer correct, fix them up. */
6847 loop->num_nodes -= num_nodes;
6848 for (struct loop *outer = loop_outer (loop);
6849 outer; outer = loop_outer (outer))
6850 outer->num_nodes -= num_nodes;
6851 loop0->num_nodes -= bbs.length () - num_nodes;
6852
6853 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6854 {
6855 struct loop *aloop;
6856 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6857 if (aloop != NULL)
6858 {
6859 if (aloop->simduid)
6860 {
6861 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6862 d.to_context);
6863 dest_cfun->has_simduid_loops = true;
6864 }
6865 if (aloop->force_vect)
6866 dest_cfun->has_force_vect_loops = true;
6867 }
6868 }
6869
6870 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6871 if (orig_block)
6872 {
6873 tree block;
6874 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6875 == NULL_TREE);
6876 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6877 = BLOCK_SUBBLOCKS (orig_block);
6878 for (block = BLOCK_SUBBLOCKS (orig_block);
6879 block; block = BLOCK_CHAIN (block))
6880 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6881 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6882 }
6883
6884 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6885 vars_map, dest_cfun->decl);
6886
6887 if (new_label_map)
6888 htab_delete (new_label_map);
6889 if (eh_map)
6890 pointer_map_destroy (eh_map);
6891 pointer_map_destroy (vars_map);
6892
6893 /* Rewire the entry and exit blocks. The successor to the entry
6894 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6895 the child function. Similarly, the predecessor of DEST_FN's
6896 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6897 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6898 various CFG manipulation functions get to the right CFG.
6899
6900 FIXME, this is silly. The CFG ought to become a parameter to
6901 these helpers. */
6902 push_cfun (dest_cfun);
6903 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6904 if (exit_bb)
6905 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6906 pop_cfun ();
6907
6908 /* Back in the original function, the SESE region has disappeared,
6909 create a new basic block in its place. */
6910 bb = create_empty_bb (entry_pred[0]);
6911 if (current_loops)
6912 add_bb_to_loop (bb, loop);
6913 for (i = 0; i < num_entry_edges; i++)
6914 {
6915 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6916 e->probability = entry_prob[i];
6917 }
6918
6919 for (i = 0; i < num_exit_edges; i++)
6920 {
6921 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6922 e->probability = exit_prob[i];
6923 }
6924
6925 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6926 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6927 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6928 dom_bbs.release ();
6929
6930 if (exit_bb)
6931 {
6932 free (exit_prob);
6933 free (exit_flag);
6934 free (exit_succ);
6935 }
6936 free (entry_prob);
6937 free (entry_flag);
6938 free (entry_pred);
6939 bbs.release ();
6940
6941 return bb;
6942 }
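
/* A sketch of the calling convention for the function above, as used
   when outlining a region into a hypothetical CHILD_FN whose struct
   function exists but has no CFG yet:

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   On return, NEW_BB is the single block that replaces the whole
   region in the original CFG. */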
6943
6944
6945 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6946 dumpfile.h). */
6947
6948 void
6949 dump_function_to_file (tree fndecl, FILE *file, int flags)
6950 {
6951 tree arg, var, old_current_fndecl = current_function_decl;
6952 struct function *dsf;
6953 bool ignore_topmost_bind = false, any_var = false;
6954 basic_block bb;
6955 tree chain;
6956 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6957 && decl_is_tm_clone (fndecl));
6958 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6959
6960 current_function_decl = fndecl;
6961 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6962
6963 arg = DECL_ARGUMENTS (fndecl);
6964 while (arg)
6965 {
6966 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6967 fprintf (file, " ");
6968 print_generic_expr (file, arg, dump_flags);
6969 if (flags & TDF_VERBOSE)
6970 print_node (file, "", arg, 4);
6971 if (DECL_CHAIN (arg))
6972 fprintf (file, ", ");
6973 arg = DECL_CHAIN (arg);
6974 }
6975 fprintf (file, ")\n");
6976
6977 if (flags & TDF_VERBOSE)
6978 print_node (file, "", fndecl, 2);
6979
6980 dsf = DECL_STRUCT_FUNCTION (fndecl);
6981 if (dsf && (flags & TDF_EH))
6982 dump_eh_tree (file, dsf);
6983
6984 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
6985 {
6986 dump_node (fndecl, TDF_SLIM | flags, file);
6987 current_function_decl = old_current_fndecl;
6988 return;
6989 }
6990
6991 /* When GIMPLE is lowered, the variables are no longer available in
6992 BIND_EXPRs, so display them separately. */
6993 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
6994 {
6995 unsigned ix;
6996 ignore_topmost_bind = true;
6997
6998 fprintf (file, "{\n");
6999 if (!vec_safe_is_empty (fun->local_decls))
7000 FOR_EACH_LOCAL_DECL (fun, ix, var)
7001 {
7002 print_generic_decl (file, var, flags);
7003 if (flags & TDF_VERBOSE)
7004 print_node (file, "", var, 4);
7005 fprintf (file, "\n");
7006
7007 any_var = true;
7008 }
7009 if (gimple_in_ssa_p (cfun))
7010 for (ix = 1; ix < num_ssa_names; ++ix)
7011 {
7012 tree name = ssa_name (ix);
7013 if (name && !SSA_NAME_VAR (name))
7014 {
7015 fprintf (file, " ");
7016 print_generic_expr (file, TREE_TYPE (name), flags);
7017 fprintf (file, " ");
7018 print_generic_expr (file, name, flags);
7019 fprintf (file, ";\n");
7020
7021 any_var = true;
7022 }
7023 }
7024 }
7025
7026 if (fun && fun->decl == fndecl
7027 && fun->cfg
7028 && basic_block_info_for_function (fun))
7029 {
7030 /* If the CFG has been built, emit a CFG-based dump. */
7031 if (!ignore_topmost_bind)
7032 fprintf (file, "{\n");
7033
7034 if (any_var && n_basic_blocks_for_fn (fun))
7035 fprintf (file, "\n");
7036
7037 FOR_EACH_BB_FN (bb, fun)
7038 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7039
7040 fprintf (file, "}\n");
7041 }
7042 else if (DECL_SAVED_TREE (fndecl) == NULL)
7043 {
7044 /* The function is now in GIMPLE form but the CFG has not been
7045 built yet. Emit the single sequence of GIMPLE statements
7046 that make up its body. */
7047 gimple_seq body = gimple_body (fndecl);
7048
7049 if (gimple_seq_first_stmt (body)
7050 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7051 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7052 print_gimple_seq (file, body, 0, flags);
7053 else
7054 {
7055 if (!ignore_topmost_bind)
7056 fprintf (file, "{\n");
7057
7058 if (any_var)
7059 fprintf (file, "\n");
7060
7061 print_gimple_seq (file, body, 2, flags);
7062 fprintf (file, "}\n");
7063 }
7064 }
7065 else
7066 {
7067 int indent;
7068
7069 /* Make a tree based dump. */
7070 chain = DECL_SAVED_TREE (fndecl);
7071 if (chain && TREE_CODE (chain) == BIND_EXPR)
7072 {
7073 if (ignore_topmost_bind)
7074 {
7075 chain = BIND_EXPR_BODY (chain);
7076 indent = 2;
7077 }
7078 else
7079 indent = 0;
7080 }
7081 else
7082 {
7083 if (!ignore_topmost_bind)
7084 fprintf (file, "{\n");
7085 indent = 2;
7086 }
7087
7088 if (any_var)
7089 fprintf (file, "\n");
7090
7091 print_generic_stmt_indented (file, chain, flags, indent);
7092 if (ignore_topmost_bind)
7093 fprintf (file, "}\n");
7094 }
7095
7096 if (flags & TDF_ENUMERATE_LOCALS)
7097 dump_enumerated_decls (file, flags);
7098 fprintf (file, "\n\n");
7099
7100 current_function_decl = old_current_fndecl;
7101 }
7102
7103 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7104
7105 DEBUG_FUNCTION void
7106 debug_function (tree fn, int flags)
7107 {
7108 dump_function_to_file (fn, stderr, flags);
7109 }
7110
7111
7112 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7113
7114 static void
7115 print_pred_bbs (FILE *file, basic_block bb)
7116 {
7117 edge e;
7118 edge_iterator ei;
7119
7120 FOR_EACH_EDGE (e, ei, bb->preds)
7121 fprintf (file, "bb_%d ", e->src->index);
7122 }
7123
7124
7125 /* Print on FILE the indexes for the successors of basic_block BB. */
7126
7127 static void
7128 print_succ_bbs (FILE *file, basic_block bb)
7129 {
7130 edge e;
7131 edge_iterator ei;
7132
7133 FOR_EACH_EDGE (e, ei, bb->succs)
7134 fprintf (file, "bb_%d ", e->dest->index);
7135 }
7136
7137 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7138
7139 void
7140 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7141 {
7142 char *s_indent = (char *) alloca ((size_t) indent + 1);
7143 memset ((void *) s_indent, ' ', (size_t) indent);
7144 s_indent[indent] = '\0';
7145
7146 /* Print basic_block's header. */
7147 if (verbosity >= 2)
7148 {
7149 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7150 print_pred_bbs (file, bb);
7151 fprintf (file, "}, succs = {");
7152 print_succ_bbs (file, bb);
7153 fprintf (file, "})\n");
7154 }
7155
7156 /* Print basic_block's body. */
7157 if (verbosity >= 3)
7158 {
7159 fprintf (file, "%s {\n", s_indent);
7160 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7161 fprintf (file, "%s }\n", s_indent);
7162 }
7163 }
7164
7165 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7166
7167 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7168 the VERBOSITY level, this outputs the contents of the loop, or just
7169 its structure. */
7170
7171 static void
7172 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7173 {
7174 char *s_indent;
7175 basic_block bb;
7176
7177 if (loop == NULL)
7178 return;
7179
7180 s_indent = (char *) alloca ((size_t) indent + 1);
7181 memset ((void *) s_indent, ' ', (size_t) indent);
7182 s_indent[indent] = '\0';
7183
7184 /* Print loop's header. */
7185 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7186 if (loop->header)
7187 fprintf (file, "header = %d", loop->header->index);
7188 else
7189 {
7190 fprintf (file, "deleted)\n");
7191 return;
7192 }
7193 if (loop->latch)
7194 fprintf (file, ", latch = %d", loop->latch->index);
7195 else
7196 fprintf (file, ", multiple latches");
7197 fprintf (file, ", niter = ");
7198 print_generic_expr (file, loop->nb_iterations, 0);
7199
7200 if (loop->any_upper_bound)
7201 {
7202 fprintf (file, ", upper_bound = ");
7203 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7204 }
7205
7206 if (loop->any_estimate)
7207 {
7208 fprintf (file, ", estimate = ");
7209 dump_double_int (file, loop->nb_iterations_estimate, true);
7210 }
7211 fprintf (file, ")\n");
7212
7213 /* Print loop's body. */
7214 if (verbosity >= 1)
7215 {
7216 fprintf (file, "%s{\n", s_indent);
7217 FOR_EACH_BB (bb)
7218 if (bb->loop_father == loop)
7219 print_loops_bb (file, bb, indent, verbosity);
7220
7221 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7222 fprintf (file, "%s}\n", s_indent);
7223 }
7224 }
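
/* For instance, at verbosity 0 the header printed above comes out
   roughly as (a sketch; which fields appear depends on the loop):

     loop_1 (header = 3, latch = 5, niter = n_7, upper_bound = 42)

   At verbosity >= 1 the member basic blocks are dumped as well, and
   inner loops are printed recursively. */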
7225
7226 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7227 spaces. Depending on the VERBOSITY level, this outputs the contents
7228 of each loop, or just its structure. */
7229
7230 static void
7231 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7232 int verbosity)
7233 {
7234 if (loop == NULL)
7235 return;
7236
7237 print_loop (file, loop, indent, verbosity);
7238 print_loop_and_siblings (file, loop->next, indent, verbosity);
7239 }
7240
7241 /* Starting from the entry point of the program, follow CFG edges and,
7242 on entry of a loop, pretty print the loop structure on FILE. */
7243
7244 void
7245 print_loops (FILE *file, int verbosity)
7246 {
7247 basic_block bb;
7248
7249 bb = ENTRY_BLOCK_PTR;
7250 if (bb && bb->loop_father)
7251 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7252 }
7253
7254 /* Dump a loop. */
7255
7256 DEBUG_FUNCTION void
7257 debug (struct loop &ref)
7258 {
7259 print_loop (stderr, &ref, 0, /*verbosity*/0);
7260 }
7261
7262 DEBUG_FUNCTION void
7263 debug (struct loop *ptr)
7264 {
7265 if (ptr)
7266 debug (*ptr);
7267 else
7268 fprintf (stderr, "<nil>\n");
7269 }
7270
7271 /* Dump a loop verbosely. */
7272
7273 DEBUG_FUNCTION void
7274 debug_verbose (struct loop &ref)
7275 {
7276 print_loop (stderr, &ref, 0, /*verbosity*/3);
7277 }
7278
7279 DEBUG_FUNCTION void
7280 debug_verbose (struct loop *ptr)
7281 {
7282 if (ptr)
7283 debug (*ptr);
7284 else
7285 fprintf (stderr, "<nil>\n");
7286 }
7287
7288
7289 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7290
7291 DEBUG_FUNCTION void
7292 debug_loops (int verbosity)
7293 {
7294 print_loops (stderr, verbosity);
7295 }
7296
7297 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7298
7299 DEBUG_FUNCTION void
7300 debug_loop (struct loop *loop, int verbosity)
7301 {
7302 print_loop (stderr, loop, 0, verbosity);
7303 }
7304
7305 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7306 level. */
7307
7308 DEBUG_FUNCTION void
7309 debug_loop_num (unsigned num, int verbosity)
7310 {
7311 debug_loop (get_loop (cfun, num), verbosity);
7312 }
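
/* These DEBUG_FUNCTIONs are intended to be called from a debugger
   session, e.g. (a sketch):

     (gdb) call debug_loops (3)
     (gdb) call debug_loop_num (1, 1)

   to dump the whole loop tree at verbosity 3, or just loop number 1. */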
7313
7314 /* Return true if BB ends with a call, possibly followed by some
7315 instructions that must stay with the call. Return false
7316 otherwise. */
7317
7318 static bool
7319 gimple_block_ends_with_call_p (basic_block bb)
7320 {
7321 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7322 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7323 }
7324
7325
7326 /* Return true if BB ends with a conditional branch. Return false
7327 otherwise. */
7328
7329 static bool
7330 gimple_block_ends_with_condjump_p (const_basic_block bb)
7331 {
7332 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7333 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7334 }
7335
7336
7337 /* Return true if we need to add fake edge to exit at statement T.
7338 Helper function for gimple_flow_call_edges_add. */
7339
7340 static bool
7341 need_fake_edge_p (gimple t)
7342 {
7343 tree fndecl = NULL_TREE;
7344 int call_flags = 0;
7345
7346 /* NORETURN and LONGJMP calls already have an edge to exit.
7347 CONST and PURE calls do not need one.
7348 We don't currently check for CONST and PURE here, although
7349 it would be a good idea, because those attributes are
7350 figured out from the RTL in mark_constant_function, and
7351 the counter incrementation code from -fprofile-arcs
7352 leads to different results from -fbranch-probabilities. */
7353 if (is_gimple_call (t))
7354 {
7355 fndecl = gimple_call_fndecl (t);
7356 call_flags = gimple_call_flags (t);
7357 }
7358
7359 if (is_gimple_call (t)
7360 && fndecl
7361 && DECL_BUILT_IN (fndecl)
7362 && (call_flags & ECF_NOTHROW)
7363 && !(call_flags & ECF_RETURNS_TWICE)
7364 /* fork() doesn't really return twice, but the effect of
7365 wrapping it in __gcov_fork() which calls __gcov_flush()
7366 and clears the counters before forking has the same
7367 effect as returning twice. Force a fake edge. */
7368 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7369 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7370 return false;
7371
7372 if (is_gimple_call (t))
7373 {
7374 edge_iterator ei;
7375 edge e;
7376 basic_block bb;
7377
7378 if (!(call_flags & ECF_NORETURN))
7379 return true;
7380
7381 bb = gimple_bb (t);
7382 FOR_EACH_EDGE (e, ei, bb->succs)
7383 if ((e->flags & EDGE_FAKE) == 0)
7384 return true;
7385 }
7386
7387 if (gimple_code (t) == GIMPLE_ASM
7388 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7389 return true;
7390
7391 return false;
7392 }
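
/* For example, in a block containing

     foo ();
     x = 1;

   a fake edge to exit is wanted after the call, because foo may never
   return (it could call exit), so entering the block does not imply
   that "x = 1" executes. As the comment above notes, even const and
   pure calls are not exempted here. */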
7393
7394
7395 /* Add fake edges to the function exit for any non-constant and
7396 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7397 for volatile inline assembly, in the bitmap of blocks specified by
7398 BLOCKS or in the whole CFG if BLOCKS is zero. Return the number of
7399 blocks that were split.
7400
7401 The goal is to expose cases in which entering a basic block does
7402 not imply that all subsequent instructions must be executed. */
7403
7404 static int
7405 gimple_flow_call_edges_add (sbitmap blocks)
7406 {
7407 int i;
7408 int blocks_split = 0;
7409 int last_bb = last_basic_block;
7410 bool check_last_block = false;
7411
7412 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7413 return 0;
7414
7415 if (! blocks)
7416 check_last_block = true;
7417 else
7418 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7419
7420 /* In the last basic block, before epilogue generation, there will be
7421 a fallthru edge to EXIT. Special care is required if the last insn
7422 of the last basic block is a call because make_edge folds duplicate
7423 edges, which would result in the fallthru edge also being marked
7424 fake, which would result in the fallthru edge being removed by
7425 remove_fake_edges, which would result in an invalid CFG.
7426
7427 Moreover, we can't elide the outgoing fake edge, since the block
7428 profiler needs to take this into account in order to solve the minimal
7429 spanning tree in the case that the call doesn't return.
7430
7431 Handle this by adding a dummy instruction in a new last basic block. */
7432 if (check_last_block)
7433 {
7434 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7435 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7436 gimple t = NULL;
7437
7438 if (!gsi_end_p (gsi))
7439 t = gsi_stmt (gsi);
7440
7441 if (t && need_fake_edge_p (t))
7442 {
7443 edge e;
7444
7445 e = find_edge (bb, EXIT_BLOCK_PTR);
7446 if (e)
7447 {
7448 gsi_insert_on_edge (e, gimple_build_nop ());
7449 gsi_commit_edge_inserts ();
7450 }
7451 }
7452 }
7453
7454 /* Now add fake edges to the function exit for any non-constant
7455 calls since there is no way that we can determine if they will
7456 return or not... */
7457 for (i = 0; i < last_bb; i++)
7458 {
7459 basic_block bb = BASIC_BLOCK (i);
7460 gimple_stmt_iterator gsi;
7461 gimple stmt, last_stmt;
7462
7463 if (!bb)
7464 continue;
7465
7466 if (blocks && !bitmap_bit_p (blocks, i))
7467 continue;
7468
7469 gsi = gsi_last_nondebug_bb (bb);
7470 if (!gsi_end_p (gsi))
7471 {
7472 last_stmt = gsi_stmt (gsi);
7473 do
7474 {
7475 stmt = gsi_stmt (gsi);
7476 if (need_fake_edge_p (stmt))
7477 {
7478 edge e;
7479
7480 /* The handling above of the final block before the
7481 epilogue should be enough to verify that there is
7482 no edge to the exit block in CFG already.
7483 Calling make_edge in such case would cause us to
7484 mark that edge as fake and remove it later. */
7485 #ifdef ENABLE_CHECKING
7486 if (stmt == last_stmt)
7487 {
7488 e = find_edge (bb, EXIT_BLOCK_PTR);
7489 gcc_assert (e == NULL);
7490 }
7491 #endif
7492
7493 /* Note that the following may create a new basic block
7494 and renumber the existing basic blocks. */
7495 if (stmt != last_stmt)
7496 {
7497 e = split_block (bb, stmt);
7498 if (e)
7499 blocks_split++;
7500 }
7501 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7502 }
7503 gsi_prev (&gsi);
7504 }
7505 while (!gsi_end_p (gsi));
7506 }
7507 }
7508
7509 if (blocks_split)
7510 verify_flow_info ();
7511
7512 return blocks_split;
7513 }
7514
7515 /* Removes edge E and all the blocks dominated by it, and updates dominance
7516 information. The IL in E->src needs to be updated separately.
7517 If dominance info is not available, only the edge E is removed. */
7518
7519 void
7520 remove_edge_and_dominated_blocks (edge e)
7521 {
7522 vec<basic_block> bbs_to_remove = vNULL;
7523 vec<basic_block> bbs_to_fix_dom = vNULL;
7524 bitmap df, df_idom;
7525 edge f;
7526 edge_iterator ei;
7527 bool none_removed = false;
7528 unsigned i;
7529 basic_block bb, dbb;
7530 bitmap_iterator bi;
7531
7532 if (!dom_info_available_p (CDI_DOMINATORS))
7533 {
7534 remove_edge (e);
7535 return;
7536 }
7537
7538 /* No updating is needed for edges to exit. */
7539 if (e->dest == EXIT_BLOCK_PTR)
7540 {
7541 if (cfgcleanup_altered_bbs)
7542 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7543 remove_edge (e);
7544 return;
7545 }
7546
7547 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7548 that is not dominated by E->dest, then this set is empty. Otherwise,
7549 all the basic blocks dominated by E->dest are removed.
7550
7551 Also, to DF_IDOM we store the immediate dominators of the blocks in
7552 the dominance frontier of E (i.e., of the successors of the
7553 removed blocks, if there are any, and of E->dest otherwise). */
7554 FOR_EACH_EDGE (f, ei, e->dest->preds)
7555 {
7556 if (f == e)
7557 continue;
7558
7559 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7560 {
7561 none_removed = true;
7562 break;
7563 }
7564 }
7565
7566 df = BITMAP_ALLOC (NULL);
7567 df_idom = BITMAP_ALLOC (NULL);
7568
7569 if (none_removed)
7570 bitmap_set_bit (df_idom,
7571 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7572 else
7573 {
7574 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7575 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7576 {
7577 FOR_EACH_EDGE (f, ei, bb->succs)
7578 {
7579 if (f->dest != EXIT_BLOCK_PTR)
7580 bitmap_set_bit (df, f->dest->index);
7581 }
7582 }
7583 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7584 bitmap_clear_bit (df, bb->index);
7585
7586 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7587 {
7588 bb = BASIC_BLOCK (i);
7589 bitmap_set_bit (df_idom,
7590 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7591 }
7592 }
7593
7594 if (cfgcleanup_altered_bbs)
7595 {
7596 /* Record the set of the altered basic blocks. */
7597 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7598 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7599 }
7600
7601 /* Remove E and the cancelled blocks. */
7602 if (none_removed)
7603 remove_edge (e);
7604 else
7605 {
7606 /* Walk backwards so as to get a chance to substitute all
7607 released DEFs into debug stmts. See
7608 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7609 details. */
7610 for (i = bbs_to_remove.length (); i-- > 0; )
7611 delete_basic_block (bbs_to_remove[i]);
7612 }
7613
7614 /* Update the dominance information. The immediate dominator may change only
7615 for blocks whose immediate dominator belongs to DF_IDOM:
7616
7617 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7618 removal. Let Z be any block such that idom(Z) = Y and
7619 Z dominates X after the removal. Before removal, there exists a path P
7620 from Y to X that avoids Z. Let F be the last edge on P that is
7621 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7622 dominates W, and because of P, Z does not dominate W), and W belongs to
7623 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7624 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7625 {
7626 bb = BASIC_BLOCK (i);
7627 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7628 dbb;
7629 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7630 bbs_to_fix_dom.safe_push (dbb);
7631 }
7632
7633 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7634
7635 BITMAP_FREE (df);
7636 BITMAP_FREE (df_idom);
7637 bbs_to_remove.release ();
7638 bbs_to_fix_dom.release ();
7639 }
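
/* As a small example of the above (a sketch): in the CFG

	  A
	 / \
	B   C
	 \ /
	  D

   removing the edge A->B also removes B itself (its only path from
   the entry went through that edge), and the immediate dominator of D
   changes from A to C; D lands in the dominance frontier DF computed
   above, from which the dominator fixups are driven. */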
7640
7641 /* Purge dead EH edges from basic block BB. */
7642
7643 bool
7644 gimple_purge_dead_eh_edges (basic_block bb)
7645 {
7646 bool changed = false;
7647 edge e;
7648 edge_iterator ei;
7649 gimple stmt = last_stmt (bb);
7650
7651 if (stmt && stmt_can_throw_internal (stmt))
7652 return false;
7653
7654 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7655 {
7656 if (e->flags & EDGE_EH)
7657 {
7658 remove_edge_and_dominated_blocks (e);
7659 changed = true;
7660 }
7661 else
7662 ei_next (&ei);
7663 }
7664
7665 return changed;
7666 }
7667
7668 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7669
7670 bool
7671 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7672 {
7673 bool changed = false;
7674 unsigned i;
7675 bitmap_iterator bi;
7676
7677 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7678 {
7679 basic_block bb = BASIC_BLOCK (i);
7680
7681 /* Earlier gimple_purge_dead_eh_edges could have removed
7682 this basic block already. */
7683 gcc_assert (bb || changed);
7684 if (bb != NULL)
7685 changed |= gimple_purge_dead_eh_edges (bb);
7686 }
7687
7688 return changed;
7689 }
7690
7691 /* Purge dead abnormal call edges from basic block BB. */
7692
7693 bool
7694 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7695 {
7696 bool changed = false;
7697 edge e;
7698 edge_iterator ei;
7699 gimple stmt = last_stmt (bb);
7700
7701 if (!cfun->has_nonlocal_label
7702 && !cfun->calls_setjmp)
7703 return false;
7704
7705 if (stmt && stmt_can_make_abnormal_goto (stmt))
7706 return false;
7707
7708 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7709 {
7710 if (e->flags & EDGE_ABNORMAL)
7711 {
7712 if (e->flags & EDGE_FALLTHRU)
7713 e->flags &= ~EDGE_ABNORMAL;
7714 else
7715 remove_edge_and_dominated_blocks (e);
7716 changed = true;
7717 }
7718 else
7719 ei_next (&ei);
7720 }
7721
7722 return changed;
7723 }
7724
7725 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7726
7727 bool
7728 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7729 {
7730 bool changed = false;
7731 unsigned i;
7732 bitmap_iterator bi;
7733
7734 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7735 {
7736 basic_block bb = BASIC_BLOCK (i);
7737
7738 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7739 this basic block already. */
7740 gcc_assert (bb || changed);
7741 if (bb != NULL)
7742 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7743 }
7744
7745 return changed;
7746 }
7747
7748 /* This function is called whenever a new edge is created or
7749 redirected. */
7750
7751 static void
7752 gimple_execute_on_growing_pred (edge e)
7753 {
7754 basic_block bb = e->dest;
7755
7756 if (!gimple_seq_empty_p (phi_nodes (bb)))
7757 reserve_phi_args_for_new_edge (bb);
7758 }
7759
7760 /* This function is called immediately before edge E is removed from
7761 the edge vector E->dest->preds. */
7762
7763 static void
7764 gimple_execute_on_shrinking_pred (edge e)
7765 {
7766 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7767 remove_phi_args (e);
7768 }
7769
7770 /*---------------------------------------------------------------------------
7771 Helper functions for Loop versioning
7772 ---------------------------------------------------------------------------*/
7773
7774 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7775 of 'first'. Both of them are dominated by 'new_head' basic block. When
7776 'new_head' was created by splitting 'second's incoming edge, it received
7777 phi arguments on that edge from split_edge(). Later, an additional edge
7778 'e' was created to connect 'new_head' and 'first'. This routine now adds,
7779 on the additional edge 'e', the phi args that the edge from 'new_head' to
7780 'second' received as part of the edge splitting. */
7781
7782 static void
7783 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7784 basic_block new_head, edge e)
7785 {
7786 gimple phi1, phi2;
7787 gimple_stmt_iterator psi1, psi2;
7788 tree def;
7789 edge e2 = find_edge (new_head, second);
7790
7791 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7792 edge, we should always have an edge from NEW_HEAD to SECOND. */
7793 gcc_assert (e2 != NULL);
7794
7795 /* Browse all 'second' basic block phi nodes and add phi args to
7796 edge 'e' for 'first' head. PHI args are always in correct order. */
7797
7798 for (psi2 = gsi_start_phis (second),
7799 psi1 = gsi_start_phis (first);
7800 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7801 gsi_next (&psi2), gsi_next (&psi1))
7802 {
7803 phi1 = gsi_stmt (psi1);
7804 phi2 = gsi_stmt (psi2);
7805 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7806 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7807 }
7808 }
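
/* A sketch of the shape handled above, where SECOND is the copy of
   FIRST made by loop versioning and E2 is the edge split_edge() left
   from NEW_HEAD to SECOND:

	     new_head
	     /      \
	    e        e2
	   /          \
	first        second

   Each phi in FIRST receives, on edge E, the argument that the
   corresponding phi in SECOND has on E2. */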
7809
7810
7811 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7812 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7813 the destination of the ELSE part. */
7814
7815 static void
7816 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7817 basic_block second_head ATTRIBUTE_UNUSED,
7818 basic_block cond_bb, void *cond_e)
7819 {
7820 gimple_stmt_iterator gsi;
7821 gimple new_cond_expr;
7822 tree cond_expr = (tree) cond_e;
7823 edge e0;
7824
7825 /* Build the new conditional expr. */
7826 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7827 NULL_TREE, NULL_TREE);
7828
7829 /* Add new cond in cond_bb. */
7830 gsi = gsi_last_bb (cond_bb);
7831 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7832
7833 /* Adjust edges appropriately to connect new head with first head
7834 as well as second head. */
7835 e0 = single_succ_edge (cond_bb);
7836 e0->flags &= ~EDGE_FALLTHRU;
7837 e0->flags |= EDGE_FALSE_VALUE;
7838 }
7839
7840
7841 /* Do book-keeping of basic block BB for the profile consistency checker.
7842 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
7843 do post-pass accounting. Store the counts in RECORD. */
7844 static void
7845 gimple_account_profile_record (basic_block bb, int after_pass,
7846 struct profile_record *record)
7847 {
7848 gimple_stmt_iterator i;
7849 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7850 {
7851 record->size[after_pass]
7852 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7853 if (profile_status == PROFILE_READ)
7854 record->time[after_pass]
7855 += estimate_num_insns (gsi_stmt (i),
7856 &eni_time_weights) * bb->count;
7857 else if (profile_status == PROFILE_GUESSED)
7858 record->time[after_pass]
7859 += estimate_num_insns (gsi_stmt (i),
7860 &eni_time_weights) * bb->frequency;
7861 }
7862 }
7863
7864 struct cfg_hooks gimple_cfg_hooks = {
7865 "gimple",
7866 gimple_verify_flow_info,
7867 gimple_dump_bb, /* dump_bb */
7868 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7869 create_bb, /* create_basic_block */
7870 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7871 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7872 gimple_can_remove_branch_p, /* can_remove_branch_p */
7873 remove_bb, /* delete_basic_block */
7874 gimple_split_block, /* split_block */
7875 gimple_move_block_after, /* move_block_after */
7876 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7877 gimple_merge_blocks, /* merge_blocks */
7878 gimple_predict_edge, /* predict_edge */
7879 gimple_predicted_by_p, /* predicted_by_p */
7880 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7881 gimple_duplicate_bb, /* duplicate_block */
7882 gimple_split_edge, /* split_edge */
7883 gimple_make_forwarder_block, /* make_forwarder_block */
7884 NULL, /* tidy_fallthru_edge */
7885 NULL, /* force_nonfallthru */
7886 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7887 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7888 gimple_flow_call_edges_add, /* flow_call_edges_add */
7889 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7890 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7891 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7892 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7893 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7894 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7895 flush_pending_stmts, /* flush_pending_stmts */
7896 gimple_empty_block_p, /* block_empty_p */
7897 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7898 gimple_account_profile_record, /* account_profile_record */
7899 };
7900
7901
/* Split all critical edges.  A critical edge is one from a block with
   multiple successors to a block with multiple predecessors; statements
   cannot be inserted on such an edge without first splitting it into a
   new block.  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
            split_edge (e);
          /* PRE inserts statements on edges and expects that, because
             split_critical_edges was run beforehand, committing edge
             insertions will not split more edges.  In addition to
             critical edges we must split edges whose source block ends
             in a control flow statement with multiple successors, such
             as RESX.  Go ahead and split them too.  This matches the
             logic in gimple_find_edge_insert_loc.  */
          else if ((!single_pred_p (e->dest)
                    || !gimple_seq_empty_p (phi_nodes (e->dest))
                    || e->dest == EXIT_BLOCK_PTR)
                   && e->src != ENTRY_BLOCK_PTR
                   && !(e->flags & EDGE_ABNORMAL))
            {
              gimple_stmt_iterator gsi;

              gsi = gsi_last_bb (e->src);
              if (!gsi_end_p (gsi)
                  && stmt_ends_bb_p (gsi_stmt (gsi))
                  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
                      && !gimple_call_builtin_p (gsi_stmt (gsi),
                                                 BUILT_IN_RETURN)))
                split_edge (e);
            }
        }
    }
  end_recording_case_labels ();
  return 0;
}

namespace {

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_flow, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}


/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
                 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}
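
/* A minimal usage sketch for the helpers above (illustrative only;
   GSI, TYPE, A and B stand for values already live at the insertion
   point):

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree neg = gimplify_build1 (&gsi, NEGATE_EXPR, type, sum);

   Any statements needed to gimplify the folded expression are emitted
   before GSI, and the returned tree is a valid gimple operand.  */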


\f
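/* A sketch of the two diagnostics issued below (illustrative only):

     __attribute__ ((noreturn)) int f (void) { return 1; }
       -> "'noreturn' function does return"

     int g (int x) { if (x) return 1; }
       -> "control reaches end of non-void function"  (-Wreturn-type)  */
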
/* Emit return warnings: diagnose "noreturn" functions that do return,
   and control reaching the end of a non-void function.  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  TREE_THIS_VOLATILE
     on a FUNCTION_DECL means the function was declared noreturn.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          last = last_stmt (e->src);
          if ((gimple_code (last) == GIMPLE_RETURN
               || gimple_call_builtin_p (last, BUILT_IN_RETURN))
              && (location = gimple_location (last)) != UNKNOWN_LOCATION)
            break;
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
           && !TREE_NO_WARNING (cfun->decl)
           && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          gimple last = last_stmt (e->src);
          if (gimple_code (last) == GIMPLE_RETURN
              && gimple_return_retval (last) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (location == UNKNOWN_LOCATION)
                location = cfun->function_end_locus;
              warning_at (location, OPT_Wreturn_type,
                          "control reaches end of non-void function");
              TREE_NO_WARNING (cfun->decl) = 1;
              break;
            }
        }
    }
  return 0;
}


/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
                                     edge *true_edge,
                                     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
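
/* Typical use (illustrative only): for a block COND_BB that ends in a
   GIMPLE_COND,

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);

   code specific to either outcome can then be inserted on the
   corresponding edge.  */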

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_warn_function_return (); }

}; // class pass_warn_function_return

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}

/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so the warnings refer to calls as the user wrote them.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (g));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (g));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;
          if (gimple_call_internal_p (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = gimple_call_fntype (g);

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD, "
                            "declared with attribute warn_unused_result",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute warn_unused_result");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}

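/* For example (illustrative only), given

     __attribute__ ((warn_unused_result)) int must_check (void);

   a statement `must_check ();' whose value is discarded reaches the
   GIMPLE_CALL case above with no LHS and triggers -Wunused-result.  */

/* Entry point: walk the gimple body of the current function.  This
   runs before the CFG is built, so a plain gimple_seq walk suffices.  */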
static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}

/* Gate: run only when -Wunused-result is enabled.  */

static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_unused_result (); }
  unsigned int execute () { return run_warn_unused_result (); }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if
   necessary.

   This pass can't be executed as a standalone pass from the pass
   manager, because in between inlining and this fixup
   verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

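  /* The count on the callgraph node may have diverged from the CFG's
     profile (e.g. after cloning or when the function was inlined into
     its callers), so rescale every block and edge count by the ratio
     of the two entry counts.  */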
  count_scale
    = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
                          ENTRY_BLOCK_PTR->count);

  ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
                                       count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl = is_gimple_call (stmt)
                      ? gimple_call_fndecl (stmt)
                      : NULL;
          if (decl)
            {
              int flags = gimple_call_flags (stmt);
              if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
                {
                  if (gimple_purge_dead_abnormal_call_edges (bb))
                    todo |= TODO_cleanup_cfg;

                  if (gimple_in_ssa_p (cfun))
                    {
                      todo |= TODO_update_ssa | TODO_cleanup_cfg;
                      update_stmt (stmt);
                    }
                }

              if (flags & ECF_NORETURN
                  && fixup_noreturn_call (stmt))
                todo |= TODO_cleanup_cfg;
            }

          if (maybe_clean_eh_stmt (stmt)
              && gimple_purge_dead_eh_edges (bb))
            todo |= TODO_cleanup_cfg;
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call, end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
            {
              stmt = gimple_build_call
                (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
            }
        }
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute () { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

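/* Mark the fields of edge E for garbage collection.  E->goto_locus is
   a location_t that may carry a BLOCK pointer; the GC cannot look
   inside a location, so the block is extracted and marked by hand.  */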
void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

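/* As above, but apply OP (with COOKIE) to each GC pointer reachable
   from E, so the PCH machinery can relocate the pointers it writes
   out.  */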
void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}