Eliminate n_edges macro
[gcc.git] / gcc / tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
30 #include "tm_p.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "ggc.h"
35 #include "gimple-pretty-print.h"
36 #include "gimple.h"
37 #include "gimple-iterator.h"
38 #include "gimplify-me.h"
39 #include "gimple-walk.h"
40 #include "gimple-ssa.h"
41 #include "cgraph.h"
42 #include "tree-cfg.h"
43 #include "tree-phinodes.h"
44 #include "ssa-iterators.h"
45 #include "stringpool.h"
46 #include "tree-ssanames.h"
47 #include "tree-ssa-loop-manip.h"
48 #include "tree-ssa-loop-niter.h"
49 #include "tree-into-ssa.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "tree-dump.h"
54 #include "tree-pass.h"
55 #include "diagnostic-core.h"
56 #include "except.h"
57 #include "cfgloop.h"
58 #include "tree-ssa-propagate.h"
59 #include "value-prof.h"
60 #include "pointer-set.h"
61 #include "tree-inline.h"
62 #include "target.h"
63 #include "tree-ssa-live.h"
64 #include "omp-low.h"
65 #include "tree-cfgcleanup.h"
66
67 /* This file contains functions for building the Control Flow Graph (CFG)
68 for a function tree. */
69
70 /* Local declarations. */
71
/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).

   NULL while not recording; see start_recording_case_labels and
   end_recording_case_labels.  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;
95
/* CFG statistics.  */
struct cfg_stats_d
{
  /* NOTE(review): presumably counts labels merged during block merging
     (see gimple_merge_blocks) — the counter is only zeroed in this
     chunk; confirm against its users.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table entry mapping a source locus to the last discriminator
   assigned for that locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
113
/* Hashtable helpers.  */

/* Hasher for the locus-to-discriminator map.  Entries are heap
   allocated (XNEW), so typed_free_remove releases them with free.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  /* Hash and compare on the line number only; see definitions below.  */
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
123
124 /* Trivial hash function for a location_t. ITEM is a pointer to
125 a hash table entry that maps a location_t to a discriminator. */
126
127 inline hashval_t
128 locus_discrim_hasher::hash (const value_type *item)
129 {
130 return LOCATION_LINE (item->locus);
131 }
132
133 /* Equality function for the locus-to-discriminator map. A and B
134 point to the two hash table entries to compare. */
135
136 inline bool
137 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
138 {
139 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
140 }
141
/* Per-line map of the last discriminator assigned; created and
   disposed around edge construction in build_gimple_cfg.  */
static hash_table <locus_discrim_hasher> discriminator_per_locus;
143
144 /* Basic blocks and flowgraphs. */
145 static void make_blocks (gimple_seq);
146 static void factor_computed_gotos (void);
147
148 /* Edges. */
149 static void make_edges (void);
150 static void assign_discriminators (void);
151 static void make_cond_expr_edges (basic_block);
152 static void make_gimple_switch_edges (basic_block);
153 static void make_goto_expr_edges (basic_block);
154 static void make_gimple_asm_edges (basic_block);
155 static edge gimple_redirect_edge_and_branch (edge, basic_block);
156 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
157 static unsigned int split_critical_edges (void);
158
159 /* Various helpers. */
160 static inline bool stmt_starts_bb_p (gimple, gimple);
161 static int gimple_verify_flow_info (void);
162 static void gimple_make_forwarder_block (edge);
163 static gimple first_non_label_stmt (basic_block);
164 static bool verify_gimple_transaction (gimple);
165
166 /* Flowgraph optimization and cleanup. */
167 static void gimple_merge_blocks (basic_block, basic_block);
168 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
169 static void remove_bb (basic_block);
170 static edge find_taken_edge_computed_goto (basic_block, tree);
171 static edge find_taken_edge_cond_expr (basic_block, tree);
172 static edge find_taken_edge_switch_expr (basic_block, tree);
173 static tree find_case_label_for_value (gimple, tree);
174
/* Initialize an empty CFG for function FN: allocate the basic-block
   and label-to-block arrays, install the fixed ENTRY/EXIT blocks and
   chain them together.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
			 initial_cfg_capacity);

  /* Install the two fixed blocks at their reserved indices...  */
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
				ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
				EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  /* ...and link them so ENTRY immediately precedes EXIT in the
     block chain.  */
  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}
202
/* Initialize an empty CFG for the current function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
208
209 /*---------------------------------------------------------------------------
210 Create basic blocks
211 ---------------------------------------------------------------------------*/
212
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  Builds the blocks, then
   the edges, cleaning up dead labels and grouping case labels along
   the way.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  /* make_blocks sets this flag when it encounters a computed goto.  */
  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (basic_block_info->length () < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator table only
     needs to live for the duration of edge creation.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}
260
261
262 /* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
263 it and set loop->safelen to INT_MAX. We assume that the annotation
264 comes immediately before the condition. */
265
266 static void
267 replace_loop_annotate ()
268 {
269 struct loop *loop;
270 basic_block bb;
271 gimple_stmt_iterator gsi;
272 gimple stmt;
273
274 FOR_EACH_LOOP (loop, 0)
275 {
276 gsi = gsi_last_bb (loop->header);
277 stmt = gsi_stmt (gsi);
278 if (stmt && gimple_code (stmt) == GIMPLE_COND)
279 {
280 gsi_prev_nondebug (&gsi);
281 if (gsi_end_p (gsi))
282 continue;
283 stmt = gsi_stmt (gsi);
284 if (gimple_code (stmt) != GIMPLE_CALL)
285 continue;
286 if (!gimple_call_internal_p (stmt)
287 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
288 continue;
289 if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
290 != annot_expr_ivdep_kind)
291 continue;
292 stmt = gimple_build_assign (gimple_call_lhs (stmt),
293 gimple_call_arg (stmt, 0));
294 gsi_replace (&gsi, stmt, true);
295 loop->safelen = INT_MAX;
296 }
297 }
298
299 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
300 FOR_EACH_BB (bb)
301 {
302 gsi = gsi_last_bb (bb);
303 stmt = gsi_stmt (gsi);
304 if (stmt && gimple_code (stmt) == GIMPLE_COND)
305 gsi_prev_nondebug (&gsi);
306 if (gsi_end_p (gsi))
307 continue;
308 stmt = gsi_stmt (gsi);
309 if (gimple_code (stmt) != GIMPLE_CALL)
310 continue;
311 if (!gimple_call_internal_p (stmt)
312 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
313 continue;
314 if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
315 != annot_expr_ivdep_kind)
316 continue;
317 warning_at (gimple_location (stmt), 0, "ignoring %<GCC ivdep%> "
318 "annotation");
319 stmt = gimple_build_assign (gimple_call_lhs (stmt),
320 gimple_call_arg (stmt, 0));
321 gsi_replace (&gsi, stmt, true);
322 }
323 }
324
325
/* Pass entry point: build the CFG for the current function from its
   GIMPLE body, clean it up, and initialize the loop structures.
   Always returns 0 (no extra TODO flags).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statements now live in the CFG's blocks; drop the decl's
     separate body so there is a single owner.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
343
namespace {

/* Pass descriptor for the CFG-construction pass.  */

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

/* Pass object that builds the CFG (and loops) for the current
   function; the real work is in execute_build_cfg.  */

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace
374
/* Factory for the CFG-construction pass; the pass manager takes
   ownership of the returned object.  */

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
380
381
382 /* Return true if T is a computed goto. */
383
384 static bool
385 computed_goto_p (gimple t)
386 {
387 return (gimple_code (t) == GIMPLE_GOTO
388 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
389 }
390
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      /* Find the successor of PRED_BB that is not E's destination.  */
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      /* The unreachable block has no successors.  */
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  /* Skip over a leading debug statement, if any.  */
	  if (is_gimple_debug (stmt))
	    {
	      gsi_next_nondebug (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
434
435
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  The factored block is created lazily, on the first
   computed goto encountered.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
	continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  gimple assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (!factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
	      factored_computed_goto_label
		= gimple_build_label (factored_label_decl);
	      gsi_insert_after (&new_gsi, factored_computed_goto_label,
				GSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = gimple_build_goto (var);
	      gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = gimple_build_assign (var, gimple_goto_dest (last));
	  gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  gimple_goto_set_dest (last, factored_label_decl);
	}
    }
}
510
511
/* Build a flowgraph for the sequence of stmts SEQ: split SEQ at block
   boundaries, creating a basic block for each fragment and assigning
   every statement to its block.  Sets found_computed_goto as a side
   effect.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      /* Rewrite "lhs = call" into "tmp = call; lhs = tmp".  */
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
579
580
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-NULL, is the gimple_seq to seed the block with; E must be NULL
   (this CFG-hook signature is shared with the RTL implementation,
   which uses it).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed (by an extra ~25%).  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block++;

  return bb;
}
617
618
619 /*---------------------------------------------------------------------------
620 Edge creation
621 ---------------------------------------------------------------------------*/
622
623 /* Fold COND_EXPR_COND of each COND_EXPR. */
624
625 void
626 fold_cond_expr_cond (void)
627 {
628 basic_block bb;
629
630 FOR_EACH_BB (bb)
631 {
632 gimple stmt = last_stmt (bb);
633
634 if (stmt && gimple_code (stmt) == GIMPLE_COND)
635 {
636 location_t loc = gimple_location (stmt);
637 tree cond;
638 bool zerop, onep;
639
640 fold_defer_overflow_warnings ();
641 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
642 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
643 if (cond)
644 {
645 zerop = integer_zerop (cond);
646 onep = integer_onep (cond);
647 }
648 else
649 zerop = onep = false;
650
651 fold_undefer_overflow_warnings (zerop || onep,
652 stmt,
653 WARN_STRICT_OVERFLOW_CONDITIONAL);
654 if (zerop)
655 gimple_cond_make_false (stmt);
656 else if (onep)
657 gimple_cond_make_true (stmt);
658 }
659 }
660 }
661
/* Join all the blocks in the flowgraph: for each basic block, create
   outgoing edges determined by the block's last statement.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	      fallthru = false;
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru = make_eh_dispatch_edges (last);
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
	      /* Some calls are known not to return.  */
	      else
		fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    CASE_GIMPLE_OMP:
	      fallthru = make_gimple_omp_edges (bb, &cur_region);
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label = gimple_transaction_label (last);
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      /* Empty blocks, and blocks whose last statement does not end the
	 block, simply fall through to the next block in the chain.  */
      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
771
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  Creates the table entry on first use; the returned
   value is always >= 1.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  /* HTAB_EMPTY_ENTRY marks a freshly inserted, uninitialized slot;
     allocate the persistent entry for it.  */
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
798
799 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
800
801 static bool
802 same_line_p (location_t locus1, location_t locus2)
803 {
804 expanded_location from, to;
805
806 if (locus1 == locus2)
807 return true;
808
809 from = expand_location (locus1);
810 to = expand_location (locus2);
811
812 if (from.line != to.line)
813 return false;
814 if (from.file == to.file)
815 return true;
816 return (from.file != NULL
817 && to.file != NULL
818 && filename_cmp (from.file, to.file) == 0);
819 }
820
821 /* Assign discriminators to each basic block. */
822
823 static void
824 assign_discriminators (void)
825 {
826 basic_block bb;
827
828 FOR_EACH_BB (bb)
829 {
830 edge e;
831 edge_iterator ei;
832 gimple last = last_stmt (bb);
833 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
834
835 if (locus == UNKNOWN_LOCATION)
836 continue;
837
838 FOR_EACH_EDGE (e, ei, bb->succs)
839 {
840 gimple first = first_non_label_stmt (e->dest);
841 gimple last = last_stmt (e->dest);
842 if ((first && same_line_p (locus, gimple_location (first)))
843 || (last && same_line_p (locus, gimple_location (last))))
844 {
845 if (e->dest->discriminator != 0 && bb->discriminator == 0)
846 bb->discriminator = next_discriminator_for_locus (locus);
847 else
848 e->dest->discriminator = next_discriminator_for_locus (locus);
849 }
850 }
851 }
852 }
853
/* Create the edges for a GIMPLE_COND starting at block BB: a
   TRUE_VALUE edge to the then-block and a FALSE_VALUE edge to the
   else-block, then drop the labels from the condition.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* NOTE(review): only this second make_edge result is null-checked —
     presumably it can return NULL when then_bb == else_bb and the edge
     already exists; confirm against make_edge's contract.  */
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
886
887
888 /* Called for each element in the hash table (P) as we delete the
889 edge to cases hash table.
890
891 Clear all the TREE_CHAINs to prevent problems with copying of
892 SWITCH_EXPRs and structure sharing rules, then free the hash table
893 element. */
894
895 static bool
896 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
897 void *data ATTRIBUTE_UNUSED)
898 {
899 tree t, next;
900
901 for (t = (tree) *value; t; t = next)
902 {
903 next = CASE_CHAIN (t);
904 CASE_CHAIN (t) = NULL;
905 }
906
907 *value = NULL;
908 return true;
909 }
910
/* Start recording information mapping edges to case labels.
   Must not be nested: edge_to_cases is expected to be NULL here.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
920
921 /* Return nonzero if we are recording information for case labels. */
922
923 static bool
924 recording_case_labels_p (void)
925 {
926 return (edge_to_cases != NULL);
927 }
928
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  Re-groups case labels
   in every GIMPLE_SWITCH whose block was touched while recording.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Clear the CASE_CHAINs we threaded through the case labels, then
     drop the map itself.  */
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      /* The block may have been removed since it was touched.  */
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (stmt);
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
951
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  The CASE_CHAIN field threads together all labels
	 that map to the same edge.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  /* E is one of T's outgoing edges, so the lookup must now succeed.  */
  return (tree) *pointer_map_contains (edge_to_cases, e);
}
993
994 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
995
996 static void
997 make_gimple_switch_edges (basic_block bb)
998 {
999 gimple entry = last_stmt (bb);
1000 size_t i, n;
1001
1002 n = gimple_switch_num_labels (entry);
1003
1004 for (i = 0; i < n; ++i)
1005 {
1006 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1007 basic_block label_bb = label_to_block (lab);
1008 make_edge (bb, label_bb, 0);
1009 }
1010 }
1011
1012
/* Return the basic block holding label DEST in function IFUN, or NULL
   when the label is not in the map.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      /* Building the label assigns it a UID as a side effect.  */
      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
1036
/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.
   An abnormal edge is made to every block whose leading labels include
   a suitable target, and to every block starting with a
   returns-twice (setjmp-like) call.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      /* Scan the leading labels of TARGET_BB.  */
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple label_stmt = gsi_stmt (gsi);
	  tree target;

	  if (gimple_code (label_stmt) != GIMPLE_LABEL)
	    break;

	  target = gimple_label_label (label_stmt);

	  /* Make an edge to every label block that has been marked as a
	     potential target for a computed goto or a non-local goto.  */
	  if ((FORCED_LABEL (target) && !for_call)
	      || (DECL_NONLOCAL (target) && for_call))
	    {
	      make_edge (bb, target_bb, EDGE_ABNORMAL);
	      break;
	    }
	}
      /* Skip past a debug statement to reach the block's first real
	 statement.  */
      if (!gsi_end_p (gsi)
	  && is_gimple_debug (gsi_stmt (gsi)))
	gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
	{
	  /* Make an edge to every setjmp-like call.  */
	  gimple call_stmt = gsi_stmt (gsi);
	  if (is_gimple_call (call_stmt)
	      && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
	    make_edge (bb, target_bb, EDGE_ABNORMAL);
	}
    }
}
1080
1081 /* Create edges for a goto statement at block BB. */
1082
1083 static void
1084 make_goto_expr_edges (basic_block bb)
1085 {
1086 gimple_stmt_iterator last = gsi_last_bb (bb);
1087 gimple goto_t = gsi_stmt (last);
1088
1089 /* A simple GOTO creates normal edges. */
1090 if (simple_goto_p (goto_t))
1091 {
1092 tree dest = gimple_goto_dest (goto_t);
1093 basic_block label_bb = label_to_block (dest);
1094 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1095 e->goto_locus = gimple_location (goto_t);
1096 gsi_remove (&last, true);
1097 return;
1098 }
1099
1100 /* A computed GOTO creates abnormal edges. */
1101 make_abnormal_goto_edges (bb, false);
1102 }
1103
1104 /* Create edges for an asm statement with labels at block BB. */
1105
1106 static void
1107 make_gimple_asm_edges (basic_block bb)
1108 {
1109 gimple stmt = last_stmt (bb);
1110 int i, n = gimple_asm_nlabels (stmt);
1111
1112 for (i = 0; i < n; ++i)
1113 {
1114 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1115 basic_block label_bb = label_to_block (label);
1116 make_edge (bb, label_bb, 0);
1117 }
1118 }
1119
1120 /*---------------------------------------------------------------------------
1121 Flowgraph analysis
1122 ---------------------------------------------------------------------------*/
1123
1124 /* Cleanup useless labels in basic blocks. This is something we wish
1125 to do early because it allows us to group case labels before creating
1126 the edges for the CFG, and it speeds up block statement iterators in
1127 all passes later on.
1128 We rerun this pass after CFG is created, to get rid of the labels that
1129 are no longer referenced. After then we do not run it any more, since
1130 (almost) no new labels should be created. */
1131
/* A map from basic block index to the leading label of that block.
   Allocated, filled and freed by cleanup_dead_labels.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1141
1142 /* Given LABEL return the first label in the same basic block. */
1143
1144 static tree
1145 main_block_label (tree label)
1146 {
1147 basic_block bb = label_to_block (label);
1148 tree main_label = label_for_bb[bb->index].label;
1149
1150 /* label_to_block possibly inserted undefined label into the chain. */
1151 if (!main_label)
1152 {
1153 label_for_bb[bb->index].label = label;
1154 main_label = label;
1155 }
1156
1157 label_for_bb[bb->index].used = true;
1158 return main_label;
1159 }
1160
/* Clean up redundant labels within the exception tree.  Each label
   referenced from a landing pad or EH region is replaced by the
   canonical (leading) label of its basic block.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Canonicalize each landing pad's post-landing-pad label, moving the
     landing pad number onto the replacement label.  Index 0 of lp_array
     is skipped.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Canonicalize the labels held in the EH region tree itself.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no label.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
1211
1212
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  /* One zero-initialized record per basic block index.  */
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  /* Labels are always at the start of a block.  */
	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  /* Both arms of the conditional may carry a label.  */
	  label = gimple_cond_true_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_true_label (stmt, new_label);
	    }

	  label = gimple_cond_false_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_false_label (stmt, new_label);
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    /* Canonicalize the label operands of an asm goto.  */
	    int i, n = gimple_asm_nlabels (stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      label = gimple_goto_dest (stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    tree label = gimple_transaction_label (stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* Keep the canonical label, user labels, and labels whose
	     address may be live at run time.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
1388
1389 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1390 the ones jumping to the same label.
1391 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1392
1393 void
1394 group_case_labels_stmt (gimple stmt)
1395 {
1396 int old_size = gimple_switch_num_labels (stmt);
1397 int i, j, new_size = old_size;
1398 basic_block default_bb = NULL;
1399
1400 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1401
1402 /* Look for possible opportunities to merge cases. */
1403 i = 1;
1404 while (i < old_size)
1405 {
1406 tree base_case, base_high;
1407 basic_block base_bb;
1408
1409 base_case = gimple_switch_label (stmt, i);
1410
1411 gcc_assert (base_case);
1412 base_bb = label_to_block (CASE_LABEL (base_case));
1413
1414 /* Discard cases that have the same destination as the
1415 default case. */
1416 if (base_bb == default_bb)
1417 {
1418 gimple_switch_set_label (stmt, i, NULL_TREE);
1419 i++;
1420 new_size--;
1421 continue;
1422 }
1423
1424 base_high = CASE_HIGH (base_case)
1425 ? CASE_HIGH (base_case)
1426 : CASE_LOW (base_case);
1427 i++;
1428
1429 /* Try to merge case labels. Break out when we reach the end
1430 of the label vector or when we cannot merge the next case
1431 label with the current one. */
1432 while (i < old_size)
1433 {
1434 tree merge_case = gimple_switch_label (stmt, i);
1435 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1436 double_int bhp1 = tree_to_double_int (base_high) + double_int_one;
1437
1438 /* Merge the cases if they jump to the same place,
1439 and their ranges are consecutive. */
1440 if (merge_bb == base_bb
1441 && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
1442 {
1443 base_high = CASE_HIGH (merge_case) ?
1444 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1445 CASE_HIGH (base_case) = base_high;
1446 gimple_switch_set_label (stmt, i, NULL_TREE);
1447 new_size--;
1448 i++;
1449 }
1450 else
1451 break;
1452 }
1453 }
1454
1455 /* Compress the case labels in the label vector, and adjust the
1456 length of the vector. */
1457 for (i = 0, j = 0; i < new_size; i++)
1458 {
1459 while (! gimple_switch_label (stmt, j))
1460 j++;
1461 gimple_switch_set_label (stmt, i,
1462 gimple_switch_label (stmt, j++));
1463 }
1464
1465 gcc_assert (new_size <= old_size);
1466 gimple_switch_set_num_labels (stmt, new_size);
1467 }
1468
1469 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1470 and scan the sorted vector of cases. Combine the ones jumping to the
1471 same label. */
1472
1473 void
1474 group_case_labels (void)
1475 {
1476 basic_block bb;
1477
1478 FOR_EACH_BB (bb)
1479 {
1480 gimple stmt = last_stmt (bb);
1481 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1482 group_case_labels_stmt (stmt);
1483 }
1484 }
1485
/* Checks whether we can merge block B into block A.

   Merging requires that A's single successor be B via a normal edge,
   that B have A as its single predecessor, that neither block's
   contents forbid the merge (control-flow-ending statements, protected
   labels, un-eliminable PHIs, loop latches), and at -O0 that no
   goto_locus would be lost.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  /* A must flow unconditionally into B and B must have no other
     predecessors.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  The
     locus survives only if an adjacent statement already carries it.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
1569
/* Replaces all uses of NAME by VAL.

   Non-PHI statements that were changed are folded, updated, and have
   any now-dead EH edges purged.  Trees stored in loop structures
   (e.g. niter information) are updated as well.  After this returns
   NAME has zero uses.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      /* VAL now flows over the edge of this PHI argument.  */
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* Mark the block if we changed the last stmt in it.  */
	  if (cfgcleanup_altered_bbs
	      && stmt_ends_bb_p (stmt))
	    bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  /* Folding may replace the statement; keep STMT current.  */
	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1649
/* Merge block B into block A.  The caller is responsible for checking
   gimple_can_merge_blocks_p first.  B's PHI nodes are eliminated (by
   propagation or by emitting copies at the end of A), B's labels are
   removed or relocated, and B's statement sequence is appended to A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
        {
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* A removed landing-pad label no longer has a pad.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1777
1778
1779 /* Return the one of two successors of BB that is not reachable by a
1780 complex edge, if there is one. Else, return BB. We use
1781 this in optimizations that use post-dominators for their heuristics,
1782 to catch the cases in C++ where function calls are involved. */
1783
1784 basic_block
1785 single_noncomplex_succ (basic_block bb)
1786 {
1787 edge e0, e1;
1788 if (EDGE_COUNT (bb->succs) != 2)
1789 return bb;
1790
1791 e0 = EDGE_SUCC (bb, 0);
1792 e1 = EDGE_SUCC (bb, 1);
1793 if (e0->flags & EDGE_COMPLEX)
1794 return e1->dest;
1795 if (e1->flags & EDGE_COMPLEX)
1796 return e0->dest;
1797
1798 return bb;
1799 }
1800
1801 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1802
1803 void
1804 notice_special_calls (gimple call)
1805 {
1806 int flags = gimple_call_flags (call);
1807
1808 if (flags & ECF_MAY_BE_ALLOCA)
1809 cfun->calls_alloca = true;
1810 if (flags & ECF_RETURNS_TWICE)
1811 cfun->calls_setjmp = true;
1812 }
1813
1814
1815 /* Clear flags set by notice_special_calls. Used by dead code removal
1816 to update the flags. */
1817
1818 void
1819 clear_special_calls (void)
1820 {
1821 cfun->calls_alloca = false;
1822 cfun->calls_setjmp = false;
1823 }
1824
1825 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1826
1827 static void
1828 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1829 {
1830 /* Since this block is no longer reachable, we can just delete all
1831 of its PHI nodes. */
1832 remove_phi_nodes (bb);
1833
1834 /* Remove edges to BB's successors. */
1835 while (EDGE_COUNT (bb->succs) > 0)
1836 remove_edge (EDGE_SUCC (bb, 0));
1837 }
1838
1839
/* Remove statements of basic block BB.  Forced and non-local labels
   are rescued into the previous block; everything else is deleted
   (releasing SSA defs when in SSA form).  Finally the block's PHIs and
   outgoing edges are removed.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && (FORCED_LABEL (gimple_label_label (stmt))
		  || DECL_NONLOCAL (gimple_label_label (stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (stmt)) = 1;
		}

	      /* Move the label into the preceding block.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* Re-anchor the iterator: gsi_remove left it past the removed
	     statement, so resync to the last (or previous) statement.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
1922
1923
1924 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1925 predicate VAL, return the edge that will be taken out of the block.
1926 If VAL does not match a unique edge, NULL is returned. */
1927
1928 edge
1929 find_taken_edge (basic_block bb, tree val)
1930 {
1931 gimple stmt;
1932
1933 stmt = last_stmt (bb);
1934
1935 gcc_assert (stmt);
1936 gcc_assert (is_ctrl_stmt (stmt));
1937
1938 if (val == NULL)
1939 return NULL;
1940
1941 if (!is_gimple_min_invariant (val))
1942 return NULL;
1943
1944 if (gimple_code (stmt) == GIMPLE_COND)
1945 return find_taken_edge_cond_expr (bb, val);
1946
1947 if (gimple_code (stmt) == GIMPLE_SWITCH)
1948 return find_taken_edge_switch_expr (bb, val);
1949
1950 if (computed_goto_p (stmt))
1951 {
1952 /* Only optimize if the argument is a label, if the argument is
1953 not a label then we can not construct a proper CFG.
1954
1955 It may be the case that we only need to allow the LABEL_REF to
1956 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1957 appear inside a LABEL_EXPR just to be safe. */
1958 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1959 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1960 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1961 return NULL;
1962 }
1963
1964 gcc_unreachable ();
1965 }
1966
1967 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1968 statement, determine which of the outgoing edges will be taken out of the
1969 block. Return NULL if either edge may be taken. */
1970
1971 static edge
1972 find_taken_edge_computed_goto (basic_block bb, tree val)
1973 {
1974 basic_block dest;
1975 edge e = NULL;
1976
1977 dest = label_to_block (val);
1978 if (dest)
1979 {
1980 e = find_edge (bb, dest);
1981 gcc_assert (e != NULL);
1982 }
1983
1984 return e;
1985 }
1986
1987 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1988 statement, determine which of the two edges will be taken out of the
1989 block. Return NULL if either edge may be taken. */
1990
1991 static edge
1992 find_taken_edge_cond_expr (basic_block bb, tree val)
1993 {
1994 edge true_edge, false_edge;
1995
1996 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1997
1998 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1999 return (integer_zerop (val) ? false_edge : true_edge);
2000 }
2001
2002 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2003 statement, determine which edge will be taken out of the block. Return
2004 NULL if any edge may be taken. */
2005
2006 static edge
2007 find_taken_edge_switch_expr (basic_block bb, tree val)
2008 {
2009 basic_block dest_bb;
2010 edge e;
2011 gimple switch_stmt;
2012 tree taken_case;
2013
2014 switch_stmt = last_stmt (bb);
2015 taken_case = find_case_label_for_value (switch_stmt, val);
2016 dest_bb = label_to_block (CASE_LABEL (taken_case));
2017
2018 e = find_edge (bb, dest_bb);
2019 gcc_assert (e);
2020 return e;
2021 }
2022
2023
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.
   Returns the default label if no case matches.  */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Invariant: LOW < HIGH, and the matching case (if any) lies in
     (LOW, HIGH].  The default label at index 0 is never dereferenced:
     with N >= 2 the midpoint I is always >= 1, and with N == 1 the
     loop body never runs.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A singe-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
2064
2065
2066 /* Dump a basic block on stderr. */
2067
2068 void
2069 gimple_debug_bb (basic_block bb)
2070 {
2071 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2072 }
2073
2074
2075 /* Dump basic block with index N on stderr. */
2076
2077 basic_block
2078 gimple_debug_bb_n (int n)
2079 {
2080 gimple_debug_bb (BASIC_BLOCK (n));
2081 return BASIC_BLOCK (n);
2082 }
2083
2084
2085 /* Dump the CFG on stderr.
2086
2087 FLAGS are the same used by the tree dumping functions
2088 (see TDF_* in dumpfile.h). */
2089
2090 void
2091 gimple_debug_cfg (int flags)
2092 {
2093 gimple_dump_cfg (stderr, flags);
2094 }
2095
2096
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  With TDF_DETAILS a header and a brief CFG dump are emitted
   first; with TDF_STATS CFG memory statistics are included.  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
	       last_basic_block);

      brief_dump_cfg (file, flags | TDF_COMMENT);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2121
2122
/* Dump CFG statistics on FILE: memory consumed by basic blocks and
   edges of the current function, plus the running maximum of merged
   labels across all functions dumped so far.  */

void
dump_cfg_stats (FILE *file)
{
  /* Persists across calls so the "Max so far" line is meaningful.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
	   SCALE (size), LABEL (size));

  /* Count edges by summing successor counts over all blocks.  */
  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2171
2172
2173 /* Dump CFG statistics on stderr. Keep extern so that it's always
2174 linked in the final executable. */
2175
2176 DEBUG_FUNCTION void
2177 debug_cfg_stats (void)
2178 {
2179 dump_cfg_stats (stderr);
2180 }
2181
2182 /*---------------------------------------------------------------------------
2183 Miscellaneous helpers
2184 ---------------------------------------------------------------------------*/
2185
2186 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2187 flow. Transfers of control flow associated with EH are excluded. */
2188
2189 static bool
2190 call_can_make_abnormal_goto (gimple t)
2191 {
2192 /* If the function has no non-local labels, then a call cannot make an
2193 abnormal transfer of control. */
2194 if (!cfun->has_nonlocal_label
2195 && !cfun->calls_setjmp)
2196 return false;
2197
2198 /* Likewise if the call has no side effects. */
2199 if (!gimple_has_side_effects (t))
2200 return false;
2201
2202 /* Likewise if the called function is leaf. */
2203 if (gimple_call_flags (t) & ECF_LEAF)
2204 return false;
2205
2206 return true;
2207 }
2208
2209
2210 /* Return true if T can make an abnormal transfer of control flow.
2211 Transfers of control flow associated with EH are excluded. */
2212
2213 bool
2214 stmt_can_make_abnormal_goto (gimple t)
2215 {
2216 if (computed_goto_p (t))
2217 return true;
2218 if (is_gimple_call (t))
2219 return call_can_make_abnormal_goto (t);
2220 return false;
2221 }
2222
2223
2224 /* Return true if T represents a stmt that always transfers control. */
2225
2226 bool
2227 is_ctrl_stmt (gimple t)
2228 {
2229 switch (gimple_code (t))
2230 {
2231 case GIMPLE_COND:
2232 case GIMPLE_SWITCH:
2233 case GIMPLE_GOTO:
2234 case GIMPLE_RETURN:
2235 case GIMPLE_RESX:
2236 return true;
2237 default:
2238 return false;
2239 }
2240 }
2241
2242
2243 /* Return true if T is a statement that may alter the flow of control
2244 (e.g., a call to a non-returning function). */
2245
2246 bool
2247 is_ctrl_altering_stmt (gimple t)
2248 {
2249 gcc_assert (t);
2250
2251 switch (gimple_code (t))
2252 {
2253 case GIMPLE_CALL:
2254 {
2255 int flags = gimple_call_flags (t);
2256
2257 /* A call alters control flow if it can make an abnormal goto. */
2258 if (call_can_make_abnormal_goto (t))
2259 return true;
2260
2261 /* A call also alters control flow if it does not return. */
2262 if (flags & ECF_NORETURN)
2263 return true;
2264
2265 /* TM ending statements have backedges out of the transaction.
2266 Return true so we split the basic block containing them.
2267 Note that the TM_BUILTIN test is merely an optimization. */
2268 if ((flags & ECF_TM_BUILTIN)
2269 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2270 return true;
2271
2272 /* BUILT_IN_RETURN call is same as return statement. */
2273 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2274 return true;
2275 }
2276 break;
2277
2278 case GIMPLE_EH_DISPATCH:
2279 /* EH_DISPATCH branches to the individual catch handlers at
2280 this level of a try or allowed-exceptions region. It can
2281 fallthru to the next statement as well. */
2282 return true;
2283
2284 case GIMPLE_ASM:
2285 if (gimple_asm_nlabels (t) > 0)
2286 return true;
2287 break;
2288
2289 CASE_GIMPLE_OMP:
2290 /* OpenMP directives alter control flow. */
2291 return true;
2292
2293 case GIMPLE_TRANSACTION:
2294 /* A transaction start alters control flow. */
2295 return true;
2296
2297 default:
2298 break;
2299 }
2300
2301 /* If a statement can throw, it alters control flow. */
2302 return stmt_can_throw_internal (t);
2303 }
2304
2305
2306 /* Return true if T is a simple local goto. */
2307
2308 bool
2309 simple_goto_p (gimple t)
2310 {
2311 return (gimple_code (t) == GIMPLE_GOTO
2312 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2313 }
2314
2315
2316 /* Return true if STMT should start a new basic block. PREV_STMT is
2317 the statement preceding STMT. It is used when STMT is a label or a
2318 case label. Labels should only start a new basic block if their
2319 previous statement wasn't a label. Otherwise, sequence of labels
2320 would generate unnecessary basic blocks that only contain a single
2321 label. */
2322
2323 static inline bool
2324 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2325 {
2326 if (stmt == NULL)
2327 return false;
2328
2329 /* Labels start a new basic block only if the preceding statement
2330 wasn't a label of the same type. This prevents the creation of
2331 consecutive blocks that have nothing but a single label. */
2332 if (gimple_code (stmt) == GIMPLE_LABEL)
2333 {
2334 /* Nonlocal and computed GOTO targets always start a new block. */
2335 if (DECL_NONLOCAL (gimple_label_label (stmt))
2336 || FORCED_LABEL (gimple_label_label (stmt)))
2337 return true;
2338
2339 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2340 {
2341 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2342 return true;
2343
2344 cfg_stats.num_merged_labels++;
2345 return false;
2346 }
2347 else
2348 return true;
2349 }
2350 else if (gimple_code (stmt) == GIMPLE_CALL
2351 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2352 /* setjmp acts similar to a nonlocal GOTO target and thus should
2353 start a new block. */
2354 return true;
2355
2356 return false;
2357 }
2358
2359
2360 /* Return true if T should end a basic block. */
2361
2362 bool
2363 stmt_ends_bb_p (gimple t)
2364 {
2365 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2366 }
2367
2368 /* Remove block annotations and other data structures. */
2369
void
delete_tree_cfg_annotations (void)
{
  /* Release the label-to-block map.  */
  vec_free (label_to_block_map);
}
2375
2376
2377 /* Return the first statement in basic block BB. */
2378
2379 gimple
2380 first_stmt (basic_block bb)
2381 {
2382 gimple_stmt_iterator i = gsi_start_bb (bb);
2383 gimple stmt = NULL;
2384
2385 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2386 {
2387 gsi_next (&i);
2388 stmt = NULL;
2389 }
2390 return stmt;
2391 }
2392
2393 /* Return the first non-label statement in basic block BB. */
2394
2395 static gimple
2396 first_non_label_stmt (basic_block bb)
2397 {
2398 gimple_stmt_iterator i = gsi_start_bb (bb);
2399 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2400 gsi_next (&i);
2401 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2402 }
2403
2404 /* Return the last statement in basic block BB. */
2405
2406 gimple
2407 last_stmt (basic_block bb)
2408 {
2409 gimple_stmt_iterator i = gsi_last_bb (bb);
2410 gimple stmt = NULL;
2411
2412 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2413 {
2414 gsi_prev (&i);
2415 stmt = NULL;
2416 }
2417 return stmt;
2418 }
2419
2420 /* Return the last statement of an otherwise empty block. Return NULL
2421 if the block is totally empty, or if it contains more than one
2422 statement. */
2423
2424 gimple
2425 last_and_only_stmt (basic_block bb)
2426 {
2427 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2428 gimple last, prev;
2429
2430 if (gsi_end_p (i))
2431 return NULL;
2432
2433 last = gsi_stmt (i);
2434 gsi_prev_nondebug (&i);
2435 if (gsi_end_p (i))
2436 return last;
2437
2438 /* Empty statements should no longer appear in the instruction stream.
2439 Everything that might have appeared before should be deleted by
2440 remove_useless_stmts, and the optimizers should just gsi_remove
2441 instead of smashing with build_empty_stmt.
2442
2443 Thus the only thing that should appear here in a block containing
2444 one executable statement is a label. */
2445 prev = gsi_stmt (i);
2446 if (gimple_code (prev) == GIMPLE_LABEL)
2447 return last;
2448 else
2449 return NULL;
2450 }
2451
2452 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2453
static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map_vector *v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator phis;

  /* Fetch the (result, def, location) entries queued for OLD_EDGE;
     nothing to do if none were recorded.  */
  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the queued entries and the PHI nodes of NEW_EDGE's destination
     in lockstep, re-adding each queued argument on NEW_EDGE.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple phi = gsi_stmt (phis);
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* The queued entries must correspond to the PHIs in order.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* The queued map for OLD_EDGE has been consumed; discard it.  */
  redirect_edge_var_map_clear (old_edge);
}
2481
2482 /* Returns the basic block after which the new basic block created
2483 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2484 near its "logical" location. This is of most help to humans looking
2485 at debugging dumps. */
2486
2487 static basic_block
2488 split_edge_bb_loc (edge edge_in)
2489 {
2490 basic_block dest = edge_in->dest;
2491 basic_block dest_prev = dest->prev_bb;
2492
2493 if (dest_prev)
2494 {
2495 edge e = find_edge (dest_prev, dest);
2496 if (e && !(e->flags & EDGE_COMPLEX))
2497 return edge_in->src;
2498 }
2499 return dest_prev;
2500 }
2501
2502 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2503 Abort on abnormal edges. */
2504
static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  /* Choose a placement that keeps the new block near its logical
     location in the block layout.  */
  after_bb = split_edge_bb_loc (edge_in);

  /* The new block inherits the frequency and count of the edge being
     split; its single outgoing edge to DEST is always taken.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Redirect EDGE_IN into the new block and move the PHI arguments
     queued by the redirection onto the new fallthru edge.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2531
2532
2533 /* Verify properties of the address expression T with base object BASE. */
2534
2535 static tree
2536 verify_address (tree t, tree base)
2537 {
2538 bool old_constant;
2539 bool old_side_effects;
2540 bool new_constant;
2541 bool new_side_effects;
2542
2543 old_constant = TREE_CONSTANT (t);
2544 old_side_effects = TREE_SIDE_EFFECTS (t);
2545
2546 recompute_tree_invariant_for_addr_expr (t);
2547 new_side_effects = TREE_SIDE_EFFECTS (t);
2548 new_constant = TREE_CONSTANT (t);
2549
2550 if (old_constant != new_constant)
2551 {
2552 error ("constant not recomputed when ADDR_EXPR changed");
2553 return t;
2554 }
2555 if (old_side_effects != new_side_effects)
2556 {
2557 error ("side effects not recomputed when ADDR_EXPR changed");
2558 return t;
2559 }
2560
2561 if (!(TREE_CODE (base) == VAR_DECL
2562 || TREE_CODE (base) == PARM_DECL
2563 || TREE_CODE (base) == RESULT_DECL))
2564 return NULL_TREE;
2565
2566 if (DECL_GIMPLE_REG_P (base))
2567 {
2568 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2569 return base;
2570 }
2571
2572 return NULL_TREE;
2573 }
2574
2575 /* Callback for walk_tree, check that all elements with address taken are
2576 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2577 inside a PHI node. */
2578
static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types carry no GIMPLE operands; do not walk into them.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* Indirect references must have been lowered to MEM_REF.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      /* The address operand must be a pointer valid as a memory
	 reference base.  */
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      /* The offset must be a pointer-typed INTEGER_CST.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes must not survive gimplification.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  /* Position and size operands must fit an unsigned HWI and
	     the size must agree with the precision or mode size of
	     the result type.  */
	  if (!tree_fits_uhwi_p (TREE_OPERAND (t, 1))
	      || !tree_fits_uhwi_p (TREE_OPERAND (t, 2)))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_PRECISION (TREE_TYPE (t))
		  != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		       != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
	    {
	      error ("mode precision of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	}
      /* Continue checking the reference chain below.  */
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      /* These are only valid as the outermost reference.  */
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors are leaves; no need to walk into
	 their elements.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
2861
2862
2863 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2864 Returns true if there is an error, otherwise false. */
2865
2866 static bool
2867 verify_types_in_gimple_min_lval (tree expr)
2868 {
2869 tree op;
2870
2871 if (is_gimple_id (expr))
2872 return false;
2873
2874 if (TREE_CODE (expr) != TARGET_MEM_REF
2875 && TREE_CODE (expr) != MEM_REF)
2876 {
2877 error ("invalid expression for min lvalue");
2878 return true;
2879 }
2880
2881 /* TARGET_MEM_REFs are strange beasts. */
2882 if (TREE_CODE (expr) == TARGET_MEM_REF)
2883 return false;
2884
2885 op = TREE_OPERAND (expr, 0);
2886 if (!is_gimple_val (op))
2887 {
2888 error ("invalid operand in indirect reference");
2889 debug_generic_stmt (op);
2890 return true;
2891 }
2892 /* Memory references now generally can involve a value conversion. */
2893
2894 return false;
2895 }
2896
2897 /* Verify if EXPR is a valid GIMPLE reference expression. If
2898 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2899 if there is an error, otherwise false. */
2900
static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the reference chain from the outside in, checking each
     component against the type of its inner operand.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (when present) must all be
	     GIMPLE values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* Real/imaginary part extraction must yield the complex type's
	 component type.  */
      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* A COMPONENT_REF must have the type of the FIELD_DECL it
	 references.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* EXPR is now the base of the reference chain; check MEM_REF and
     TARGET_MEM_REF operands.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  /* Invariant bases need no min-lval check unless an lvalue is
     required.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3030
3031 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3032 list of pointer-to types that is trivially convertible to DEST. */
3033
3034 static bool
3035 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3036 {
3037 tree src;
3038
3039 if (!TYPE_POINTER_TO (src_obj))
3040 return true;
3041
3042 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3043 if (useless_type_conversion_p (dest, src))
3044 return true;
3045
3046 return false;
3047 }
3048
3049 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3050 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3051
3052 static bool
3053 valid_fixed_convert_types_p (tree type1, tree type2)
3054 {
3055 return (FIXED_POINT_TYPE_P (type1)
3056 && (INTEGRAL_TYPE_P (type2)
3057 || SCALAR_FLOAT_TYPE_P (type2)
3058 || FIXED_POINT_TYPE_P (type2)));
3059 }
3060
3061 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3062 is a problem, otherwise false. */
3063
static bool
verify_gimple_call (gimple stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls carry no fn tree; normal calls must have one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee must be a pointer to a function or method type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* DECL_LOOPING_CONST_OR_PURE_P only makes sense on const or pure
     functions.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  if (gimple_call_lhs (stmt)
      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  /* A call that never returns cannot have a value consumer.  */
  if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, this should not be an indirect
     call, and the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt))
    {
      if (!gimple_call_fndecl (stmt))
	{
	  error ("static chain in indirect gimple call");
	  return true;
	}
      fn = TREE_OPERAND (fn, 0);

      if (!DECL_STATIC_CHAIN (fn))
	{
	  error ("static chain with function that doesn%'t use one");
	  return true;
	}
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-type arguments must be GIMPLE values; aggregate
	 arguments must at least be lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
3196
3197 /* Verifies the gimple comparison with the result type TYPE and
3198 the operands OP0 and OP1. */
3199
static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  /* Both operands must be GIMPLE values.  */
  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    {
      /* Vector comparisons must produce a vector, not a scalar bool.  */
      if (TREE_CODE (op0_type) == VECTOR_TYPE
	  || TREE_CODE (op1_type) == VECTOR_TYPE)
	{
	  error ("vector comparison returning a boolean");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}
    }
  /* Or an integer vector type with the same size and element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
	{
	  error ("non-vector operands in vector comparison");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}

      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
	  /* The result of a vector comparison is of signed
	     integral type.  */
	  || TYPE_UNSIGNED (TREE_TYPE (type)))
	{
	  error ("invalid vector comparison resulting type");
	  debug_generic_expr (type);
	  return true;
	}
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
3279
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_unary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  /* A unary operation must produce its result in a register.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  /* The single operand must be a GIMPLE value (register or invariant).  */
  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions from pointer type to integral type only if
	   there is no sign or zero extension involved.
	   For targets were the precision of ptrofftype doesn't match that
	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || ptrofftype_p (sizetype))))
	  return false;

	/* Allow conversion from integral to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (INTEGRAL_TYPE_P (lhs_type)
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
	/* Both sides must be pointers and the address spaces must
	   actually differ, otherwise the conversion is bogus.  */
	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	  {
	    error ("invalid types in address space conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* A fixed-point conversion is valid in either direction.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer-to-float, either scalar or element-wise on vectors.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
		|| !VECTOR_FLOAT_TYPE_P (lhs_type)))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Float-to-integer, either scalar or element-wise on vectors.  */
	if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	    && (!VECTOR_INTEGER_TYPE_P (lhs_type)
		|| !VECTOR_FLOAT_TYPE_P (rhs1_type)))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case NON_LVALUE_EXPR:
    case CONJ_EXPR:
      /* These are verified by the generic no-conversion check below.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3433
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  /* A binary operation must produce its result in a register.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  /* Both operands must be GIMPLE values (registers or invariants).  */
  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* COMPLEX_EXPR builds a complex value from two scalar parts.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	/* Whole-vector shifts: the first operand is a vector of any
	   scalar element kind; the shift amount is an integer or an
	   integer vector.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
		 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		&& (TREE_CODE (rhs2_type) != VECTOR_TYPE
		    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	/* For shifting a vector of non-integral components we
	   only allow shifting by a constant multiple of the element size.  */
	if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    && (TREE_CODE (rhs2) != INTEGER_CST
		|| !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
					   TYPE_SIZE (TREE_TYPE (rhs1_type)))))
	  {
	    error ("non-element sized vector shift of floating point vector");
	    return true;
	  }

	return false;
      }

    case WIDEN_LSHIFT_EXPR:
      {
	/* The result must be at least twice as wide as the shifted
	   operand, and the shift amount must be constant.  */
	if (!INTEGRAL_TYPE_P (lhs_type)
	    || !INTEGRAL_TYPE_P (rhs1_type)
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
	  {
	    error ("type mismatch in widening vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      {
	/* Same as WIDEN_LSHIFT_EXPR but element-wise on integer
	   vectors.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
		> TYPE_PRECISION (TREE_TYPE (lhs_type))))
	  {
	    error ("type mismatch in widening vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	tree lhs_etype = lhs_type;
	tree rhs1_etype = rhs1_type;
	tree rhs2_etype = rhs2_type;
	/* For vector addition check the element types; pointers may
	   never appear here (POINTER_PLUS_EXPR is used instead).  */
	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    lhs_etype = TREE_TYPE (lhs_type);
	    rhs1_etype = TREE_TYPE (rhs1_type);
	    rhs2_etype = TREE_TYPE (rhs2_type);
	  }
	if (POINTER_TYPE_P (lhs_etype)
	    || POINTER_TYPE_P (rhs1_etype)
	    || POINTER_TYPE_P (rhs2_etype))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
	/* Pointer plus a ptrofftype offset, yielding the same pointer
	   type.  */
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !ptrofftype_p (rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      /* Truth codes are lowered before GIMPLE and must not appear.  */

      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);

    case WIDEN_MULT_EXPR:
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
	return true;
      /* The operands must have equal precision and together fit the
	 result.  Note no error message is emitted for these cases.  */
      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      /* FIXME.  */
      return false;

    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* Generic case: result and both operands must have the same type.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
3707
/* Verify a gimple assignment statement STMT with a ternary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_ternary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);
  tree rhs3 = gimple_assign_rhs3 (stmt);
  tree rhs3_type = TREE_TYPE (rhs3);

  /* A ternary operation must produce its result in a register.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of ternary operation");
      return true;
    }

  /* For (VEC_)COND_EXPR the first operand may be a condition,
     otherwise all three operands must be GIMPLE values.  */
  if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
       ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
      || !is_gimple_val (rhs2)
      || !is_gimple_val (rhs3))
    {
      error ("invalid operands in ternary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      /* lhs = rhs1 * rhs2 +/- rhs3, with the multiply widening from
	 the equal-precision rhs1/rhs2 into the lhs/rhs3 type.  */
      if ((!INTEGRAL_TYPE_P (rhs1_type)
	   && !FIXED_POINT_TYPE_P (rhs1_type))
	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type)
	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
	{
	  error ("type mismatch in widening multiply-accumulate expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case FMA_EXPR:
      /* All three operands and the result share one type.  */
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in fused multiply-add expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* The two selected values must match the result type; the
	 condition (rhs1) was already checked above.  */
      if (!useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in conditional expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case VEC_PERM_EXPR:
      /* Both inputs share the result type.  */
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type))
	{
	  error ("type mismatch in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* All three operands must be vectors.  */
      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
	{
	  error ("vector types expected in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* ... with identical element counts.  */
      if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
	  || TYPE_VECTOR_SUBPARTS (rhs2_type)
	     != TYPE_VECTOR_SUBPARTS (rhs3_type)
	  || TYPE_VECTOR_SUBPARTS (rhs3_type)
	     != TYPE_VECTOR_SUBPARTS (lhs_type))
	{
	  error ("vectors with different element number found "
		 "in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* The mask is an integer vector whose element width matches the
	 data element width.  */
      if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
	  || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
	     != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
	{
	  error ("invalid mask type in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      return false;

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return false;

    default:
      gcc_unreachable ();
    }
  return false;
}
3850
3851 /* Verify a gimple assignment statement STMT with a single rhs.
3852 Returns true if anything is wrong. */
3853
3854 static bool
3855 verify_gimple_assign_single (gimple stmt)
3856 {
3857 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3858 tree lhs = gimple_assign_lhs (stmt);
3859 tree lhs_type = TREE_TYPE (lhs);
3860 tree rhs1 = gimple_assign_rhs1 (stmt);
3861 tree rhs1_type = TREE_TYPE (rhs1);
3862 bool res = false;
3863
3864 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3865 {
3866 error ("non-trivial conversion at assignment");
3867 debug_generic_expr (lhs_type);
3868 debug_generic_expr (rhs1_type);
3869 return true;
3870 }
3871
3872 if (gimple_clobber_p (stmt)
3873 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3874 {
3875 error ("non-decl/MEM_REF LHS in clobber statement");
3876 debug_generic_expr (lhs);
3877 return true;
3878 }
3879
3880 if (handled_component_p (lhs))
3881 res |= verify_types_in_gimple_reference (lhs, true);
3882
3883 /* Special codes we cannot handle via their class. */
3884 switch (rhs_code)
3885 {
3886 case ADDR_EXPR:
3887 {
3888 tree op = TREE_OPERAND (rhs1, 0);
3889 if (!is_gimple_addressable (op))
3890 {
3891 error ("invalid operand in unary expression");
3892 return true;
3893 }
3894
3895 /* Technically there is no longer a need for matching types, but
3896 gimple hygiene asks for this check. In LTO we can end up
3897 combining incompatible units and thus end up with addresses
3898 of globals that change their type to a common one. */
3899 if (!in_lto_p
3900 && !types_compatible_p (TREE_TYPE (op),
3901 TREE_TYPE (TREE_TYPE (rhs1)))
3902 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3903 TREE_TYPE (op)))
3904 {
3905 error ("type mismatch in address expression");
3906 debug_generic_stmt (TREE_TYPE (rhs1));
3907 debug_generic_stmt (TREE_TYPE (op));
3908 return true;
3909 }
3910
3911 return verify_types_in_gimple_reference (op, true);
3912 }
3913
3914 /* tcc_reference */
3915 case INDIRECT_REF:
3916 error ("INDIRECT_REF in gimple IL");
3917 return true;
3918
3919 case COMPONENT_REF:
3920 case BIT_FIELD_REF:
3921 case ARRAY_REF:
3922 case ARRAY_RANGE_REF:
3923 case VIEW_CONVERT_EXPR:
3924 case REALPART_EXPR:
3925 case IMAGPART_EXPR:
3926 case TARGET_MEM_REF:
3927 case MEM_REF:
3928 if (!is_gimple_reg (lhs)
3929 && is_gimple_reg_type (TREE_TYPE (lhs)))
3930 {
3931 error ("invalid rhs for gimple memory store");
3932 debug_generic_stmt (lhs);
3933 debug_generic_stmt (rhs1);
3934 return true;
3935 }
3936 return res || verify_types_in_gimple_reference (rhs1, false);
3937
3938 /* tcc_constant */
3939 case SSA_NAME:
3940 case INTEGER_CST:
3941 case REAL_CST:
3942 case FIXED_CST:
3943 case COMPLEX_CST:
3944 case VECTOR_CST:
3945 case STRING_CST:
3946 return res;
3947
3948 /* tcc_declaration */
3949 case CONST_DECL:
3950 return res;
3951 case VAR_DECL:
3952 case PARM_DECL:
3953 if (!is_gimple_reg (lhs)
3954 && !is_gimple_reg (rhs1)
3955 && is_gimple_reg_type (TREE_TYPE (lhs)))
3956 {
3957 error ("invalid rhs for gimple memory store");
3958 debug_generic_stmt (lhs);
3959 debug_generic_stmt (rhs1);
3960 return true;
3961 }
3962 return res;
3963
3964 case CONSTRUCTOR:
3965 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3966 {
3967 unsigned int i;
3968 tree elt_i, elt_v, elt_t = NULL_TREE;
3969
3970 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3971 return res;
3972 /* For vector CONSTRUCTORs we require that either it is empty
3973 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3974 (then the element count must be correct to cover the whole
3975 outer vector and index must be NULL on all elements, or it is
3976 a CONSTRUCTOR of scalar elements, where we as an exception allow
3977 smaller number of elements (assuming zero filling) and
3978 consecutive indexes as compared to NULL indexes (such
3979 CONSTRUCTORs can appear in the IL from FEs). */
3980 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3981 {
3982 if (elt_t == NULL_TREE)
3983 {
3984 elt_t = TREE_TYPE (elt_v);
3985 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3986 {
3987 tree elt_t = TREE_TYPE (elt_v);
3988 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3989 TREE_TYPE (elt_t)))
3990 {
3991 error ("incorrect type of vector CONSTRUCTOR"
3992 " elements");
3993 debug_generic_stmt (rhs1);
3994 return true;
3995 }
3996 else if (CONSTRUCTOR_NELTS (rhs1)
3997 * TYPE_VECTOR_SUBPARTS (elt_t)
3998 != TYPE_VECTOR_SUBPARTS (rhs1_type))
3999 {
4000 error ("incorrect number of vector CONSTRUCTOR"
4001 " elements");
4002 debug_generic_stmt (rhs1);
4003 return true;
4004 }
4005 }
4006 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4007 elt_t))
4008 {
4009 error ("incorrect type of vector CONSTRUCTOR elements");
4010 debug_generic_stmt (rhs1);
4011 return true;
4012 }
4013 else if (CONSTRUCTOR_NELTS (rhs1)
4014 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4015 {
4016 error ("incorrect number of vector CONSTRUCTOR elements");
4017 debug_generic_stmt (rhs1);
4018 return true;
4019 }
4020 }
4021 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4022 {
4023 error ("incorrect type of vector CONSTRUCTOR elements");
4024 debug_generic_stmt (rhs1);
4025 return true;
4026 }
4027 if (elt_i != NULL_TREE
4028 && (TREE_CODE (elt_t) == VECTOR_TYPE
4029 || TREE_CODE (elt_i) != INTEGER_CST
4030 || compare_tree_int (elt_i, i) != 0))
4031 {
4032 error ("vector CONSTRUCTOR with non-NULL element index");
4033 debug_generic_stmt (rhs1);
4034 return true;
4035 }
4036 }
4037 }
4038 return res;
4039 case OBJ_TYPE_REF:
4040 case ASSERT_EXPR:
4041 case WITH_SIZE_EXPR:
4042 /* FIXME. */
4043 return res;
4044
4045 default:;
4046 }
4047
4048 return res;
4049 }
4050
4051 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4052 is a problem, otherwise false. */
4053
4054 static bool
4055 verify_gimple_assign (gimple stmt)
4056 {
4057 switch (gimple_assign_rhs_class (stmt))
4058 {
4059 case GIMPLE_SINGLE_RHS:
4060 return verify_gimple_assign_single (stmt);
4061
4062 case GIMPLE_UNARY_RHS:
4063 return verify_gimple_assign_unary (stmt);
4064
4065 case GIMPLE_BINARY_RHS:
4066 return verify_gimple_assign_binary (stmt);
4067
4068 case GIMPLE_TERNARY_RHS:
4069 return verify_gimple_assign_ternary (stmt);
4070
4071 default:
4072 gcc_unreachable ();
4073 }
4074 }
4075
/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_return (gimple stmt)
{
  tree op = gimple_return_retval (stmt);
  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));

  /* We cannot test for present return values as we do not fix up missing
     return values from the original source.  */
  if (op == NULL)
    return false;

  /* The return value must be a GIMPLE value or the RESULT_DECL itself.  */
  if (!is_gimple_val (op)
      && TREE_CODE (op) != RESULT_DECL)
    {
      error ("invalid operand in return statement");
      debug_generic_stmt (op);
      return true;
    }

  /* For a by-reference result the RESULT_DECL (or the SSA name based
     on it) has pointer-to-result type.  Reuse OP to hold that pointer
     type, so the TREE_TYPE (op) below yields the pointed-to type that
     must match RESTYPE.  */
  if ((TREE_CODE (op) == RESULT_DECL
       && DECL_BY_REFERENCE (op))
      || (TREE_CODE (op) == SSA_NAME
	  && SSA_NAME_VAR (op)
	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
    op = TREE_TYPE (op);

  if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
    {
      error ("invalid conversion in return statement");
      debug_generic_stmt (restype);
      debug_generic_stmt (TREE_TYPE (op));
      return true;
    }

  return false;
}
4116
4117
4118 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4119 is a problem, otherwise false. */
4120
4121 static bool
4122 verify_gimple_goto (gimple stmt)
4123 {
4124 tree dest = gimple_goto_dest (stmt);
4125
4126 /* ??? We have two canonical forms of direct goto destinations, a
4127 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4128 if (TREE_CODE (dest) != LABEL_DECL
4129 && (!is_gimple_val (dest)
4130 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4131 {
4132 error ("goto destination is neither a label nor a pointer");
4133 return true;
4134 }
4135
4136 return false;
4137 }
4138
/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_switch (gimple stmt)
{
  unsigned int i, n;
  tree elt, prev_upper_bound = NULL_TREE;
  tree index_type, elt_type = NULL_TREE;

  /* The switch index must be a GIMPLE value of integral type.  */
  if (!is_gimple_val (gimple_switch_index (stmt)))
    {
      error ("invalid operand to switch statement");
      debug_generic_stmt (gimple_switch_index (stmt));
      return true;
    }

  index_type = TREE_TYPE (gimple_switch_index (stmt));
  if (! INTEGRAL_TYPE_P (index_type))
    {
      error ("non-integral type switch statement");
      debug_generic_expr (index_type);
      return true;
    }

  /* Label 0 is by convention the default case; it carries neither a
     low nor a high bound.  */
  elt = gimple_switch_label (stmt, 0);
  if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
    {
      error ("invalid default case label in switch statement");
      debug_generic_expr (elt);
      return true;
    }

  /* Walk the remaining labels, checking each one in isolation, its
     type against the first label, and its ordering against the
     previous label.  */
  n = gimple_switch_num_labels (stmt);
  for (i = 1; i < n; i++)
    {
      elt = gimple_switch_label (stmt, i);

      if (! CASE_LOW (elt))
	{
	  error ("invalid case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}
      /* A range label must have LOW strictly below HIGH.  */
      if (CASE_HIGH (elt)
	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
	{
	  error ("invalid case range in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      if (elt_type)
	{
	  /* All case labels must share the exact same type.  */
	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
	    {
	      error ("type mismatch for case label in switch statement");
	      debug_generic_expr (elt);
	      return true;
	    }
	}
      else
	{
	  /* First non-default label establishes the case label type.  */
	  elt_type = TREE_TYPE (CASE_LOW (elt));
	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
	    {
	      error ("type precision mismatch in switch statement");
	      return true;
	    }
	}

      /* Case labels must appear in strictly increasing order.  */
      if (prev_upper_bound)
	{
	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
	    {
	      error ("case labels not sorted in switch statement");
	      return true;
	    }
	}

      prev_upper_bound = CASE_HIGH (elt);
      if (! prev_upper_bound)
	prev_upper_bound = CASE_LOW (elt);
    }

  return false;
}
4227
4228 /* Verify a gimple debug statement STMT.
4229 Returns true if anything is wrong. */
4230
4231 static bool
4232 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4233 {
4234 /* There isn't much that could be wrong in a gimple debug stmt. A
4235 gimple debug bind stmt, for example, maps a tree, that's usually
4236 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4237 component or member of an aggregate type, to another tree, that
4238 can be an arbitrary expression. These stmts expand into debug
4239 insns, and are converted to debug notes by var-tracking.c. */
4240 return false;
4241 }
4242
4243 /* Verify a gimple label statement STMT.
4244 Returns true if anything is wrong. */
4245
4246 static bool
4247 verify_gimple_label (gimple stmt)
4248 {
4249 tree decl = gimple_label_label (stmt);
4250 int uid;
4251 bool err = false;
4252
4253 if (TREE_CODE (decl) != LABEL_DECL)
4254 return true;
4255 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4256 && DECL_CONTEXT (decl) != current_function_decl)
4257 {
4258 error ("label's context is not the current function decl");
4259 err |= true;
4260 }
4261
4262 uid = LABEL_DECL_UID (decl);
4263 if (cfun->cfg
4264 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4265 {
4266 error ("incorrect entry in label_to_block_map");
4267 err |= true;
4268 }
4269
4270 uid = EH_LANDING_PAD_NR (decl);
4271 if (uid)
4272 {
4273 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4274 if (decl != lp->post_landing_pad)
4275 {
4276 error ("incorrect setting of landing pad number");
4277 err |= true;
4278 }
4279 }
4280
4281 return err;
4282 }
4283
/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  */

static bool
verify_gimple_stmt (gimple stmt)
{
  /* Dispatch to the per-code verifier; codes with nothing to check
     return false directly.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (stmt);

    case GIMPLE_LABEL:
      return verify_gimple_label (stmt);

    case GIMPLE_CALL:
      return verify_gimple_call (stmt);

    case GIMPLE_COND:
      /* The condition code must be a comparison, the labels (when
	 present) LABEL_DECLs, and the operands a valid boolean
	 comparison.  */
      if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
	{
	  error ("invalid comparison code in gimple cond");
	  return true;
	}
      if (!(!gimple_cond_true_label (stmt)
	    || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
	  || !(!gimple_cond_false_label (stmt)
	       || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
	{
	  error ("invalid labels in gimple cond");
	  return true;
	}

      return verify_gimple_comparison (boolean_type_node,
				       gimple_cond_lhs (stmt),
				       gimple_cond_rhs (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (stmt);

    case GIMPLE_SWITCH:
      return verify_gimple_switch (stmt);

    case GIMPLE_RETURN:
      return verify_gimple_return (stmt);

    case GIMPLE_ASM:
      return false;

    case GIMPLE_TRANSACTION:
      return verify_gimple_transaction (stmt);

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}
4359
/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  */

static bool
verify_gimple_phi (gimple phi)
{
  bool err = false;
  unsigned i;
  tree phi_result = gimple_phi_result (phi);
  bool virtual_p;

  if (!phi_result)
    {
      error ("invalid PHI result");
      return true;
    }

  /* The result must be an SSA name; a virtual PHI must be based on
     the single virtual operand of the function.  */
  virtual_p = virtual_operand_p (phi_result);
  if (TREE_CODE (phi_result) != SSA_NAME
      || (virtual_p
	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
    {
      error ("invalid PHI result");
      err = true;
    }

  /* Check each argument: present, of matching virtualness, and (for
     real operands) a GIMPLE value.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree t = gimple_phi_arg_def (phi, i);

      if (!t)
	{
	  error ("missing PHI def");
	  err |= true;
	  continue;
	}
      /* Addressable variables do have SSA_NAMEs but they
	 are not considered gimple values.  */
      else if ((TREE_CODE (t) == SSA_NAME
		&& virtual_p != virtual_operand_p (t))
	       || (virtual_p
		   && (TREE_CODE (t) != SSA_NAME
		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
	       || (!virtual_p
		   && !is_gimple_val (t)))
	{
	  error ("invalid PHI argument");
	  debug_generic_expr (t);
	  err |= true;
	}
#ifdef ENABLE_TYPES_CHECKING
      /* Optionally also check that argument types agree with the
	 result type.  */
      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
	{
	  error ("incompatible types in PHI argument %u", i);
	  debug_generic_stmt (TREE_TYPE (phi_result));
	  debug_generic_stmt (TREE_TYPE (t));
	  err |= true;
	}
#endif
    }

  return err;
}
4423
/* Verify the GIMPLE statements inside the sequence STMTS.
   Recurses into nested sequences of statements that carry bodies;
   returns true if any statement is invalid.  */

static bool
verify_gimple_in_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple stmt = gsi_stmt (ittr);

      switch (gimple_code (stmt))
	{
	/* Statements with nested sequences are verified by recursing
	   into each contained sequence.  */
	case GIMPLE_BIND:
	  err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
	  break;

	case GIMPLE_TRY:
	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
	  break;

	case GIMPLE_EH_FILTER:
	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
	  break;

	case GIMPLE_EH_ELSE:
	  err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
	  break;

	case GIMPLE_CATCH:
	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
	  break;

	case GIMPLE_TRANSACTION:
	  err |= verify_gimple_transaction (stmt);
	  break;

	default:
	  {
	    /* Leaf statements are checked individually; dump any
	       statement that fails.  */
	    bool err2 = verify_gimple_stmt (stmt);
	    if (err2)
	      debug_gimple_stmt (stmt);
	    err |= err2;
	  }
	}
    }

  return err;
}
4476
4477 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4478 is a problem, otherwise false. */
4479
4480 static bool
4481 verify_gimple_transaction (gimple stmt)
4482 {
4483 tree lab = gimple_transaction_label (stmt);
4484 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4485 return true;
4486 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4487 }
4488
4489
4490 /* Verify the GIMPLE statements inside the statement list STMTS. */
4491
4492 DEBUG_FUNCTION void
4493 verify_gimple_in_seq (gimple_seq stmts)
4494 {
4495 timevar_push (TV_TREE_STMT_VERIFY);
4496 if (verify_gimple_in_seq_2 (stmts))
4497 internal_error ("verify_gimple failed");
4498 timevar_pop (TV_TREE_STMT_VERIFY);
4499 }
4500
4501 /* Return true when the T can be shared. */
4502
4503 static bool
4504 tree_node_can_be_shared (tree t)
4505 {
4506 if (IS_TYPE_OR_DECL_P (t)
4507 || is_gimple_min_invariant (t)
4508 || TREE_CODE (t) == SSA_NAME
4509 || t == error_mark_node
4510 || TREE_CODE (t) == IDENTIFIER_NODE)
4511 return true;
4512
4513 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4514 return true;
4515
4516 if (DECL_P (t))
4517 return true;
4518
4519 return false;
4520 }
4521
4522 /* Called via walk_tree. Verify tree sharing. */
4523
4524 static tree
4525 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4526 {
4527 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4528
4529 if (tree_node_can_be_shared (*tp))
4530 {
4531 *walk_subtrees = false;
4532 return NULL;
4533 }
4534
4535 if (pointer_set_insert (visited, *tp))
4536 return *tp;
4537
4538 return NULL;
4539 }
4540
4541 /* Called via walk_gimple_stmt. Verify tree sharing. */
4542
4543 static tree
4544 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4545 {
4546 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4547 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4548 }
4549
4550 static bool eh_error_found;
4551 static int
4552 verify_eh_throw_stmt_node (void **slot, void *data)
4553 {
4554 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4555 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4556
4557 if (!pointer_set_contains (visited, node->stmt))
4558 {
4559 error ("dead STMT in EH table");
4560 debug_gimple_stmt (node->stmt);
4561 eh_error_found = true;
4562 }
4563 return 1;
4564 }
4565
4566 /* Verify if the location LOCs block is in BLOCKS. */
4567
4568 static bool
4569 verify_location (pointer_set_t *blocks, location_t loc)
4570 {
4571 tree block = LOCATION_BLOCK (loc);
4572 if (block != NULL_TREE
4573 && !pointer_set_contains (blocks, block))
4574 {
4575 error ("location references block not in block tree");
4576 return true;
4577 }
4578 if (block != NULL_TREE)
4579 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4580 return false;
4581 }
4582
4583 /* Called via walk_tree. Verify that expressions have no blocks. */
4584
4585 static tree
4586 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4587 {
4588 if (!EXPR_P (*tp))
4589 {
4590 *walk_subtrees = false;
4591 return NULL;
4592 }
4593
4594 location_t loc = EXPR_LOCATION (*tp);
4595 if (LOCATION_BLOCK (loc) != NULL)
4596 return *tp;
4597
4598 return NULL;
4599 }
4600
4601 /* Called via walk_tree. Verify locations of expressions. */
4602
4603 static tree
4604 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4605 {
4606 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4607
4608 if (TREE_CODE (*tp) == VAR_DECL
4609 && DECL_HAS_DEBUG_EXPR_P (*tp))
4610 {
4611 tree t = DECL_DEBUG_EXPR (*tp);
4612 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4613 if (addr)
4614 return addr;
4615 }
4616 if ((TREE_CODE (*tp) == VAR_DECL
4617 || TREE_CODE (*tp) == PARM_DECL
4618 || TREE_CODE (*tp) == RESULT_DECL)
4619 && DECL_HAS_VALUE_EXPR_P (*tp))
4620 {
4621 tree t = DECL_VALUE_EXPR (*tp);
4622 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4623 if (addr)
4624 return addr;
4625 }
4626
4627 if (!EXPR_P (*tp))
4628 {
4629 *walk_subtrees = false;
4630 return NULL;
4631 }
4632
4633 location_t loc = EXPR_LOCATION (*tp);
4634 if (verify_location (blocks, loc))
4635 return *tp;
4636
4637 return NULL;
4638 }
4639
4640 /* Called via walk_gimple_op. Verify locations of expressions. */
4641
4642 static tree
4643 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4644 {
4645 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4646 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4647 }
4648
4649 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4650
4651 static void
4652 collect_subblocks (pointer_set_t *blocks, tree block)
4653 {
4654 tree t;
4655 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4656 {
4657 pointer_set_insert (blocks, t);
4658 collect_subblocks (blocks, t);
4659 }
4660 }
4661
/* Verify the GIMPLE statements in the CFG of FN.  Checks statement
   well-formedness, tree-node sharing, location/block consistency and
   EH-table sanity; aborts via internal_error on any failure.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn)
{
  basic_block bb;
  bool err = false;
  /* VISITED: tree nodes seen so far (sharing check).
     VISITED_STMTS: statements seen (stale-EH-table check).
     BLOCKS: all BLOCKs of FN's block tree (location check).  */
  struct pointer_set_t *visited, *visited_stmts, *blocks;

  timevar_push (TV_TREE_STMT_VERIFY);
  visited = pointer_set_create ();
  visited_stmts = pointer_set_create ();

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  blocks = pointer_set_create ();
  if (DECL_INITIAL (fn->decl))
    {
      pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
      collect_subblocks (blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;

      /* First check the PHI nodes of BB.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  bool err2 = false;
	  unsigned i;

	  pointer_set_insert (visited_stmts, phi);

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      /* Then check the ordinary statements of BB.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  pointer_set_insert (visited_stmts, stmt);

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (blocks, gimple_location (stmt));

	  /* Check operand sharing across the whole statement.  */
	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* Check locations of all operand expressions.  */
	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* ??? Instead of not checking these stmts at all the walker
	     should know its context via wi.  */
	  if (!is_gimple_debug (stmt)
	      && !is_gimple_omp (stmt))
	    {
	      memset (&wi, 0, sizeof (wi));
	      addr = walk_gimple_op (stmt, verify_expr, &wi);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  inform (gimple_location (stmt), "in statement");
		  err2 |= true;
		}
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr != 0)
	    {
	      if (!stmt_could_throw_p (stmt))
		{
		  error ("statement marked for throw, but doesn%'t");
		  err2 |= true;
		}
	      else if (lp_nr > 0
		       && !gsi_one_before_end_p (gsi)
		       && stmt_can_throw_internal (stmt))
		{
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}
    }

  /* Any statement still recorded in the EH throw table must have been
     visited above, otherwise the table entry is stale.  */
  eh_error_found = false;
  if (get_eh_throw_stmt_table (cfun))
    htab_traverse (get_eh_throw_stmt_table (cfun),
		   verify_eh_throw_stmt_node,
		   visited_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  pointer_set_destroy (visited);
  pointer_set_destroy (visited_stmts);
  pointer_set_destroy (blocks);
  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
4838
4839
/* Verifies that the flow information is OK: label placement, absence
   of control flow in the middle of blocks, and the shape/flags of the
   outgoing edges of each block against its terminating statement.
   Returns nonzero on failure.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge e;
  edge_iterator ei;

  if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB (bb)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  /* Nonlocal and EH-landing-pad labels must come before any
	     other label of the block.  */
	  label = gimple_label_label (stmt);
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  if (gimple_code (stmt) == GIMPLE_LABEL)
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      /* From here on check the outgoing edges against the statement
	 terminating BB.  */
      gsi = gsi_last_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    /* A GIMPLE_COND must have exactly one true and one false
	       edge, neither fallthru nor abnormal.  */
	    edge true_edge;
	    edge false_edge;

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  /* Only __builtin_return calls get the GIMPLE_RETURN edge
	     treatment below.  */
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* ... fallthru ... */
	case GIMPLE_RETURN:
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR)
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (stmt);

	    /* Mark all the destination basic blocks.  aux == (void *)1
	       means "case destination, no matching edge seen yet".  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    /* aux is promoted to (void *)2 once the destination has
	       been seen on an actual outgoing edge.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Clear the aux markers again.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (stmt);
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
5148
5149
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  The forwarder (FALLTHRU->src) keeps the original
   PHI nodes; new PHIs are created in FALLTHRU->dest to receive their
   results plus the arguments pending on the other predecessor
   edges.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gimple_stmt_iterator gsi;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* With a single predecessor there is nothing to merge, hence no
     PHI adjustment needed.  */
  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi, new_phi;

      phi = gsi_stmt (gsi);
      var = gimple_phi_result (phi);
      /* NEW_PHI in BB takes over VAR as its result; the old PHI gets a
	 fresh SSA name and feeds NEW_PHI through the FALLTHRU edge.  */
      new_phi = create_phi_node (var, bb);
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
5191
5192
5193 /* Return a non-special label in the head of basic block BLOCK.
5194 Create one if it doesn't exist. */
5195
5196 tree
5197 gimple_block_label (basic_block bb)
5198 {
5199 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5200 bool first = true;
5201 tree label;
5202 gimple stmt;
5203
5204 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5205 {
5206 stmt = gsi_stmt (i);
5207 if (gimple_code (stmt) != GIMPLE_LABEL)
5208 break;
5209 label = gimple_label_label (stmt);
5210 if (!DECL_NONLOCAL (label))
5211 {
5212 if (!first)
5213 gsi_move_before (&i, &s);
5214 return label;
5215 }
5216 }
5217
5218 label = create_artificial_label (UNKNOWN_LOCATION);
5219 stmt = gimple_build_label (label);
5220 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5221 return label;
5222 }
5223
5224
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  The inner
	 comparison yields index 1 when edge 0 is E, and index 0
	 otherwise, i.e. it selects the "other" successor edge.  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  /* Only a trailing GIMPLE_COND or GIMPLE_SWITCH can be dropped; the
     block then falls through to TARGET unconditionally.  */
  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}
5262
5263
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  Adjusts the statement
   terminating E->src (switch case labels, asm goto labels, etc.) so
   that control transfers to DEST instead of the old destination.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple stmt;

  /* Abnormal edges cannot be redirected.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  /* Nothing to do when the edge already points at DEST.  */
  if (e->dest == dest)
    return NULL;

  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  if (e->src != ENTRY_BLOCK_PTR)
    {
      /* Try the cheap route first: remove the control statement when
	 all successors would end up at DEST anyway.  */
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	tree label = gimple_block_label (dest);
	tree cases = get_cases_for_edge (e, stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Retarget every case on E's chain at DEST's label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, stmt);

		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    /* No cached case list: scan the full case vector.  */
	    size_t i, n = gimple_switch_num_labels (stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	int i, n = gimple_asm_nlabels (stmt);
	tree label = NULL;

	/* Retarget every asm-goto label that pointed at the old
	   destination.  */
	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (stmt, i);
	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      /* Drop the return; the edge becomes a plain fallthru.  */
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (stmt, e, dest);
      break;

    case GIMPLE_TRANSACTION:
      /* The ABORT edge has a stored label associated with it, otherwise
	 the edges are simply redirectable.  */
      if (e->flags == 0)
	gimple_transaction_set_label (stmt, gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
5411
5412 /* Returns true if it is possible to remove edge E by redirecting
5413 it to the destination of the other edge from E->src. */
5414
5415 static bool
5416 gimple_can_remove_branch_p (const_edge e)
5417 {
5418 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5419 return false;
5420
5421 return true;
5422 }
5423
5424 /* Simple wrapper, as we can always redirect fallthru edges. */
5425
5426 static basic_block
5427 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5428 {
5429 e = gimple_redirect_edge_and_branch (e, dest);
5430 gcc_assert (e);
5431
5432 return NULL;
5433 }
5434
5435
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the newly created block holding the tail statements.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple act;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Splitting at a label degenerates to splitting after the labels.  */
  if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
    stmt = NULL;

  /* Move everything from GSI to the new basic block.  Position GSI on
     the first statement that goes to NEW_BB: skip leading labels, stop
     either immediately (STMT == NULL) or just past STMT.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      act = gsi_stmt (gsi);
      if (gimple_code (act) == GIMPLE_LABEL)
	continue;

      if (!stmt)
	break;

      if (stmt == act)
	{
	  gsi_next (&gsi);
	  break;
	}
    }

  /* Nothing left to move -- NEW_BB stays empty.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
5493
5494
5495 /* Moves basic block BB after block AFTER. */
5496
5497 static bool
5498 gimple_move_block_after (basic_block bb, basic_block after)
5499 {
5500 if (bb->prev_bb == after)
5501 return true;
5502
5503 unlink_block (bb);
5504 link_block (bb, after);
5505
5506 return true;
5507 }
5508
5509
5510 /* Return TRUE if block BB has no executable statements, otherwise return
5511 FALSE. */
5512
5513 static bool
5514 gimple_empty_block_p (basic_block bb)
5515 {
5516 /* BB must have no executable statements. */
5517 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5518 if (phi_nodes (bb))
5519 return false;
5520 if (gsi_end_p (gsi))
5521 return true;
5522 if (is_gimple_debug (gsi_stmt (gsi)))
5523 gsi_next_nondebug (&gsi);
5524 return gsi_end_p (gsi);
5525 }
5526
5527
/* Split a basic block if it ends with a conditional branch and if the
   other part of the block is not empty.  Returns the new (second)
   block, or NULL when no split was performed.  */

static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
  gimple last, split_point;
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return NULL;
  last = gsi_stmt (gsi);
  /* Only conditional and switch terminators are of interest.  */
  if (gimple_code (last) != GIMPLE_COND
      && gimple_code (last) != GIMPLE_SWITCH)
    return NULL;
  /* Split after the statement preceding the branch.
     NOTE(review): when the branch is the only non-debug statement the
     iterator runs off the front here -- presumably split_block treats
     that as "split after labels"; confirm.  */
  gsi_prev_nondebug (&gsi);
  split_point = gsi_stmt (gsi);
  return split_block (bb, split_point)->dest;
}
5546
5547
/* Return true if basic_block can be duplicated.  The GIMPLE CFG hook
   places no per-block restriction on duplication, so this always
   answers yes.  */

static bool
gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
{
  return true;
}
5555
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  The copy is appended before the exit block;
   incoming/outgoing edges are NOT copied here.  Returns the new
   block.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi, gsi_tgt;
  gimple_seq phis = phi_nodes (bb);
  gimple phi, stmt, copy;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);
      copy = create_phi_node (NULL_TREE, new_bb);
      /* Give the copy a fresh SSA name as its result and record the
	 mapping from the original's result.  */
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (TREE_CODE (base) == VAR_DECL
		  || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (TREE_CODE (base) != VAR_DECL
		  || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
5631
/* Adds phi node arguments for edge E_COPY after basic block
   duplication.  The argument values are taken from the corresponding
   original edge, found by mapping duplicated blocks (marked with
   BB_DUPLICATED) back to their originals.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gimple phi, phi_copy;
  tree def;
  gimple_stmt_iterator psi, psi_copy;

  /* No PHIs in the destination means there is nothing to fill in.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the source block of the copied edge back to its original.  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  /* Likewise for the destination block.  */
  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Walk the PHIs of the original and the copied destination in
     lockstep; they are assumed to be in matching order.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = gsi_stmt (psi);
      phi_copy = gsi_stmt (psi_copy);
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
5682
5683
5684 /* Basic block BB_COPY was created by code duplication. Add phi node
5685 arguments for edges going out of BB_COPY. The blocks that were
5686 duplicated have BB_DUPLICATED set. */
5687
5688 void
5689 add_phi_args_after_copy_bb (basic_block bb_copy)
5690 {
5691 edge e_copy;
5692 edge_iterator ei;
5693
5694 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5695 {
5696 add_phi_args_after_copy_edge (e_copy);
5697 }
5698 }
5699
5700 /* Blocks in REGION_COPY array of length N_REGION were created by
5701 duplication of basic blocks. Add phi node arguments for edges
5702 going from these blocks. If E_COPY is not NULL, also add
5703 phi node arguments for its destination.*/
5704
5705 void
5706 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5707 edge e_copy)
5708 {
5709 unsigned i;
5710
5711 for (i = 0; i < n_region; i++)
5712 region_copy[i]->flags |= BB_DUPLICATED;
5713
5714 for (i = 0; i < n_region; i++)
5715 add_phi_args_after_copy_bb (region_copy[i]);
5716 if (e_copy)
5717 add_phi_args_after_copy_edge (e_copy);
5718
5719 for (i = 0; i < n_region; i++)
5720 region_copy[i]->flags &= ~BB_DUPLICATED;
5721 }
5722
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may only appear in the region as ENTRY->dest.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  set_loop_copy (loop, loop);

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      set_loop_copy (loop, loop_outer (loop));

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  /* If the caller provided no storage for the copies, allocate it here
     and remember to free it on the way out.  */
  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  initialize_original_copy_tables ();

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  /* Gather profile data for rescaling below.  Counts are used when
     available; frequencies are the fallback.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);
  /* Split the profile between the original region (which keeps the
     non-ENTRY share) and the copy (which takes ENTRY's share).  */
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  /* When copying a loop header, the original header becomes a body block:
     EXIT's destination is the new header and its source the new latch.  */
  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
5874
5875 /* Checks if BB is part of the region defined by N_REGION BBS. */
5876 static bool
5877 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5878 {
5879 unsigned int n;
5880
5881 for (n = 0; n < n_region; n++)
5882 {
5883 if (bb == bbs[n])
5884 return true;
5885 }
5886 return false;
5887 }
5888
5889 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5890 are stored to REGION_COPY in the same order in that they appear
5891 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5892 the region, EXIT an exit from it. The condition guarding EXIT
5893 is moved to ENTRY. Returns true if duplication succeeds, false
5894 otherwise.
5895
5896 For example,
5897
5898 some_code;
5899 if (cond)
5900 A;
5901 else
5902 B;
5903
5904 is transformed to
5905
5906 if (cond)
5907 {
5908 some_code;
5909 A;
5910 }
5911 else
5912 {
5913 some_code;
5914 B;
5915 }
5916 */
5917
5918 bool
5919 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5920 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5921 basic_block *region_copy ATTRIBUTE_UNUSED)
5922 {
5923 unsigned i;
5924 bool free_region_copy = false;
5925 struct loop *loop = exit->dest->loop_father;
5926 struct loop *orig_loop = entry->dest->loop_father;
5927 basic_block switch_bb, entry_bb, nentry_bb;
5928 vec<basic_block> doms;
5929 int total_freq = 0, exit_freq = 0;
5930 gcov_type total_count = 0, exit_count = 0;
5931 edge exits[2], nexits[2], e;
5932 gimple_stmt_iterator gsi;
5933 gimple cond_stmt;
5934 edge sorig, snew;
5935 basic_block exit_bb;
5936 gimple_stmt_iterator psi;
5937 gimple phi;
5938 tree def;
5939 struct loop *target, *aloop, *cloop;
5940
5941 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5942 exits[0] = exit;
5943 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5944
5945 if (!can_copy_bbs_p (region, n_region))
5946 return false;
5947
5948 initialize_original_copy_tables ();
5949 set_loop_copy (orig_loop, loop);
5950
5951 target= loop;
5952 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5953 {
5954 if (bb_part_of_region_p (aloop->header, region, n_region))
5955 {
5956 cloop = duplicate_loop (aloop, target);
5957 duplicate_subloops (aloop, cloop);
5958 }
5959 }
5960
5961 if (!region_copy)
5962 {
5963 region_copy = XNEWVEC (basic_block, n_region);
5964 free_region_copy = true;
5965 }
5966
5967 gcc_assert (!need_ssa_update_p (cfun));
5968
5969 /* Record blocks outside the region that are dominated by something
5970 inside. */
5971 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5972
5973 if (exit->src->count)
5974 {
5975 total_count = exit->src->count;
5976 exit_count = exit->count;
5977 /* Fix up corner cases, to avoid division by zero or creation of negative
5978 frequencies. */
5979 if (exit_count > total_count)
5980 exit_count = total_count;
5981 }
5982 else
5983 {
5984 total_freq = exit->src->frequency;
5985 exit_freq = EDGE_FREQUENCY (exit);
5986 /* Fix up corner cases, to avoid division by zero or creation of negative
5987 frequencies. */
5988 if (total_freq == 0)
5989 total_freq = 1;
5990 if (exit_freq > total_freq)
5991 exit_freq = total_freq;
5992 }
5993
5994 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5995 split_edge_bb_loc (exit), true);
5996 if (total_count)
5997 {
5998 scale_bbs_frequencies_gcov_type (region, n_region,
5999 total_count - exit_count,
6000 total_count);
6001 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6002 total_count);
6003 }
6004 else
6005 {
6006 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6007 total_freq);
6008 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6009 }
6010
6011 /* Create the switch block, and put the exit condition to it. */
6012 entry_bb = entry->dest;
6013 nentry_bb = get_bb_copy (entry_bb);
6014 if (!last_stmt (entry->src)
6015 || !stmt_ends_bb_p (last_stmt (entry->src)))
6016 switch_bb = entry->src;
6017 else
6018 switch_bb = split_edge (entry);
6019 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6020
6021 gsi = gsi_last_bb (switch_bb);
6022 cond_stmt = last_stmt (exit->src);
6023 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6024 cond_stmt = gimple_copy (cond_stmt);
6025
6026 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6027
6028 sorig = single_succ_edge (switch_bb);
6029 sorig->flags = exits[1]->flags;
6030 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6031
6032 /* Register the new edge from SWITCH_BB in loop exit lists. */
6033 rescan_loop_exit (snew, true, false);
6034
6035 /* Add the PHI node arguments. */
6036 add_phi_args_after_copy (region_copy, n_region, snew);
6037
6038 /* Get rid of now superfluous conditions and associated edges (and phi node
6039 arguments). */
6040 exit_bb = exit->dest;
6041
6042 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6043 PENDING_STMT (e) = NULL;
6044
6045 /* The latch of ORIG_LOOP was copied, and so was the backedge
6046 to the original header. We redirect this backedge to EXIT_BB. */
6047 for (i = 0; i < n_region; i++)
6048 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6049 {
6050 gcc_assert (single_succ_edge (region_copy[i]));
6051 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6052 PENDING_STMT (e) = NULL;
6053 for (psi = gsi_start_phis (exit_bb);
6054 !gsi_end_p (psi);
6055 gsi_next (&psi))
6056 {
6057 phi = gsi_stmt (psi);
6058 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6059 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6060 }
6061 }
6062 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6063 PENDING_STMT (e) = NULL;
6064
6065 /* Anything that is outside of the region, but was dominated by something
6066 inside needs to update dominance info. */
6067 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6068 doms.release ();
6069 /* Update the SSA web. */
6070 update_ssa (TODO_update_ssa);
6071
6072 if (free_region_copy)
6073 free (region_copy);
6074
6075 free_original_copy_tables ();
6076 return true;
6077 }
6078
6079 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6080 adding blocks when the dominator traversal reaches EXIT. This
6081 function silently assumes that ENTRY strictly dominates EXIT. */
6082
6083 void
6084 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6085 vec<basic_block> *bbs_p)
6086 {
6087 basic_block son;
6088
6089 for (son = first_dom_son (CDI_DOMINATORS, entry);
6090 son;
6091 son = next_dom_son (CDI_DOMINATORS, son))
6092 {
6093 bbs_p->safe_push (son);
6094 if (son != exit)
6095 gather_blocks_in_sese_region (son, exit, bbs_p);
6096 }
6097 }
6098
6099 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6100 The duplicates are recorded in VARS_MAP. */
6101
6102 static void
6103 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6104 tree to_context)
6105 {
6106 tree t = *tp, new_t;
6107 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6108 void **loc;
6109
6110 if (DECL_CONTEXT (t) == to_context)
6111 return;
6112
6113 loc = pointer_map_contains (vars_map, t);
6114
6115 if (!loc)
6116 {
6117 loc = pointer_map_insert (vars_map, t);
6118
6119 if (SSA_VAR_P (t))
6120 {
6121 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6122 add_local_decl (f, new_t);
6123 }
6124 else
6125 {
6126 gcc_assert (TREE_CODE (t) == CONST_DECL);
6127 new_t = copy_node (t);
6128 }
6129 DECL_CONTEXT (new_t) = to_context;
6130
6131 *loc = new_t;
6132 }
6133 else
6134 new_t = (tree) *loc;
6135
6136 *tp = new_t;
6137 }
6138
6139
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.  */

static tree
replace_ssa_name (tree name, struct pointer_map_t *vars_map,
		  tree to_context)
{
  void **loc;
  tree new_name;

  /* Virtual operands are never replaced here; callers strip them
     (see move_block_to_fn, which removes virtual PHIs).  */
  gcc_assert (!virtual_operand_p (name));

  loc = pointer_map_contains (vars_map, name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  /* Duplicate the underlying variable into TO_CONTEXT first,
	     then build the new SSA name on top of the duplicate.  */
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	  if (SSA_NAME_IS_DEFAULT_DEF (name))
	    set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
				 decl, new_name);
	}
      else
	/* Anonymous SSA name: just copy it into the target function.  */
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      /* NOTE(review): the slot is inserted only after the calls above;
	 presumably because replace_by_duplicate_decl also inserts into
	 VARS_MAP and could invalidate an earlier slot pointer — confirm.  */
      loc = pointer_map_insert (vars_map, name);
      *loc = new_name;
    }
  else
    new_name = (tree) *loc;

  return new_name;
}
6178
/* Context shared by the move_stmt_r / move_stmt_op walkers while moving
   statements from one function to another (see move_block_to_fn).  */

struct move_stmt_d
{
  /* BLOCK the moved statements originally belonged to; NULL_TREE means
     "any non-NULL block" (see the tests in move_stmt_op/move_stmt_r).  */
  tree orig_block;
  /* BLOCK to attach the moved statements to (may be NULL_TREE).  */
  tree new_block;
  /* FUNCTION_DECL of the source function.  */
  tree from_context;
  /* FUNCTION_DECL of the destination function; new DECL_CONTEXTs.  */
  tree to_context;
  /* Maps old SSA names and var_decls to their duplicates.  */
  struct pointer_map_t *vars_map;
  /* Maps old labels to replacement labels (may be NULL).  */
  htab_t new_label_map;
  /* Maps old EH regions to the duplicated regions in the destination.  */
  struct pointer_map_t *eh_map;
  /* Whether local decls should be remapped; cleared inside OMP
     directives (see move_stmt_r).  */
  bool remap_decls_p;
};
6190
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      /* Re-home the expression's block: either it carried ORIG_BLOCK, or
	 ORIG_BLOCK is NULL and any non-NULL block is replaced.  */
      tree block = TREE_BLOCK (t);
      if (block == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && block != NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
      else if (block != NULL_TREE)
	{
	  /* Checking only: any other block must be nested inside
	     ORIG_BLOCK, otherwise the statement did not belong to the
	     moved region.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
#endif
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels may have pre-built replacements in NEW_LABEL_MAP
	     (created by new_label_mapper during EH duplication).  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      /* Decls and SSA names have no interesting subtrees to walk.  */
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    /* Types are not remapped; do not walk into them.  */
    *walk_subtrees = 0;

  return NULL_TREE;
}
6256
/* Helper for move_stmt_r.  Given an EH region number for the source
   function, map that to the duplicate EH region number in the dest.  */

static int
move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
{
  eh_region old_r, new_r;
  void **slot;

  old_r = get_eh_region_from_number (old_nr);
  /* Every EH region referenced by a moved statement must already have
     been duplicated into P->EH_MAP (by duplicate_eh_regions); the slot
     is dereferenced unchecked here.  */
  slot = pointer_map_contains (p->eh_map, old_r);
  new_r = (eh_region) *slot;

  return new_r->index;
}
6272
6273 /* Similar, but operate on INTEGER_CSTs. */
6274
6275 static tree
6276 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6277 {
6278 int old_nr, new_nr;
6279
6280 old_nr = tree_to_shwi (old_t_nr);
6281 new_nr = move_stmt_eh_region_nr (old_nr, p);
6282
6283 return build_int_cst (integer_type_node, new_nr);
6284 }
6285
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  /* Re-home the statement's block, same rule as in move_stmt_op:
     either it carried ORIG_BLOCK, or ORIG_BLOCK is NULL and any
     non-NULL block is replaced.  */
  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* Argument 1 is a second EH region number; remap it, then
		 fall through to remap argument 0 as well.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	/* Resume statements carry an EH region number directly.  */
	int r = gimple_resx_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	/* So do EH dispatch statements.  */
	int r = gimple_eh_dispatch_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
6374
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its old
     index in the new function.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by roughly 25% slack to amortize repeated moves.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
    {
      gimple phi = gsi_stmt (si);
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&si, true);
	  continue;
	}

      /* Replace the PHI result and all real SSA-name arguments with
	 their duplicates in DEST_CFUN.  */
      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Re-home the BLOCK recorded in each argument's location.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      if (d->new_block == NULL_TREE)
		locus = LOCATION_LOCUS (locus);
	      else
		locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&si);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      /* Remap blocks, decls, SSA names and EH regions in the statement
	 via the move_stmt_r/move_stmt_op walkers.  */
      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Transfer the label's uid->block mapping from CFUN's table to
	     DEST_CFUN's, growing the latter as needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Move the statement's EH and profile-histogram bookkeeping from
	 CFUN to DEST_CFUN.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (stmt);
      /* Rebuild the operands with DEST_CFUN as the current function.  */
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Re-home the BLOCK recorded in each outgoing edge's goto locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = d->new_block ?
	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	      LOCATION_LOCUS (e->goto_locus);
      }
}
6540
6541 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6542 the outermost EH region. Use REGION as the incoming base EH region. */
6543
6544 static eh_region
6545 find_outermost_region_in_block (struct function *src_cfun,
6546 basic_block bb, eh_region region)
6547 {
6548 gimple_stmt_iterator si;
6549
6550 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6551 {
6552 gimple stmt = gsi_stmt (si);
6553 eh_region stmt_region;
6554 int lp_nr;
6555
6556 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6557 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6558 if (stmt_region)
6559 {
6560 if (region == NULL)
6561 region = stmt_region;
6562 else if (stmt_region != region)
6563 {
6564 region = eh_region_outermost (src_cfun, stmt_region, region);
6565 gcc_assert (region != NULL);
6566 }
6567 }
6568 }
6569
6570 return region;
6571 }
6572
6573 static tree
6574 new_label_mapper (tree decl, void *data)
6575 {
6576 htab_t hash = (htab_t) data;
6577 struct tree_map *m;
6578 void **slot;
6579
6580 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6581
6582 m = XNEW (struct tree_map);
6583 m->hash = DECL_UID (decl);
6584 m->base.from = decl;
6585 m->to = create_artificial_label (UNKNOWN_LOCATION);
6586 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6587 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6588 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6589
6590 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6591 gcc_assert (*slot == NULL);
6592
6593 *slot = m;
6594
6595 return m->to;
6596 }
6597
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  */

static void
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
				 tree to_context)
{
  tree *tp, t;

  /* Walk the BLOCK_VARS chain through a pointer-to-pointer so that a
     replaced decl can be spliced into the chain in place.  */
  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      /* Only variables and constants are duplicated; other decls stay.  */
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  /* Carry a DECL_VALUE_EXPR over to the duplicate, since
	     copy_var_decl does not.  */
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain where the original was.  */
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into nested lexical blocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
6628
6629 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6630 from FN1 to FN2. */
6631
6632 static void
6633 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6634 struct loop *loop)
6635 {
6636 /* Discard it from the old loop array. */
6637 (*get_loops (fn1))[loop->num] = NULL;
6638
6639 /* Place it in the new loop array, assigning it a new number. */
6640 loop->num = number_of_loops (fn2);
6641 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6642
6643 /* Recurse to children. */
6644 for (loop = loop->inner; loop; loop = loop->next)
6645 fixup_loop_arrays_after_move (fn1, fn2, loop);
6646 }
6647
6648 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6649 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6650 single basic block in the original CFG and the new basic block is
6651 returned. DEST_CFUN must not have a CFG yet.
6652
6653 Note that the region need not be a pure SESE region. Blocks inside
6654 the region may contain calls to abort/exit. The only restriction
6655 is that ENTRY_BB should be the only entry point and it must
6656 dominate EXIT_BB.
6657
6658 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6659 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6660 to the new function.
6661
6662 All local variables referenced in the region are assumed to be in
6663 the corresponding BLOCK_VARS and unexpanded variable lists
6664 associated with DEST_CFUN. */
6665
6666 basic_block
6667 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6668 basic_block exit_bb, tree orig_block)
6669 {
6670 vec<basic_block> bbs, dom_bbs;
6671 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6672 basic_block after, bb, *entry_pred, *exit_succ, abb;
6673 struct function *saved_cfun = cfun;
6674 int *entry_flag, *exit_flag;
6675 unsigned *entry_prob, *exit_prob;
6676 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6677 edge e;
6678 edge_iterator ei;
6679 htab_t new_label_map;
6680 struct pointer_map_t *vars_map, *eh_map;
6681 struct loop *loop = entry_bb->loop_father;
6682 struct loop *loop0 = get_loop (saved_cfun, 0);
6683 struct move_stmt_d d;
6684
6685 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6686 region. */
6687 gcc_assert (entry_bb != exit_bb
6688 && (!exit_bb
6689 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6690
6691 /* Collect all the blocks in the region. Manually add ENTRY_BB
6692 because it won't be added by dfs_enumerate_from. */
6693 bbs.create (0);
6694 bbs.safe_push (entry_bb);
6695 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6696
6697 /* The blocks that used to be dominated by something in BBS will now be
6698 dominated by the new block. */
6699 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6700 bbs.address (),
6701 bbs.length ());
6702
6703 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6704 the predecessor edges to ENTRY_BB and the successor edges to
6705 EXIT_BB so that we can re-attach them to the new basic block that
6706 will replace the region. */
6707 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6708 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6709 entry_flag = XNEWVEC (int, num_entry_edges);
6710 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6711 i = 0;
6712 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6713 {
6714 entry_prob[i] = e->probability;
6715 entry_flag[i] = e->flags;
6716 entry_pred[i++] = e->src;
6717 remove_edge (e);
6718 }
6719
6720 if (exit_bb)
6721 {
6722 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6723 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6724 exit_flag = XNEWVEC (int, num_exit_edges);
6725 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6726 i = 0;
6727 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6728 {
6729 exit_prob[i] = e->probability;
6730 exit_flag[i] = e->flags;
6731 exit_succ[i++] = e->dest;
6732 remove_edge (e);
6733 }
6734 }
6735 else
6736 {
6737 num_exit_edges = 0;
6738 exit_succ = NULL;
6739 exit_flag = NULL;
6740 exit_prob = NULL;
6741 }
6742
6743 /* Switch context to the child function to initialize DEST_FN's CFG. */
6744 gcc_assert (dest_cfun->cfg == NULL);
6745 push_cfun (dest_cfun);
6746
6747 init_empty_tree_cfg ();
6748
6749 /* Initialize EH information for the new function. */
6750 eh_map = NULL;
6751 new_label_map = NULL;
6752 if (saved_cfun->eh)
6753 {
6754 eh_region region = NULL;
6755
6756 FOR_EACH_VEC_ELT (bbs, i, bb)
6757 region = find_outermost_region_in_block (saved_cfun, bb, region);
6758
6759 init_eh_for_function ();
6760 if (region != NULL)
6761 {
6762 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6763 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6764 new_label_mapper, new_label_map);
6765 }
6766 }
6767
6768 /* Initialize an empty loop tree. */
6769 struct loops *loops = ggc_alloc_cleared_loops ();
6770 init_loops_structure (dest_cfun, loops, 1);
6771 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6772 set_loops_for_fn (dest_cfun, loops);
6773
6774 /* Move the outlined loop tree part. */
6775 num_nodes = bbs.length ();
6776 FOR_EACH_VEC_ELT (bbs, i, bb)
6777 {
6778 if (bb->loop_father->header == bb)
6779 {
6780 struct loop *this_loop = bb->loop_father;
6781 struct loop *outer = loop_outer (this_loop);
6782 if (outer == loop
6783 /* If the SESE region contains some bbs ending with
6784 a noreturn call, those are considered to belong
6785 to the outermost loop in saved_cfun, rather than
6786 the entry_bb's loop_father. */
6787 || outer == loop0)
6788 {
6789 if (outer != loop)
6790 num_nodes -= this_loop->num_nodes;
6791 flow_loop_tree_node_remove (bb->loop_father);
6792 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6793 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6794 }
6795 }
6796 else if (bb->loop_father == loop0 && loop0 != loop)
6797 num_nodes--;
6798
6799 /* Remove loop exits from the outlined region. */
6800 if (loops_for_fn (saved_cfun)->exits)
6801 FOR_EACH_EDGE (e, ei, bb->succs)
6802 {
6803 void **slot = htab_find_slot_with_hash
6804 (loops_for_fn (saved_cfun)->exits, e,
6805 htab_hash_pointer (e), NO_INSERT);
6806 if (slot)
6807 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6808 }
6809 }
6810
6811
6812 /* Adjust the number of blocks in the tree root of the outlined part. */
6813 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6814
6815 /* Setup a mapping to be used by move_block_to_fn. */
6816 loop->aux = current_loops->tree_root;
6817 loop0->aux = current_loops->tree_root;
6818
6819 pop_cfun ();
6820
6821 /* Move blocks from BBS into DEST_CFUN. */
6822 gcc_assert (bbs.length () >= 2);
6823 after = dest_cfun->cfg->x_entry_block_ptr;
6824 vars_map = pointer_map_create ();
6825
6826 memset (&d, 0, sizeof (d));
6827 d.orig_block = orig_block;
6828 d.new_block = DECL_INITIAL (dest_cfun->decl);
6829 d.from_context = cfun->decl;
6830 d.to_context = dest_cfun->decl;
6831 d.vars_map = vars_map;
6832 d.new_label_map = new_label_map;
6833 d.eh_map = eh_map;
6834 d.remap_decls_p = true;
6835
6836 FOR_EACH_VEC_ELT (bbs, i, bb)
6837 {
6838 /* No need to update edge counts on the last block. It has
6839 already been updated earlier when we detached the region from
6840 the original CFG. */
6841 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6842 after = bb;
6843 }
6844
6845 loop->aux = NULL;
6846 loop0->aux = NULL;
6847 /* Loop sizes are no longer correct, fix them up. */
6848 loop->num_nodes -= num_nodes;
6849 for (struct loop *outer = loop_outer (loop);
6850 outer; outer = loop_outer (outer))
6851 outer->num_nodes -= num_nodes;
6852 loop0->num_nodes -= bbs.length () - num_nodes;
6853
6854 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6855 {
6856 struct loop *aloop;
6857 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6858 if (aloop != NULL)
6859 {
6860 if (aloop->simduid)
6861 {
6862 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6863 d.to_context);
6864 dest_cfun->has_simduid_loops = true;
6865 }
6866 if (aloop->force_vect)
6867 dest_cfun->has_force_vect_loops = true;
6868 }
6869 }
6870
6871 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6872 if (orig_block)
6873 {
6874 tree block;
6875 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6876 == NULL_TREE);
6877 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6878 = BLOCK_SUBBLOCKS (orig_block);
6879 for (block = BLOCK_SUBBLOCKS (orig_block);
6880 block; block = BLOCK_CHAIN (block))
6881 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6882 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6883 }
6884
6885 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6886 vars_map, dest_cfun->decl);
6887
6888 if (new_label_map)
6889 htab_delete (new_label_map);
6890 if (eh_map)
6891 pointer_map_destroy (eh_map);
6892 pointer_map_destroy (vars_map);
6893
6894 /* Rewire the entry and exit blocks. The successor to the entry
6895 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6896 the child function. Similarly, the predecessor of DEST_FN's
6897 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6898 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6899 various CFG manipulation function get to the right CFG.
6900
6901 FIXME, this is silly. The CFG ought to become a parameter to
6902 these helpers. */
6903 push_cfun (dest_cfun);
6904 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6905 if (exit_bb)
6906 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6907 pop_cfun ();
6908
6909 /* Back in the original function, the SESE region has disappeared,
6910 create a new basic block in its place. */
6911 bb = create_empty_bb (entry_pred[0]);
6912 if (current_loops)
6913 add_bb_to_loop (bb, loop);
6914 for (i = 0; i < num_entry_edges; i++)
6915 {
6916 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6917 e->probability = entry_prob[i];
6918 }
6919
6920 for (i = 0; i < num_exit_edges; i++)
6921 {
6922 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6923 e->probability = exit_prob[i];
6924 }
6925
6926 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6927 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6928 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6929 dom_bbs.release ();
6930
6931 if (exit_bb)
6932 {
6933 free (exit_prob);
6934 free (exit_flag);
6935 free (exit_succ);
6936 }
6937 free (entry_prob);
6938 free (entry_flag);
6939 free (entry_pred);
6940 bbs.release ();
6941
6942 return bb;
6943 }
6944
6945
6946 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
6947 */
6948
void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Temporarily make FNDECL the current function so that the printing
     helpers resolve names against it; restored before every return.  */
  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the typed parameter list.  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  /* Dump the EH region tree when requested.  */
  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* With TDF_RAW and no GIMPLE body yet, emit a raw tree dump and
     stop.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* Anonymous SSA names (no underlying VAR_DECL) are printed as
	 bare "type name;" declarations.  Index 0 is never a name.  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_function (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_fn (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      /* A body that is exactly one GIMPLE_BIND provides its own
	 braces; print it flush left.  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* The locals were already printed above; skip the
		 topmost bind and indent its body.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
7103
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h)  */

DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  /* Debugger convenience wrapper around dump_function_to_file.  */
  dump_function_to_file (fn, stderr, flags);
}
7111
7112
7113 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7114
7115 static void
7116 print_pred_bbs (FILE *file, basic_block bb)
7117 {
7118 edge e;
7119 edge_iterator ei;
7120
7121 FOR_EACH_EDGE (e, ei, bb->preds)
7122 fprintf (file, "bb_%d ", e->src->index);
7123 }
7124
7125
7126 /* Print on FILE the indexes for the successors of basic_block BB. */
7127
7128 static void
7129 print_succ_bbs (FILE *file, basic_block bb)
7130 {
7131 edge e;
7132 edge_iterator ei;
7133
7134 FOR_EACH_EDGE (e, ei, bb->succs)
7135 fprintf (file, "bb_%d ", e->dest->index);
7136 }
7137
7138 /* Print to FILE the basic block BB following the VERBOSITY level. */
7139
7140 void
7141 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7142 {
7143 char *s_indent = (char *) alloca ((size_t) indent + 1);
7144 memset ((void *) s_indent, ' ', (size_t) indent);
7145 s_indent[indent] = '\0';
7146
7147 /* Print basic_block's header. */
7148 if (verbosity >= 2)
7149 {
7150 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7151 print_pred_bbs (file, bb);
7152 fprintf (file, "}, succs = {");
7153 print_succ_bbs (file, bb);
7154 fprintf (file, "})\n");
7155 }
7156
7157 /* Print basic_block's body. */
7158 if (verbosity >= 3)
7159 {
7160 fprintf (file, "%s {\n", s_indent);
7161 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7162 fprintf (file, "%s }\n", s_indent);
7163 }
7164 }
7165
7166 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7167
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  /* Build the indentation prefix.  */
  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header.  */
  fprintf (file, "%sloop_%d (", s_indent, loop->num);
  if (loop->header)
    fprintf (file, "header = %d", loop->header->index);
  else
    {
      /* A loop with no header block has been deleted; nothing more
	 to show.  */
      fprintf (file, "deleted)\n");
      return;
    }
  if (loop->latch)
    fprintf (file, ", latch = %d", loop->latch->index);
  else
    fprintf (file, ", multiple latches");
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations, 0);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      dump_double_int (file, loop->nb_iterations_upper_bound, true);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      dump_double_int (file, loop->nb_iterations_estimate, true);
    }
  fprintf (file, ")\n");

  /* Print loop's body.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      /* Only blocks whose immediate loop_father is LOOP are printed
	 here; blocks of inner loops are handled by the recursive call
	 below.  */
      FOR_EACH_BB (bb)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
7226
7227 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7228 spaces. Following VERBOSITY level this outputs the contents of the
7229 loop, or just its structure. */
7230
7231 static void
7232 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7233 int verbosity)
7234 {
7235 if (loop == NULL)
7236 return;
7237
7238 print_loop (file, loop, indent, verbosity);
7239 print_loop_and_siblings (file, loop->next, indent, verbosity);
7240 }
7241
7242 /* Follow a CFG edge from the entry point of the program, and on entry
7243 of a loop, pretty print the loop structure on FILE. */
7244
7245 void
7246 print_loops (FILE *file, int verbosity)
7247 {
7248 basic_block bb;
7249
7250 bb = ENTRY_BLOCK_PTR;
7251 if (bb && bb->loop_father)
7252 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7253 }
7254
/* Dump a loop.  */

DEBUG_FUNCTION void
debug (struct loop &ref)
{
  /* Structure-only dump (verbosity 0): the header/latch/niter line,
     no basic-block bodies.  */
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
7262
7263 DEBUG_FUNCTION void
7264 debug (struct loop *ptr)
7265 {
7266 if (ptr)
7267 debug (*ptr);
7268 else
7269 fprintf (stderr, "<nil>\n");
7270 }
7271
/* Dump a loop verbosely.  */

DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  /* Verbosity 3 also dumps the bodies of the loop's basic blocks.  */
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
7279
7280 DEBUG_FUNCTION void
7281 debug_verbose (struct loop *ptr)
7282 {
7283 if (ptr)
7284 debug (*ptr);
7285 else
7286 fprintf (stderr, "<nil>\n");
7287 }
7288
7289
/* Debugging loops structure at tree level, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  /* Debugger entry point: print the whole loop tree to stderr.  */
  print_loops (stderr, verbosity);
}
7297
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
7305
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  /* Look the loop up by number in the current function.  */
  debug_loop (get_loop (cfun, num), verbosity);
}
7314
7315 /* Return true if BB ends with a call, possibly followed by some
7316 instructions that must stay with the call. Return false,
7317 otherwise. */
7318
7319 static bool
7320 gimple_block_ends_with_call_p (basic_block bb)
7321 {
7322 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7323 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7324 }
7325
7326
7327 /* Return true if BB ends with a conditional branch. Return false,
7328 otherwise. */
7329
7330 static bool
7331 gimple_block_ends_with_condjump_p (const_basic_block bb)
7332 {
7333 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7334 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7335 }
7336
7337
/* Return true if we need to add fake edge to exit at statement T.
   Helper function for gimple_flow_call_edges_add.  */

static bool
need_fake_edge_p (gimple t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* A nothrow builtin that cannot return twice never needs a fake
     edge -- it cannot leave the block by any path other than the
     normal one.  */
  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      /* Any call that might return normally needs the fake edge.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* A noreturn call still needs a fake edge if all of its current
	 outgoing edges are themselves fake; if it already has a real
	 (e.g. EH or abnormal) successor edge, one real path out of the
	 block exists and a fake edge is required as well.  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile asms and "input" asms are treated as possibly not
     falling through.  */
  if (gimple_code (t) == GIMPLE_ASM
      && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
    return true;

  return false;
}
7394
7395
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  /* Snapshot the block count: the loop below may create new blocks
     via split_block, and those must not be revisited.  */
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Nothing to do for an empty function body.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* Inserting a nop on the edge splits it, giving the
		 call a dedicated (new) last block.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the block backwards so that splitting at STMT leaves the
	 not-yet-visited statements in BB.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR);
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
7515
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.*/

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* Without dominator info we cannot compute which blocks become
     unreachable; just remove the edge.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR)
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF collects the successors of the removed blocks...  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR)
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      /* ...minus the removed blocks themselves, leaving the dominance
	 frontier of the removed region.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK (i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
7641
7642 /* Purge dead EH edges from basic block BB. */
7643
7644 bool
7645 gimple_purge_dead_eh_edges (basic_block bb)
7646 {
7647 bool changed = false;
7648 edge e;
7649 edge_iterator ei;
7650 gimple stmt = last_stmt (bb);
7651
7652 if (stmt && stmt_can_throw_internal (stmt))
7653 return false;
7654
7655 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7656 {
7657 if (e->flags & EDGE_EH)
7658 {
7659 remove_edge_and_dominated_blocks (e);
7660 changed = true;
7661 }
7662 else
7663 ei_next (&ei);
7664 }
7665
7666 return changed;
7667 }
7668
7669 /* Purge dead EH edges from basic block listed in BLOCKS. */
7670
7671 bool
7672 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7673 {
7674 bool changed = false;
7675 unsigned i;
7676 bitmap_iterator bi;
7677
7678 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7679 {
7680 basic_block bb = BASIC_BLOCK (i);
7681
7682 /* Earlier gimple_purge_dead_eh_edges could have removed
7683 this basic block already. */
7684 gcc_assert (bb || changed);
7685 if (bb != NULL)
7686 changed |= gimple_purge_dead_eh_edges (bb);
7687 }
7688
7689 return changed;
7690 }
7691
7692 /* Purge dead abnormal call edges from basic block BB. */
7693
7694 bool
7695 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7696 {
7697 bool changed = false;
7698 edge e;
7699 edge_iterator ei;
7700 gimple stmt = last_stmt (bb);
7701
7702 if (!cfun->has_nonlocal_label
7703 && !cfun->calls_setjmp)
7704 return false;
7705
7706 if (stmt && stmt_can_make_abnormal_goto (stmt))
7707 return false;
7708
7709 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7710 {
7711 if (e->flags & EDGE_ABNORMAL)
7712 {
7713 if (e->flags & EDGE_FALLTHRU)
7714 e->flags &= ~EDGE_ABNORMAL;
7715 else
7716 remove_edge_and_dominated_blocks (e);
7717 changed = true;
7718 }
7719 else
7720 ei_next (&ei);
7721 }
7722
7723 return changed;
7724 }
7725
7726 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7727
7728 bool
7729 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7730 {
7731 bool changed = false;
7732 unsigned i;
7733 bitmap_iterator bi;
7734
7735 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7736 {
7737 basic_block bb = BASIC_BLOCK (i);
7738
7739 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7740 this basic block already. */
7741 gcc_assert (bb || changed);
7742 if (bb != NULL)
7743 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7744 }
7745
7746 return changed;
7747 }
7748
7749 /* This function is called whenever a new edge is created or
7750 redirected. */
7751
7752 static void
7753 gimple_execute_on_growing_pred (edge e)
7754 {
7755 basic_block bb = e->dest;
7756
7757 if (!gimple_seq_empty_p (phi_nodes (bb)))
7758 reserve_phi_args_for_new_edge (bb);
7759 }
7760
7761 /* This function is called immediately before edge E is removed from
7762 the edge vector E->dest->preds. */
7763
7764 static void
7765 gimple_execute_on_shrinking_pred (edge e)
7766 {
7767 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7768 remove_phi_args (e);
7769 }
7770
7771 /*---------------------------------------------------------------------------
7772 Helper functions for Loop versioning
7773 ---------------------------------------------------------------------------*/
7774
/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gimple phi1, phi2;
  gimple_stmt_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.  */

  /* Walk the two PHI sequences in lockstep; since SECOND is a copy of
     FIRST, the i-th PHI of one corresponds to the i-th PHI of the
     other.  */
  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2),  gsi_next (&psi1))
    {
      phi1 = gsi_stmt (psi1);
      phi2 = gsi_stmt (psi2);
      /* Take SECOND's argument on E2 (with its source location) and
	 register it on FIRST's PHI as the argument for edge E.  */
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
7810
7811
/* Adds a if else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
   the destination of the ELSE part.  */

static void
gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
			       basic_block second_head ATTRIBUTE_UNUSED,
			       basic_block cond_bb, void *cond_e)
{
  gimple_stmt_iterator gsi;
  gimple new_cond_expr;
  tree cond_expr = (tree) cond_e;
  edge e0;

  /* Build new conditional expr */
  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
					       NULL_TREE, NULL_TREE);

  /* Add new cond in cond_bb.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);

  /* Adjust edges appropriately to connect new head with first head
     as well as second head.  */
  e0 = single_succ_edge (cond_bb);
  e0->flags &= ~EDGE_FALLTHRU;
  e0->flags |= EDGE_FALSE_VALUE;
  /* NOTE(review): only the existing successor edge is retagged here
     as the FALSE edge; the TRUE edge is presumably created by the
     caller -- confirm against the loop-versioning callers that pass
     this as the lv_add_condition_to_bb hook.  */
}
7840
7841
7842 /* Do book-keeping of basic block BB for the profile consistency checker.
7843 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7844 then do post-pass accounting. Store the counting in RECORD. */
7845 static void
7846 gimple_account_profile_record (basic_block bb, int after_pass,
7847 struct profile_record *record)
7848 {
7849 gimple_stmt_iterator i;
7850 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7851 {
7852 record->size[after_pass]
7853 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7854 if (profile_status == PROFILE_READ)
7855 record->time[after_pass]
7856 += estimate_num_insns (gsi_stmt (i),
7857 &eni_time_weights) * bb->count;
7858 else if (profile_status == PROFILE_GUESSED)
7859 record->time[after_pass]
7860 += estimate_num_insns (gsi_stmt (i),
7861 &eni_time_weights) * bb->frequency;
7862 }
7863 }
7864
/* Table of CFG manipulation hooks for GIMPLE bodies; this is what
   connects the generic CFG machinery (cfghooks.h) to the GIMPLE IL.  */

struct cfg_hooks gimple_cfg_hooks = {
  "gimple",			/* name */
  gimple_verify_flow_info,	/* verify_flow_info */
  gimple_dump_bb,		/* dump_bb */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph */
  create_bb,			/* create_basic_block */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  gimple_can_remove_branch_p,	/* can_remove_branch_p */
  remove_bb,			/* delete_basic_block */
  gimple_split_block,		/* split_block */
  gimple_move_block_after,	/* move_block_after */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p */
  gimple_merge_blocks,		/* merge_blocks */
  gimple_predict_edge,		/* predict_edge */
  gimple_predicted_by_p,	/* predicted_by_p */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p */
  gimple_duplicate_bb,		/* duplicate_block */
  gimple_split_edge,		/* split_edge */
  gimple_make_forwarder_block,	/* make_forward_block */
  NULL,				/* tidy_fallthru_edge */
  NULL,				/* force_nonfallthru */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,   /* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts, 		/* flush_pending_stmts */  
  gimple_empty_block_p,           /* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record,	/* account_profile_record */
};
7901
7902
/* Split all critical edges.  Abnormal edges are never split, and edges
   out of return statements are left alone as well.  Returns 0 (no TODO
   flags).  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	      split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
	            || !gimple_seq_empty_p (phi_nodes (e->dest))
	            || e->dest == EXIT_BLOCK_PTR)
		   && e->src != ENTRY_BLOCK_PTR
	           && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      /* Edges leaving GIMPLE_RETURN / BUILT_IN_RETURN are not
		 split; insertions before a return are handled specially
		 by gimple_find_edge_insert_loc.  */
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
7950
namespace {

/* Pass descriptor for the critical-edge-splitting pass ("crited").
   It requires a CFG and provides PROP_no_crit_edges.  */

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_flow, /* todo_flags_finish */
};

/* Pass wrapper around split_critical_edges.  Clonable, so the pass
   manager may schedule it more than once.  */

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
7988
7989
7990 /* Build a ternary operation and gimplify it. Emit code before GSI.
7991 Return the gimple_val holding the result. */
7992
7993 tree
7994 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7995 tree type, tree a, tree b, tree c)
7996 {
7997 tree ret;
7998 location_t loc = gimple_location (gsi_stmt (*gsi));
7999
8000 ret = fold_build3_loc (loc, code, type, a, b, c);
8001 STRIP_NOPS (ret);
8002
8003 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8004 GSI_SAME_STMT);
8005 }
8006
8007 /* Build a binary operation and gimplify it. Emit code before GSI.
8008 Return the gimple_val holding the result. */
8009
8010 tree
8011 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8012 tree type, tree a, tree b)
8013 {
8014 tree ret;
8015
8016 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8017 STRIP_NOPS (ret);
8018
8019 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8020 GSI_SAME_STMT);
8021 }
8022
8023 /* Build a unary operation and gimplify it. Emit code before GSI.
8024 Return the gimple_val holding the result. */
8025
8026 tree
8027 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8028 tree a)
8029 {
8030 tree ret;
8031
8032 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8033 STRIP_NOPS (ret);
8034
8035 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8036 GSI_SAME_STMT);
8037 }
8038
8039
8040 \f
/* Emit return warnings: diagnose a noreturn function that can in fact
   return, and control reaching the end of a non-void function without
   a value being returned.  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  TREE_THIS_VOLATILE
     on a FUNCTION_DECL means the function was declared noreturn.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      /* Try to pin the warning to the location of a return statement;
	 fall back to the end of the function.  */
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  gimple last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
8097
8098
8099 /* Given a basic block B which ends with a conditional and has
8100 precisely two successors, determine which of the edges is taken if
8101 the conditional is true and which is taken if the conditional is
8102 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8103
8104 void
8105 extract_true_false_edges_from_block (basic_block b,
8106 edge *true_edge,
8107 edge *false_edge)
8108 {
8109 edge e = EDGE_SUCC (b, 0);
8110
8111 if (e->flags & EDGE_TRUE_VALUE)
8112 {
8113 *true_edge = e;
8114 *false_edge = EDGE_SUCC (b, 1);
8115 }
8116 else
8117 {
8118 *false_edge = e;
8119 *true_edge = EDGE_SUCC (b, 1);
8120 }
8121 }
8122
namespace {

/* Pass descriptor for the (unnamed, always-run) return-warning pass.  */

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper around execute_warn_function_return.  */

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_warn_function_return (); }

}; // class pass_warn_function_return

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
8159
/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we don't have to worry about that.

   SEQ is the statement sequence to walk; container statements (binds,
   tries, catches and EH filters) are walked recursively.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (g));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (g));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  /* Never warn about internal function calls.  */
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
8224
/* Entry point: warn about ignored warn_unused_result values in the
   body of the current function.  */

static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}

/* Gate: run only when warnings about unused results are enabled.  */

static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}
8237
namespace {

/* Pass descriptor for the warn_unused_result pass; it runs on the
   gimplified body, before the CFG is built (PROP_gimple_any).  */

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper around run_warn_unused_result / gate_warn_unused_result.  */

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_unused_result (); }
  unsigned int execute () { return run_warn_unused_result (); }

}; // class pass_warn_unused_result

} // anon namespace

/* Factory used by the pass manager to instantiate the pass.  */

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
8275
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.

   Returns a set of TODO_* flags describing the follow-up work needed.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  /* Rescale the profile so block and edge counts agree with the count
     recorded on the function's cgraph node.  */
  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
			    ENTRY_BLOCK_PTR->count);

  ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
                                       count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      /* A call discovered const or pure may have abnormal call
		 edges that are now dead; purge them.  */
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  /* If the scale changed anything, the frequency classification of the
     function may have changed too.  */
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
8378
8379 namespace {
8380
8381 const pass_data pass_data_fixup_cfg =
8382 {
8383 GIMPLE_PASS, /* type */
8384 "*free_cfg_annotations", /* name */
8385 OPTGROUP_NONE, /* optinfo_flags */
8386 false, /* has_gate */
8387 true, /* has_execute */
8388 TV_NONE, /* tv_id */
8389 PROP_cfg, /* properties_required */
8390 0, /* properties_provided */
8391 0, /* properties_destroyed */
8392 0, /* todo_flags_start */
8393 0, /* todo_flags_finish */
8394 };
8395
8396 class pass_fixup_cfg : public gimple_opt_pass
8397 {
8398 public:
8399 pass_fixup_cfg (gcc::context *ctxt)
8400 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8401 {}
8402
8403 /* opt_pass methods: */
8404 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8405 unsigned int execute () { return execute_fixup_cfg (); }
8406
8407 }; // class pass_fixup_cfg
8408
8409 } // anon namespace
8410
8411 gimple_opt_pass *
8412 make_pass_fixup_cfg (gcc::context *ctxt)
8413 {
8414 return new pass_fixup_cfg (ctxt);
8415 }
8416
/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

/* Mark the GC-managed fields of edge E.  Which member of the insns
   union is live depends on whether the current IR is GIMPLE or RTL.  */

void
gt_ggc_mx (edge_def *e)
{
  /* The BLOCK referenced by the edge's goto locus must be kept live.  */
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
8436
/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

/* Note the GC-managed fields of edge E for PCH output.  As in
   gt_ggc_mx, the live insns union member depends on the current IR.  */

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

/* Apply the pointer operator OP (with user data COOKIE) to each
   pointer field of edge E, for PCH pointer relocation.  */

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}