/* Control flow functions for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;
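/* For instance (a sketch): for a GIMPLE_SWITCH of the form

     switch (x) <default: D, case 1: L, case 2: L>

   the CASE_LABEL_EXPRs for 1 and 2 both reference the single edge into
   L's block, so they end up chained together on that edge's entry via
   their CASE_CHAIN fields; see get_cases_for_edge below.  */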

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table <locus_discrim_hasher> discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                             Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info->length () < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}


/* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
   it and set loop->safelen to INT_MAX.  We assume that the annotation
   comes immediately before the condition.  */
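/* Concretely (a sketch of the IL, not verbatim dump output): a loop
   annotated with "#pragma GCC ivdep" reaches us as

     D.1234 = ANNOTATE (cond, annot_expr_ivdep_kind);
     if (D.1234 != 0) ...

   and the internal call is rewritten to the plain copy "D.1234 = cond"
   while loop->safelen is set to INT_MAX.  */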

static void
replace_loop_annotate ()
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          gsi_prev_nondebug (&gsi);
          if (gsi_end_p (gsi))
            continue;
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;
          if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
              != annot_expr_ivdep_kind)
            continue;
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
          loop->safelen = INT_MAX;
        }
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB (bb)
    {
      gsi = gsi_last_bb (bb);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        gsi_prev_nondebug (&gsi);
      if (gsi_end_p (gsi))
        continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        continue;
      if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
          != annot_expr_ivdep_kind)
        continue;
      warning_at (gimple_location (stmt), 0, "ignoring %<GCC ivdep%> "
                  "annotation");
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          if (is_gimple_debug (stmt))
            {
              gsi_next_nondebug (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */
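/* For instance (a sketch): with two computed gotos

     <bb 2>: goto *p_1;        <bb 3>: goto *q_2;

   factoring rewrites them to store into a shared variable and jump to
   one new block holding the only remaining computed goto:

     <bb 2>: gotovar = p_1; goto <L.factored>;
     <bb 3>: gotovar = q_2; goto <L.factored>;
     <L.factored>: goto *gotovar;

   so the abnormal edges to every possible destination are created only
   once, for the single factored goto, rather than once per original
   goto.  */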

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto,
                                GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */
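/* In brief (a sketch of the rule, see stmt_starts_bb_p and
   stmt_ends_bb_p for the details): a label starts a new block, and any
   statement for which stmt_ends_bb_p is true (a GIMPLE_COND, a
   GIMPLE_SWITCH, a call that can throw or transfer control abnormally,
   and the like) terminates the current one; everything between such
   boundaries is chained into a single basic block.  */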

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
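          /* E.g. (a sketch): if foo can transfer control to a nonlocal
             label, "x_1 = foo ()" becomes "tmp = foo ()" followed by
             "x_1 = tmp", so the old value of x_1 is still available if
             foo returns abnormally.  */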
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */
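/* E.g. (a sketch): a condition that folds to a constant, such as
   "if (0 != 0)", is rewritten in place via gimple_cond_make_false (or
   gimple_cond_make_true for a constant-true condition), so edge creation
   and later cleanups see a trivial condition rather than refolding it.  */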

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), BASIC_BLOCK (NUM_FIXED_BLOCKS),
             EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                {
                  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
                  fallthru = false;
                }
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            CASE_GIMPLE_OMP:
              fallthru = make_gimple_omp_edges (bb, &cur_region);
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
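/* E.g. (a sketch): for "if (p) q = 0;" written on one source line, the
   block ending in the condition and the block holding the store share
   the line, so one of them is assigned discriminator 1 while the other
   keeps 0, letting a sample-based profiler attribute counts to each
   block separately.  */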

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the
     dataflow and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */
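/* A sketch of the rule: a computed "goto *p_1" (FOR_CALL false) gets an
   EDGE_ABNORMAL to every block whose leading label has its address taken
   (FORCED_LABEL), since p_1 could hold any such address at run time; a
   call that can trigger a nonlocal goto (FOR_CALL true) instead targets
   blocks whose leading label is DECL_NONLOCAL.  In addition, blocks whose
   first real statement is a setjmp-like (ECF_RETURNS_TWICE) call also
   receive an abnormal edge.  */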

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple label_stmt = gsi_stmt (gsi);
          tree target;

          if (gimple_code (label_stmt) != GIMPLE_LABEL)
            break;

          target = gimple_label_label (label_stmt);

          /* Make an edge to every label block that has been marked as a
             potential target for a computed goto or a non-local goto.  */
          if ((FORCED_LABEL (target) && !for_call)
              || (DECL_NONLOCAL (target) && for_call))
            {
              make_edge (bb, target_bb, EDGE_ABNORMAL);
              break;
            }
        }
      if (!gsi_end_p (gsi)
          && is_gimple_debug (gsi_stmt (gsi)))
        gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
        {
          /* Make an edge to every setjmp-like call.  */
          gimple call_stmt = gsi_stmt (gsi);
          if (is_gimple_call (call_stmt)
              && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
            make_edge (bb, target_bb, EDGE_ABNORMAL);
        }
    }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                              Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */
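/* For instance (a sketch): if a block starts with

     L1: L2: L3: x = 1;

   and only L2 is user-defined, L2 becomes the leading label, every
   reference to L1 or L3 (gotos, switch cases, asm labels, EH tables)
   is redirected to L2, and the now-unreferenced artificial L1 and L3
   are deleted.  */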

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
                  ? CASE_HIGH (base_case)
                  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case)
                          ? CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change something from non-invariant to invariant, and
             only when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug
             stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a GIMPLE_CALL.  Set the current_function_calls_* flags
   accordingly.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (dump_file, bb, 0, dump_flags);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
1929
1930
1931 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1932 predicate VAL, return the edge that will be taken out of the block.
1933 If VAL does not match a unique edge, NULL is returned. */
1934
1935 edge
1936 find_taken_edge (basic_block bb, tree val)
1937 {
1938 gimple stmt;
1939
1940 stmt = last_stmt (bb);
1941
1942 gcc_assert (stmt);
1943 gcc_assert (is_ctrl_stmt (stmt));
1944
1945 if (val == NULL)
1946 return NULL;
1947
1948 if (!is_gimple_min_invariant (val))
1949 return NULL;
1950
1951 if (gimple_code (stmt) == GIMPLE_COND)
1952 return find_taken_edge_cond_expr (bb, val);
1953
1954 if (gimple_code (stmt) == GIMPLE_SWITCH)
1955 return find_taken_edge_switch_expr (bb, val);
1956
1957 if (computed_goto_p (stmt))
1958 {
1959 /* Only optimize if the argument is a label, if the argument is
1960 not a label then we can not construct a proper CFG.
1961
1962 It may be the case that we only need to allow the LABEL_REF to
1963 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1964 appear inside a LABEL_EXPR just to be safe. */
1965 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1966 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1967 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1968 return NULL;
1969 }
1970
1971 gcc_unreachable ();
1972 }
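
 /* Illustrative sketch (kept under #if 0, not used by this file): a
 pass that has proven the controlling value of BB's last statement
 constant could prune the dead out-edges like so. Profile updates
 and the usual CFG cleanup are omitted. */
 #if 0
 static void
 example_prune_known_branch (basic_block bb, tree val)
 {
   edge taken = find_taken_edge (bb, val);
   unsigned ix = 0;

   if (!taken)
     return;

   /* All other outgoing edges are dead. remove_edge shrinks the
      successor vector, so only advance past TAKEN. */
   while (ix < EDGE_COUNT (bb->succs))
     {
       edge e = EDGE_SUCC (bb, ix);
       if (e == taken)
	 ix++;
       else
	 remove_edge (e);
     }
 }
 #endif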
1973
1974 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1975 statement, determine which of the outgoing edges will be taken out of the
 1976 block. Return NULL if any edge may be taken. */
1977
1978 static edge
1979 find_taken_edge_computed_goto (basic_block bb, tree val)
1980 {
1981 basic_block dest;
1982 edge e = NULL;
1983
1984 dest = label_to_block (val);
1985 if (dest)
1986 {
1987 e = find_edge (bb, dest);
1988 gcc_assert (e != NULL);
1989 }
1990
1991 return e;
1992 }
1993
1994 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1995 statement, determine which of the two edges will be taken out of the
1996 block. Return NULL if either edge may be taken. */
1997
1998 static edge
1999 find_taken_edge_cond_expr (basic_block bb, tree val)
2000 {
2001 edge true_edge, false_edge;
2002
2003 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2004
2005 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2006 return (integer_zerop (val) ? false_edge : true_edge);
2007 }
2008
2009 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2010 statement, determine which edge will be taken out of the block. Return
2011 NULL if any edge may be taken. */
2012
2013 static edge
2014 find_taken_edge_switch_expr (basic_block bb, tree val)
2015 {
2016 basic_block dest_bb;
2017 edge e;
2018 gimple switch_stmt;
2019 tree taken_case;
2020
2021 switch_stmt = last_stmt (bb);
2022 taken_case = find_case_label_for_value (switch_stmt, val);
2023 dest_bb = label_to_block (CASE_LABEL (taken_case));
2024
2025 e = find_edge (bb, dest_bb);
2026 gcc_assert (e);
2027 return e;
2028 }
2029
2030
2031 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2032 We can make optimal use here of the fact that the case labels are
2033 sorted: We can do a binary search for a case matching VAL. */
2034
2035 static tree
2036 find_case_label_for_value (gimple switch_stmt, tree val)
2037 {
2038 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2039 tree default_case = gimple_switch_default_label (switch_stmt);
2040
2041 for (low = 0, high = n; high - low > 1; )
2042 {
2043 size_t i = (high + low) / 2;
2044 tree t = gimple_switch_label (switch_stmt, i);
2045 int cmp;
2046
2047 /* Cache the result of comparing CASE_LOW and val. */
2048 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2049
2050 if (cmp > 0)
2051 high = i;
2052 else
2053 low = i;
2054
2055 if (CASE_HIGH (t) == NULL)
2056 {
 2057 /* A single-valued case label. */
2058 if (cmp == 0)
2059 return t;
2060 }
2061 else
2062 {
2063 /* A case range. We can only handle integer ranges. */
2064 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2065 return t;
2066 }
2067 }
2068
2069 return default_case;
2070 }
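
 /* The same search stripped of trees: a self-contained sketch, kept
 under #if 0 and with made-up names, only to illustrate the
 algorithm. Slot 0 plays the role of the default label and is
 never probed, because with LOW == 0 the midpoint (HIGH + LOW) / 2
 is at least 1 whenever the loop runs. */
 #if 0
 #include <stddef.h>

 struct case_range { long low, high; };	/* high == low: single value */

 static size_t
 example_find_range (const struct case_range *r, size_t n, long val)
 {
   size_t low = 0, high = n;

   while (high - low > 1)
     {
       size_t i = (high + low) / 2;
       int cmp = (r[i].low > val) - (r[i].low < val);

       if (cmp > 0)
	 high = i;
       else
	 low = i;

       if (cmp <= 0 && val <= r[i].high)
	 return i;		/* matching case label found */
     }

   return 0;			/* fall back to the default slot */
 }
 #endif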
2071
2072
2073 /* Dump a basic block on stderr. */
2074
2075 void
2076 gimple_debug_bb (basic_block bb)
2077 {
2078 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2079 }
2080
2081
2082 /* Dump basic block with index N on stderr. */
2083
2084 basic_block
2085 gimple_debug_bb_n (int n)
2086 {
2087 gimple_debug_bb (BASIC_BLOCK (n));
2088 return BASIC_BLOCK (n);
2089 }
2090
2091
2092 /* Dump the CFG on stderr.
2093
 2094 FLAGS are the same as those used by the tree dumping functions
2095 (see TDF_* in dumpfile.h). */
2096
2097 void
2098 gimple_debug_cfg (int flags)
2099 {
2100 gimple_dump_cfg (stderr, flags);
2101 }
2102
2103
2104 /* Dump the program showing basic block boundaries on the given FILE.
2105
 2106 FLAGS are the same as those used by the tree dumping functions
 2107 (see TDF_* in dumpfile.h). */
2108
2109 void
2110 gimple_dump_cfg (FILE *file, int flags)
2111 {
2112 if (flags & TDF_DETAILS)
2113 {
2114 dump_function_header (file, current_function_decl, flags);
2115 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2116 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2117 last_basic_block);
2118
2119 brief_dump_cfg (file, flags | TDF_COMMENT);
2120 fprintf (file, "\n");
2121 }
2122
2123 if (flags & TDF_STATS)
2124 dump_cfg_stats (file);
2125
2126 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2127 }
2128
2129
2130 /* Dump CFG statistics on FILE. */
2131
2132 void
2133 dump_cfg_stats (FILE *file)
2134 {
2135 static long max_num_merged_labels = 0;
2136 unsigned long size, total = 0;
2137 long num_edges;
2138 basic_block bb;
2139 const char * const fmt_str = "%-30s%-13s%12s\n";
2140 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2141 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2142 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2143 const char *funcname = current_function_name ();
2144
2145 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2146
2147 fprintf (file, "---------------------------------------------------------\n");
2148 fprintf (file, fmt_str, "", " Number of ", "Memory");
2149 fprintf (file, fmt_str, "", " instances ", "used ");
2150 fprintf (file, "---------------------------------------------------------\n");
2151
2152 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2153 total += size;
2154 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2155 SCALE (size), LABEL (size));
2156
2157 num_edges = 0;
2158 FOR_EACH_BB (bb)
2159 num_edges += EDGE_COUNT (bb->succs);
2160 size = num_edges * sizeof (struct edge_def);
2161 total += size;
2162 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2163
2164 fprintf (file, "---------------------------------------------------------\n");
2165 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2166 LABEL (total));
2167 fprintf (file, "---------------------------------------------------------\n");
2168 fprintf (file, "\n");
2169
2170 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2171 max_num_merged_labels = cfg_stats.num_merged_labels;
2172
2173 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2174 cfg_stats.num_merged_labels, max_num_merged_labels);
2175
2176 fprintf (file, "\n");
2177 }
2178
2179
2180 /* Dump CFG statistics on stderr. Keep extern so that it's always
2181 linked in the final executable. */
2182
2183 DEBUG_FUNCTION void
2184 debug_cfg_stats (void)
2185 {
2186 dump_cfg_stats (stderr);
2187 }
2188
2189 /*---------------------------------------------------------------------------
2190 Miscellaneous helpers
2191 ---------------------------------------------------------------------------*/
2192
2193 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2194 flow. Transfers of control flow associated with EH are excluded. */
2195
2196 static bool
2197 call_can_make_abnormal_goto (gimple t)
2198 {
2199 /* If the function has no non-local labels, then a call cannot make an
2200 abnormal transfer of control. */
2201 if (!cfun->has_nonlocal_label
2202 && !cfun->calls_setjmp)
2203 return false;
2204
2205 /* Likewise if the call has no side effects. */
2206 if (!gimple_has_side_effects (t))
2207 return false;
2208
2209 /* Likewise if the called function is leaf. */
2210 if (gimple_call_flags (t) & ECF_LEAF)
2211 return false;
2212
2213 return true;
2214 }
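
 /* For illustration, both of the following make a call able to make
 an abnormal transfer of control (GNU C; "bar" stands for an
 arbitrary external function):

      void f (void)
      {
        __label__ out;
        void g (void) { goto out; }   // nonlocal goto from a nested
        bar (g);                      // function: bar (g) may jump
       out:;                          // straight to "out"
      }

 and any function calling setjmp, where a later longjmp returns
 control to the setjmp call site (cfun->calls_setjmp). */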
2215
2216
2217 /* Return true if T can make an abnormal transfer of control flow.
2218 Transfers of control flow associated with EH are excluded. */
2219
2220 bool
2221 stmt_can_make_abnormal_goto (gimple t)
2222 {
2223 if (computed_goto_p (t))
2224 return true;
2225 if (is_gimple_call (t))
2226 return call_can_make_abnormal_goto (t);
2227 return false;
2228 }
2229
2230
2231 /* Return true if T represents a stmt that always transfers control. */
2232
2233 bool
2234 is_ctrl_stmt (gimple t)
2235 {
2236 switch (gimple_code (t))
2237 {
2238 case GIMPLE_COND:
2239 case GIMPLE_SWITCH:
2240 case GIMPLE_GOTO:
2241 case GIMPLE_RETURN:
2242 case GIMPLE_RESX:
2243 return true;
2244 default:
2245 return false;
2246 }
2247 }
2248
2249
2250 /* Return true if T is a statement that may alter the flow of control
2251 (e.g., a call to a non-returning function). */
2252
2253 bool
2254 is_ctrl_altering_stmt (gimple t)
2255 {
2256 gcc_assert (t);
2257
2258 switch (gimple_code (t))
2259 {
2260 case GIMPLE_CALL:
2261 {
2262 int flags = gimple_call_flags (t);
2263
2264 /* A call alters control flow if it can make an abnormal goto. */
2265 if (call_can_make_abnormal_goto (t))
2266 return true;
2267
2268 /* A call also alters control flow if it does not return. */
2269 if (flags & ECF_NORETURN)
2270 return true;
2271
2272 /* TM ending statements have backedges out of the transaction.
2273 Return true so we split the basic block containing them.
2274 Note that the TM_BUILTIN test is merely an optimization. */
2275 if ((flags & ECF_TM_BUILTIN)
2276 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2277 return true;
2278
 2279 /* A BUILT_IN_RETURN call is the same as a return statement. */
2280 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2281 return true;
2282 }
2283 break;
2284
2285 case GIMPLE_EH_DISPATCH:
2286 /* EH_DISPATCH branches to the individual catch handlers at
2287 this level of a try or allowed-exceptions region. It can
2288 fallthru to the next statement as well. */
2289 return true;
2290
2291 case GIMPLE_ASM:
2292 if (gimple_asm_nlabels (t) > 0)
2293 return true;
2294 break;
2295
2296 CASE_GIMPLE_OMP:
2297 /* OpenMP directives alter control flow. */
2298 return true;
2299
2300 case GIMPLE_TRANSACTION:
2301 /* A transaction start alters control flow. */
2302 return true;
2303
2304 default:
2305 break;
2306 }
2307
2308 /* If a statement can throw, it alters control flow. */
2309 return stmt_can_throw_internal (t);
2310 }
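
 /* For instance (illustration): a call to abort (ECF_NORETURN), an
 asm goto with a non-empty label list, any OpenMP directive and
 any statement that can throw internally all force the containing
 basic block to end after them. */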
2311
2312
2313 /* Return true if T is a simple local goto. */
2314
2315 bool
2316 simple_goto_p (gimple t)
2317 {
2318 return (gimple_code (t) == GIMPLE_GOTO
2319 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2320 }
2321
2322
2323 /* Return true if STMT should start a new basic block. PREV_STMT is
2324 the statement preceding STMT. It is used when STMT is a label or a
2325 case label. Labels should only start a new basic block if their
 2326 previous statement wasn't a label. Otherwise, sequences of labels
 2327 would generate unnecessary basic blocks that contain only a single
 2328 label. */
2329
2330 static inline bool
2331 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2332 {
2333 if (stmt == NULL)
2334 return false;
2335
2336 /* Labels start a new basic block only if the preceding statement
2337 wasn't a label of the same type. This prevents the creation of
2338 consecutive blocks that have nothing but a single label. */
2339 if (gimple_code (stmt) == GIMPLE_LABEL)
2340 {
2341 /* Nonlocal and computed GOTO targets always start a new block. */
2342 if (DECL_NONLOCAL (gimple_label_label (stmt))
2343 || FORCED_LABEL (gimple_label_label (stmt)))
2344 return true;
2345
2346 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2347 {
2348 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2349 return true;
2350
2351 cfg_stats.num_merged_labels++;
2352 return false;
2353 }
2354 else
2355 return true;
2356 }
2357 else if (gimple_code (stmt) == GIMPLE_CALL
2358 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
 2359 /* setjmp acts similarly to a nonlocal GOTO target and thus should
 2360 start a new block. */
2361 return true;
2362
2363 return false;
2364 }
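
 /* For illustration (plain C, not compiler code):

      l1:
      l2: x = 1;    // l1 and l2 end up in one block; the merge of l2
                    // is counted in cfg_stats.num_merged_labels
      goto *&&l3;
      l3: x = 2;    // &&l3 makes l3 a FORCED_LABEL, so it always
                    // starts a block of its own
 */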
2365
2366
2367 /* Return true if T should end a basic block. */
2368
2369 bool
2370 stmt_ends_bb_p (gimple t)
2371 {
2372 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2373 }
2374
2375 /* Remove block annotations and other data structures. */
2376
2377 void
2378 delete_tree_cfg_annotations (void)
2379 {
2380 vec_free (label_to_block_map);
2381 }
2382
2383
2384 /* Return the first statement in basic block BB. */
2385
2386 gimple
2387 first_stmt (basic_block bb)
2388 {
2389 gimple_stmt_iterator i = gsi_start_bb (bb);
2390 gimple stmt = NULL;
2391
2392 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2393 {
2394 gsi_next (&i);
2395 stmt = NULL;
2396 }
2397 return stmt;
2398 }
2399
2400 /* Return the first non-label statement in basic block BB. */
2401
2402 static gimple
2403 first_non_label_stmt (basic_block bb)
2404 {
2405 gimple_stmt_iterator i = gsi_start_bb (bb);
2406 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2407 gsi_next (&i);
2408 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2409 }
2410
2411 /* Return the last statement in basic block BB. */
2412
2413 gimple
2414 last_stmt (basic_block bb)
2415 {
2416 gimple_stmt_iterator i = gsi_last_bb (bb);
2417 gimple stmt = NULL;
2418
2419 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2420 {
2421 gsi_prev (&i);
2422 stmt = NULL;
2423 }
2424 return stmt;
2425 }
2426
2427 /* Return the last statement of an otherwise empty block. Return NULL
2428 if the block is totally empty, or if it contains more than one
2429 statement. */
2430
2431 gimple
2432 last_and_only_stmt (basic_block bb)
2433 {
2434 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2435 gimple last, prev;
2436
2437 if (gsi_end_p (i))
2438 return NULL;
2439
2440 last = gsi_stmt (i);
2441 gsi_prev_nondebug (&i);
2442 if (gsi_end_p (i))
2443 return last;
2444
2445 /* Empty statements should no longer appear in the instruction stream.
2446 Everything that might have appeared before should be deleted by
2447 remove_useless_stmts, and the optimizers should just gsi_remove
2448 instead of smashing with build_empty_stmt.
2449
2450 Thus the only thing that should appear here in a block containing
2451 one executable statement is a label. */
2452 prev = gsi_stmt (i);
2453 if (gimple_code (prev) == GIMPLE_LABEL)
2454 return last;
2455 else
2456 return NULL;
2457 }
2458
2459 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2460
2461 static void
2462 reinstall_phi_args (edge new_edge, edge old_edge)
2463 {
2464 edge_var_map_vector *v;
2465 edge_var_map *vm;
2466 int i;
2467 gimple_stmt_iterator phis;
2468
2469 v = redirect_edge_var_map_vector (old_edge);
2470 if (!v)
2471 return;
2472
2473 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2474 v->iterate (i, &vm) && !gsi_end_p (phis);
2475 i++, gsi_next (&phis))
2476 {
2477 gimple phi = gsi_stmt (phis);
2478 tree result = redirect_edge_var_map_result (vm);
2479 tree arg = redirect_edge_var_map_def (vm);
2480
2481 gcc_assert (result == gimple_phi_result (phi));
2482
2483 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2484 }
2485
2486 redirect_edge_var_map_clear (old_edge);
2487 }
2488
2489 /* Returns the basic block after which the new basic block created
2490 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2491 near its "logical" location. This is of most help to humans looking
2492 at debugging dumps. */
2493
2494 static basic_block
2495 split_edge_bb_loc (edge edge_in)
2496 {
2497 basic_block dest = edge_in->dest;
2498 basic_block dest_prev = dest->prev_bb;
2499
2500 if (dest_prev)
2501 {
2502 edge e = find_edge (dest_prev, dest);
2503 if (e && !(e->flags & EDGE_COMPLEX))
2504 return edge_in->src;
2505 }
2506 return dest_prev;
2507 }
2508
2509 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2510 Abort on abnormal edges. */
2511
2512 static basic_block
2513 gimple_split_edge (edge edge_in)
2514 {
2515 basic_block new_bb, after_bb, dest;
2516 edge new_edge, e;
2517
2518 /* Abnormal edges cannot be split. */
2519 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2520
2521 dest = edge_in->dest;
2522
2523 after_bb = split_edge_bb_loc (edge_in);
2524
2525 new_bb = create_empty_bb (after_bb);
2526 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2527 new_bb->count = edge_in->count;
2528 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2529 new_edge->probability = REG_BR_PROB_BASE;
2530 new_edge->count = edge_in->count;
2531
2532 e = redirect_edge_and_branch (edge_in, new_bb);
2533 gcc_assert (e == edge_in);
2534 reinstall_phi_args (new_edge, e);
2535
2536 return new_bb;
2537 }
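
 /* Pictorially (illustration): if both A and B branch to D, the edge
 E = A->D is critical and cannot hold edge-specific code. After
 gimple_split_edge (E):

      before:  A --E--> D    B ----> D
      after:   A ----> N ----> D    B ----> D

 The new empty block N inherits E's count and frequency; A->N is
 the redirected branch and N->D a fallthru edge, onto which the
 queued PHI arguments are reinstalled by reinstall_phi_args. */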
2538
2539
2540 /* Verify properties of the address expression T with base object BASE. */
2541
2542 static tree
2543 verify_address (tree t, tree base)
2544 {
2545 bool old_constant;
2546 bool old_side_effects;
2547 bool new_constant;
2548 bool new_side_effects;
2549
2550 old_constant = TREE_CONSTANT (t);
2551 old_side_effects = TREE_SIDE_EFFECTS (t);
2552
2553 recompute_tree_invariant_for_addr_expr (t);
2554 new_side_effects = TREE_SIDE_EFFECTS (t);
2555 new_constant = TREE_CONSTANT (t);
2556
2557 if (old_constant != new_constant)
2558 {
2559 error ("constant not recomputed when ADDR_EXPR changed");
2560 return t;
2561 }
2562 if (old_side_effects != new_side_effects)
2563 {
2564 error ("side effects not recomputed when ADDR_EXPR changed");
2565 return t;
2566 }
2567
2568 if (!(TREE_CODE (base) == VAR_DECL
2569 || TREE_CODE (base) == PARM_DECL
2570 || TREE_CODE (base) == RESULT_DECL))
2571 return NULL_TREE;
2572
2573 if (DECL_GIMPLE_REG_P (base))
2574 {
2575 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2576 return base;
2577 }
2578
2579 return NULL_TREE;
2580 }
2581
2582 /* Callback for walk_tree, check that all elements with address taken are
2583 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2584 inside a PHI node. */
2585
2586 static tree
2587 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2588 {
2589 tree t = *tp, x;
2590
2591 if (TYPE_P (t))
2592 *walk_subtrees = 0;
2593
2594 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2595 #define CHECK_OP(N, MSG) \
2596 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2597 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2598
2599 switch (TREE_CODE (t))
2600 {
2601 case SSA_NAME:
2602 if (SSA_NAME_IN_FREE_LIST (t))
2603 {
2604 error ("SSA name in freelist but still referenced");
2605 return *tp;
2606 }
2607 break;
2608
2609 case INDIRECT_REF:
2610 error ("INDIRECT_REF in gimple IL");
2611 return t;
2612
2613 case MEM_REF:
2614 x = TREE_OPERAND (t, 0);
2615 if (!POINTER_TYPE_P (TREE_TYPE (x))
2616 || !is_gimple_mem_ref_addr (x))
2617 {
2618 error ("invalid first operand of MEM_REF");
2619 return x;
2620 }
2621 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2622 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2623 {
2624 error ("invalid offset operand of MEM_REF");
2625 return TREE_OPERAND (t, 1);
2626 }
2627 if (TREE_CODE (x) == ADDR_EXPR
2628 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2629 return x;
2630 *walk_subtrees = 0;
2631 break;
2632
2633 case ASSERT_EXPR:
2634 x = fold (ASSERT_EXPR_COND (t));
2635 if (x == boolean_false_node)
2636 {
2637 error ("ASSERT_EXPR with an always-false condition");
2638 return *tp;
2639 }
2640 break;
2641
2642 case MODIFY_EXPR:
2643 error ("MODIFY_EXPR not expected while having tuples");
2644 return *tp;
2645
2646 case ADDR_EXPR:
2647 {
2648 tree tem;
2649
2650 gcc_assert (is_gimple_address (t));
2651
2652 /* Skip any references (they will be checked when we recurse down the
2653 tree) and ensure that any variable used as a prefix is marked
2654 addressable. */
2655 for (x = TREE_OPERAND (t, 0);
2656 handled_component_p (x);
2657 x = TREE_OPERAND (x, 0))
2658 ;
2659
2660 if ((tem = verify_address (t, x)))
2661 return tem;
2662
2663 if (!(TREE_CODE (x) == VAR_DECL
2664 || TREE_CODE (x) == PARM_DECL
2665 || TREE_CODE (x) == RESULT_DECL))
2666 return NULL;
2667
2668 if (!TREE_ADDRESSABLE (x))
2669 {
2670 error ("address taken, but ADDRESSABLE bit not set");
2671 return x;
2672 }
2673
2674 break;
2675 }
2676
2677 case COND_EXPR:
2678 x = COND_EXPR_COND (t);
2679 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2680 {
2681 error ("non-integral used in condition");
2682 return x;
2683 }
2684 if (!is_gimple_condexpr (x))
2685 {
2686 error ("invalid conditional operand");
2687 return x;
2688 }
2689 break;
2690
2691 case NON_LVALUE_EXPR:
2692 case TRUTH_NOT_EXPR:
2693 gcc_unreachable ();
2694
2695 CASE_CONVERT:
2696 case FIX_TRUNC_EXPR:
2697 case FLOAT_EXPR:
2698 case NEGATE_EXPR:
2699 case ABS_EXPR:
2700 case BIT_NOT_EXPR:
2701 CHECK_OP (0, "invalid operand to unary operator");
2702 break;
2703
2704 case REALPART_EXPR:
2705 case IMAGPART_EXPR:
2706 case BIT_FIELD_REF:
2707 if (!is_gimple_reg_type (TREE_TYPE (t)))
2708 {
2709 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2710 return t;
2711 }
2712
2713 if (TREE_CODE (t) == BIT_FIELD_REF)
2714 {
2715 if (!tree_fits_uhwi_p (TREE_OPERAND (t, 1))
2716 || !tree_fits_uhwi_p (TREE_OPERAND (t, 2)))
2717 {
2718 error ("invalid position or size operand to BIT_FIELD_REF");
2719 return t;
2720 }
2721 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2722 && (TYPE_PRECISION (TREE_TYPE (t))
2723 != tree_to_uhwi (TREE_OPERAND (t, 1))))
2724 {
2725 error ("integral result type precision does not match "
2726 "field size of BIT_FIELD_REF");
2727 return t;
2728 }
2729 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2730 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2731 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2732 != tree_to_uhwi (TREE_OPERAND (t, 1))))
2733 {
2734 error ("mode precision of non-integral result does not "
2735 "match field size of BIT_FIELD_REF");
2736 return t;
2737 }
2738 }
2739 t = TREE_OPERAND (t, 0);
2740
2741 /* Fall-through. */
2742 case COMPONENT_REF:
2743 case ARRAY_REF:
2744 case ARRAY_RANGE_REF:
2745 case VIEW_CONVERT_EXPR:
2746 /* We have a nest of references. Verify that each of the operands
2747 that determine where to reference is either a constant or a variable,
2748 verify that the base is valid, and then show we've already checked
2749 the subtrees. */
2750 while (handled_component_p (t))
2751 {
2752 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2753 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2754 else if (TREE_CODE (t) == ARRAY_REF
2755 || TREE_CODE (t) == ARRAY_RANGE_REF)
2756 {
2757 CHECK_OP (1, "invalid array index");
2758 if (TREE_OPERAND (t, 2))
2759 CHECK_OP (2, "invalid array lower bound");
2760 if (TREE_OPERAND (t, 3))
2761 CHECK_OP (3, "invalid array stride");
2762 }
2763 else if (TREE_CODE (t) == BIT_FIELD_REF
2764 || TREE_CODE (t) == REALPART_EXPR
2765 || TREE_CODE (t) == IMAGPART_EXPR)
2766 {
2767 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2768 "REALPART_EXPR");
2769 return t;
2770 }
2771
2772 t = TREE_OPERAND (t, 0);
2773 }
2774
2775 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2776 {
2777 error ("invalid reference prefix");
2778 return t;
2779 }
2780 *walk_subtrees = 0;
2781 break;
2782 case PLUS_EXPR:
2783 case MINUS_EXPR:
 2784 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
 2785 arithmetic should be done using POINTER_PLUS_EXPR. */
2786 if (POINTER_TYPE_P (TREE_TYPE (t)))
2787 {
2788 error ("invalid operand to plus/minus, type is a pointer");
2789 return t;
2790 }
2791 CHECK_OP (0, "invalid operand to binary operator");
2792 CHECK_OP (1, "invalid operand to binary operator");
2793 break;
2794
2795 case POINTER_PLUS_EXPR:
2796 /* Check to make sure the first operand is a pointer or reference type. */
2797 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2798 {
2799 error ("invalid operand to pointer plus, first operand is not a pointer");
2800 return t;
2801 }
2802 /* Check to make sure the second operand is a ptrofftype. */
2803 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2804 {
2805 error ("invalid operand to pointer plus, second operand is not an "
2806 "integer type of appropriate width");
2807 return t;
2808 }
2809 /* FALLTHROUGH */
2810 case LT_EXPR:
2811 case LE_EXPR:
2812 case GT_EXPR:
2813 case GE_EXPR:
2814 case EQ_EXPR:
2815 case NE_EXPR:
2816 case UNORDERED_EXPR:
2817 case ORDERED_EXPR:
2818 case UNLT_EXPR:
2819 case UNLE_EXPR:
2820 case UNGT_EXPR:
2821 case UNGE_EXPR:
2822 case UNEQ_EXPR:
2823 case LTGT_EXPR:
2824 case MULT_EXPR:
2825 case TRUNC_DIV_EXPR:
2826 case CEIL_DIV_EXPR:
2827 case FLOOR_DIV_EXPR:
2828 case ROUND_DIV_EXPR:
2829 case TRUNC_MOD_EXPR:
2830 case CEIL_MOD_EXPR:
2831 case FLOOR_MOD_EXPR:
2832 case ROUND_MOD_EXPR:
2833 case RDIV_EXPR:
2834 case EXACT_DIV_EXPR:
2835 case MIN_EXPR:
2836 case MAX_EXPR:
2837 case LSHIFT_EXPR:
2838 case RSHIFT_EXPR:
2839 case LROTATE_EXPR:
2840 case RROTATE_EXPR:
2841 case BIT_IOR_EXPR:
2842 case BIT_XOR_EXPR:
2843 case BIT_AND_EXPR:
2844 CHECK_OP (0, "invalid operand to binary operator");
2845 CHECK_OP (1, "invalid operand to binary operator");
2846 break;
2847
2848 case CONSTRUCTOR:
2849 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2850 *walk_subtrees = 0;
2851 break;
2852
2853 case CASE_LABEL_EXPR:
2854 if (CASE_CHAIN (t))
2855 {
2856 error ("invalid CASE_CHAIN");
2857 return t;
2858 }
2859 break;
2860
2861 default:
2862 break;
2863 }
2864 return NULL;
2865
2866 #undef CHECK_OP
2867 }
2868
2869
2870 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2871 Returns true if there is an error, otherwise false. */
2872
2873 static bool
2874 verify_types_in_gimple_min_lval (tree expr)
2875 {
2876 tree op;
2877
2878 if (is_gimple_id (expr))
2879 return false;
2880
2881 if (TREE_CODE (expr) != TARGET_MEM_REF
2882 && TREE_CODE (expr) != MEM_REF)
2883 {
2884 error ("invalid expression for min lvalue");
2885 return true;
2886 }
2887
2888 /* TARGET_MEM_REFs are strange beasts. */
2889 if (TREE_CODE (expr) == TARGET_MEM_REF)
2890 return false;
2891
2892 op = TREE_OPERAND (expr, 0);
2893 if (!is_gimple_val (op))
2894 {
2895 error ("invalid operand in indirect reference");
2896 debug_generic_stmt (op);
2897 return true;
2898 }
2899 /* Memory references now generally can involve a value conversion. */
2900
2901 return false;
2902 }
2903
2904 /* Verify if EXPR is a valid GIMPLE reference expression. If
2905 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2906 if there is an error, otherwise false. */
2907
2908 static bool
2909 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2910 {
2911 while (handled_component_p (expr))
2912 {
2913 tree op = TREE_OPERAND (expr, 0);
2914
2915 if (TREE_CODE (expr) == ARRAY_REF
2916 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2917 {
2918 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2919 || (TREE_OPERAND (expr, 2)
2920 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2921 || (TREE_OPERAND (expr, 3)
2922 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2923 {
2924 error ("invalid operands to array reference");
2925 debug_generic_stmt (expr);
2926 return true;
2927 }
2928 }
2929
2930 /* Verify if the reference array element types are compatible. */
2931 if (TREE_CODE (expr) == ARRAY_REF
2932 && !useless_type_conversion_p (TREE_TYPE (expr),
2933 TREE_TYPE (TREE_TYPE (op))))
2934 {
2935 error ("type mismatch in array reference");
2936 debug_generic_stmt (TREE_TYPE (expr));
2937 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2938 return true;
2939 }
2940 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2941 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2942 TREE_TYPE (TREE_TYPE (op))))
2943 {
2944 error ("type mismatch in array range reference");
2945 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2946 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2947 return true;
2948 }
2949
2950 if ((TREE_CODE (expr) == REALPART_EXPR
2951 || TREE_CODE (expr) == IMAGPART_EXPR)
2952 && !useless_type_conversion_p (TREE_TYPE (expr),
2953 TREE_TYPE (TREE_TYPE (op))))
2954 {
2955 error ("type mismatch in real/imagpart reference");
2956 debug_generic_stmt (TREE_TYPE (expr));
2957 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2958 return true;
2959 }
2960
2961 if (TREE_CODE (expr) == COMPONENT_REF
2962 && !useless_type_conversion_p (TREE_TYPE (expr),
2963 TREE_TYPE (TREE_OPERAND (expr, 1))))
2964 {
2965 error ("type mismatch in component reference");
2966 debug_generic_stmt (TREE_TYPE (expr));
2967 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2968 return true;
2969 }
2970
2971 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2972 {
2973 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2974 that their operand is not an SSA name or an invariant when
 2975 requiring an lvalue (this usually means there is an SRA or IPA-SRA
2976 bug). Otherwise there is nothing to verify, gross mismatches at
2977 most invoke undefined behavior. */
2978 if (require_lvalue
2979 && (TREE_CODE (op) == SSA_NAME
2980 || is_gimple_min_invariant (op)))
2981 {
2982 error ("conversion of an SSA_NAME on the left hand side");
2983 debug_generic_stmt (expr);
2984 return true;
2985 }
2986 else if (TREE_CODE (op) == SSA_NAME
2987 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2988 {
2989 error ("conversion of register to a different size");
2990 debug_generic_stmt (expr);
2991 return true;
2992 }
2993 else if (!handled_component_p (op))
2994 return false;
2995 }
2996
2997 expr = op;
2998 }
2999
3000 if (TREE_CODE (expr) == MEM_REF)
3001 {
3002 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3003 {
3004 error ("invalid address operand in MEM_REF");
3005 debug_generic_stmt (expr);
3006 return true;
3007 }
3008 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3009 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3010 {
3011 error ("invalid offset operand in MEM_REF");
3012 debug_generic_stmt (expr);
3013 return true;
3014 }
3015 }
3016 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3017 {
3018 if (!TMR_BASE (expr)
3019 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3020 {
3021 error ("invalid address operand in TARGET_MEM_REF");
3022 return true;
3023 }
3024 if (!TMR_OFFSET (expr)
3025 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3026 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3027 {
3028 error ("invalid offset operand in TARGET_MEM_REF");
3029 debug_generic_stmt (expr);
3030 return true;
3031 }
3032 }
3033
3034 return ((require_lvalue || !is_gimple_min_invariant (expr))
3035 && verify_types_in_gimple_min_lval (expr));
3036 }
3037
3038 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3039 list of pointer-to types that is trivially convertible to DEST. */
3040
3041 static bool
3042 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3043 {
3044 tree src;
3045
3046 if (!TYPE_POINTER_TO (src_obj))
3047 return true;
3048
3049 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3050 if (useless_type_conversion_p (dest, src))
3051 return true;
3052
3053 return false;
3054 }
3055
3056 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3057 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3058
3059 static bool
3060 valid_fixed_convert_types_p (tree type1, tree type2)
3061 {
3062 return (FIXED_POINT_TYPE_P (type1)
3063 && (INTEGRAL_TYPE_P (type2)
3064 || SCALAR_FLOAT_TYPE_P (type2)
3065 || FIXED_POINT_TYPE_P (type2)));
3066 }
3067
3068 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3069 is a problem, otherwise false. */
3070
3071 static bool
3072 verify_gimple_call (gimple stmt)
3073 {
3074 tree fn = gimple_call_fn (stmt);
3075 tree fntype, fndecl;
3076 unsigned i;
3077
3078 if (gimple_call_internal_p (stmt))
3079 {
3080 if (fn)
3081 {
3082 error ("gimple call has two targets");
3083 debug_generic_stmt (fn);
3084 return true;
3085 }
3086 }
3087 else
3088 {
3089 if (!fn)
3090 {
3091 error ("gimple call has no target");
3092 return true;
3093 }
3094 }
3095
3096 if (fn && !is_gimple_call_addr (fn))
3097 {
3098 error ("invalid function in gimple call");
3099 debug_generic_stmt (fn);
3100 return true;
3101 }
3102
3103 if (fn
3104 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3105 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3106 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3107 {
3108 error ("non-function in gimple call");
3109 return true;
3110 }
3111
3112 fndecl = gimple_call_fndecl (stmt);
3113 if (fndecl
3114 && TREE_CODE (fndecl) == FUNCTION_DECL
3115 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3116 && !DECL_PURE_P (fndecl)
3117 && !TREE_READONLY (fndecl))
3118 {
3119 error ("invalid pure const state for function");
3120 return true;
3121 }
3122
3123 if (gimple_call_lhs (stmt)
3124 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3125 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3126 {
3127 error ("invalid LHS in gimple call");
3128 return true;
3129 }
3130
3131 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3132 {
3133 error ("LHS in noreturn call");
3134 return true;
3135 }
3136
3137 fntype = gimple_call_fntype (stmt);
3138 if (fntype
3139 && gimple_call_lhs (stmt)
3140 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3141 TREE_TYPE (fntype))
3142 /* ??? At least C++ misses conversions at assignments from
3143 void * call results.
3144 ??? Java is completely off. Especially with functions
3145 returning java.lang.Object.
3146 For now simply allow arbitrary pointer type conversions. */
3147 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3148 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3149 {
3150 error ("invalid conversion in gimple call");
3151 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3152 debug_generic_stmt (TREE_TYPE (fntype));
3153 return true;
3154 }
3155
3156 if (gimple_call_chain (stmt)
3157 && !is_gimple_val (gimple_call_chain (stmt)))
3158 {
3159 error ("invalid static chain in gimple call");
3160 debug_generic_stmt (gimple_call_chain (stmt));
3161 return true;
3162 }
3163
3164 /* If there is a static chain argument, this should not be an indirect
3165 call, and the decl should have DECL_STATIC_CHAIN set. */
3166 if (gimple_call_chain (stmt))
3167 {
3168 if (!gimple_call_fndecl (stmt))
3169 {
3170 error ("static chain in indirect gimple call");
3171 return true;
3172 }
3173 fn = TREE_OPERAND (fn, 0);
3174
3175 if (!DECL_STATIC_CHAIN (fn))
3176 {
3177 error ("static chain with function that doesn%'t use one");
3178 return true;
3179 }
3180 }
3181
3182 /* ??? The C frontend passes unpromoted arguments in case it
3183 didn't see a function declaration before the call. So for now
3184 leave the call arguments mostly unverified. Once we gimplify
3185 unit-at-a-time we have a chance to fix this. */
3186
3187 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3188 {
3189 tree arg = gimple_call_arg (stmt, i);
3190 if ((is_gimple_reg_type (TREE_TYPE (arg))
3191 && !is_gimple_val (arg))
3192 || (!is_gimple_reg_type (TREE_TYPE (arg))
3193 && !is_gimple_lvalue (arg)))
3194 {
3195 error ("invalid argument to gimple call");
3196 debug_generic_expr (arg);
3197 return true;
3198 }
3199 }
3200
3201 return false;
3202 }
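
 /* Illustration: for a GNU C nested function called directly, as in

      int f (int x) { int g (int y) { return x + y; } return g (1); }

 the call g (1) carries a static chain pointing at f's frame and
 g's decl has DECL_STATIC_CHAIN set; the checks above reject a
 static chain on an indirect call or on a function that does not
 use one. */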
3203
3204 /* Verifies the gimple comparison with the result type TYPE and
3205 the operands OP0 and OP1. */
3206
3207 static bool
3208 verify_gimple_comparison (tree type, tree op0, tree op1)
3209 {
3210 tree op0_type = TREE_TYPE (op0);
3211 tree op1_type = TREE_TYPE (op1);
3212
3213 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3214 {
3215 error ("invalid operands in gimple comparison");
3216 return true;
3217 }
3218
 3219 /* For comparisons we do not have the operation's type as the
3220 effective type the comparison is carried out in. Instead
3221 we require that either the first operand is trivially
3222 convertible into the second, or the other way around.
3223 Because we special-case pointers to void we allow
3224 comparisons of pointers with the same mode as well. */
3225 if (!useless_type_conversion_p (op0_type, op1_type)
3226 && !useless_type_conversion_p (op1_type, op0_type)
3227 && (!POINTER_TYPE_P (op0_type)
3228 || !POINTER_TYPE_P (op1_type)
3229 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3230 {
3231 error ("mismatching comparison operand types");
3232 debug_generic_expr (op0_type);
3233 debug_generic_expr (op1_type);
3234 return true;
3235 }
3236
3237 /* The resulting type of a comparison may be an effective boolean type. */
3238 if (INTEGRAL_TYPE_P (type)
3239 && (TREE_CODE (type) == BOOLEAN_TYPE
3240 || TYPE_PRECISION (type) == 1))
3241 {
3242 if (TREE_CODE (op0_type) == VECTOR_TYPE
3243 || TREE_CODE (op1_type) == VECTOR_TYPE)
3244 {
3245 error ("vector comparison returning a boolean");
3246 debug_generic_expr (op0_type);
3247 debug_generic_expr (op1_type);
3248 return true;
3249 }
3250 }
3251 /* Or an integer vector type with the same size and element count
3252 as the comparison operand types. */
3253 else if (TREE_CODE (type) == VECTOR_TYPE
3254 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3255 {
3256 if (TREE_CODE (op0_type) != VECTOR_TYPE
3257 || TREE_CODE (op1_type) != VECTOR_TYPE)
3258 {
3259 error ("non-vector operands in vector comparison");
3260 debug_generic_expr (op0_type);
3261 debug_generic_expr (op1_type);
3262 return true;
3263 }
3264
3265 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3266 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3267 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3268 /* The result of a vector comparison is of signed
3269 integral type. */
3270 || TYPE_UNSIGNED (TREE_TYPE (type)))
3271 {
3272 error ("invalid vector comparison resulting type");
3273 debug_generic_expr (type);
3274 return true;
3275 }
3276 }
3277 else
3278 {
3279 error ("bogus comparison result type");
3280 debug_generic_expr (type);
3281 return true;
3282 }
3283
3284 return false;
3285 }
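
 /* Examples of what this accepts (illustration): a scalar comparison

      _1 = a_2 < b_3;

 may produce _Bool or any 1-bit integral type, while a vector
 comparison of, say, two vector(2) double operands must produce a
 signed integer vector with matching element count and size, e.g.
 vector(2) long int -- never a scalar _Bool. */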
3286
3287 /* Verify a gimple assignment statement STMT with an unary rhs.
3288 Returns true if anything is wrong. */
3289
3290 static bool
3291 verify_gimple_assign_unary (gimple stmt)
3292 {
3293 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3294 tree lhs = gimple_assign_lhs (stmt);
3295 tree lhs_type = TREE_TYPE (lhs);
3296 tree rhs1 = gimple_assign_rhs1 (stmt);
3297 tree rhs1_type = TREE_TYPE (rhs1);
3298
3299 if (!is_gimple_reg (lhs))
3300 {
3301 error ("non-register as LHS of unary operation");
3302 return true;
3303 }
3304
3305 if (!is_gimple_val (rhs1))
3306 {
3307 error ("invalid operand in unary operation");
3308 return true;
3309 }
3310
3311 /* First handle conversions. */
3312 switch (rhs_code)
3313 {
3314 CASE_CONVERT:
3315 {
3316 /* Allow conversions from pointer type to integral type only if
3317 there is no sign or zero extension involved.
 3318 For targets where the precision of ptrofftype doesn't match that
3319 of pointers we need to allow arbitrary conversions to ptrofftype. */
3320 if ((POINTER_TYPE_P (lhs_type)
3321 && INTEGRAL_TYPE_P (rhs1_type))
3322 || (POINTER_TYPE_P (rhs1_type)
3323 && INTEGRAL_TYPE_P (lhs_type)
3324 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3325 || ptrofftype_p (sizetype))))
3326 return false;
3327
3328 /* Allow conversion from integral to offset type and vice versa. */
3329 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3330 && INTEGRAL_TYPE_P (rhs1_type))
3331 || (INTEGRAL_TYPE_P (lhs_type)
3332 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3333 return false;
3334
3335 /* Otherwise assert we are converting between types of the
3336 same kind. */
3337 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3338 {
3339 error ("invalid types in nop conversion");
3340 debug_generic_expr (lhs_type);
3341 debug_generic_expr (rhs1_type);
3342 return true;
3343 }
3344
3345 return false;
3346 }
3347
3348 case ADDR_SPACE_CONVERT_EXPR:
3349 {
3350 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3351 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3352 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3353 {
3354 error ("invalid types in address space conversion");
3355 debug_generic_expr (lhs_type);
3356 debug_generic_expr (rhs1_type);
3357 return true;
3358 }
3359
3360 return false;
3361 }
3362
3363 case FIXED_CONVERT_EXPR:
3364 {
3365 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3366 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3367 {
3368 error ("invalid types in fixed-point conversion");
3369 debug_generic_expr (lhs_type);
3370 debug_generic_expr (rhs1_type);
3371 return true;
3372 }
3373
3374 return false;
3375 }
3376
3377 case FLOAT_EXPR:
3378 {
3379 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3380 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3381 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3382 {
3383 error ("invalid types in conversion to floating point");
3384 debug_generic_expr (lhs_type);
3385 debug_generic_expr (rhs1_type);
3386 return true;
3387 }
3388
3389 return false;
3390 }
3391
3392 case FIX_TRUNC_EXPR:
3393 {
3394 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3395 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3396 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3397 {
3398 error ("invalid types in conversion to integer");
3399 debug_generic_expr (lhs_type);
3400 debug_generic_expr (rhs1_type);
3401 return true;
3402 }
3403
3404 return false;
3405 }
3406
3407 case VEC_UNPACK_HI_EXPR:
3408 case VEC_UNPACK_LO_EXPR:
3409 case REDUC_MAX_EXPR:
3410 case REDUC_MIN_EXPR:
3411 case REDUC_PLUS_EXPR:
3412 case VEC_UNPACK_FLOAT_HI_EXPR:
3413 case VEC_UNPACK_FLOAT_LO_EXPR:
3414 /* FIXME. */
3415 return false;
3416
3417 case NEGATE_EXPR:
3418 case ABS_EXPR:
3419 case BIT_NOT_EXPR:
3420 case PAREN_EXPR:
3421 case NON_LVALUE_EXPR:
3422 case CONJ_EXPR:
3423 break;
3424
3425 default:
3426 gcc_unreachable ();
3427 }
3428
3429 /* For the remaining codes assert there is no conversion involved. */
3430 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3431 {
3432 error ("non-trivial conversion in unary operation");
3433 debug_generic_expr (lhs_type);
3434 debug_generic_expr (rhs1_type);
3435 return true;
3436 }
3437
3438 return false;
3439 }
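
 /* Illustration of the pointer rules above (assuming 64-bit
 pointers): "long _1 = (long) p_2" involves no extension and is
 accepted, whereas widening a 32-bit pointer into a 64-bit integer
 would involve a zero or sign extension and must be expressed as a
 conversion to an integer of pointer width followed by a separate
 extension statement. */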
3440
3441 /* Verify a gimple assignment statement STMT with a binary rhs.
3442 Returns true if anything is wrong. */
3443
3444 static bool
3445 verify_gimple_assign_binary (gimple stmt)
3446 {
3447 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3448 tree lhs = gimple_assign_lhs (stmt);
3449 tree lhs_type = TREE_TYPE (lhs);
3450 tree rhs1 = gimple_assign_rhs1 (stmt);
3451 tree rhs1_type = TREE_TYPE (rhs1);
3452 tree rhs2 = gimple_assign_rhs2 (stmt);
3453 tree rhs2_type = TREE_TYPE (rhs2);
3454
3455 if (!is_gimple_reg (lhs))
3456 {
3457 error ("non-register as LHS of binary operation");
3458 return true;
3459 }
3460
3461 if (!is_gimple_val (rhs1)
3462 || !is_gimple_val (rhs2))
3463 {
3464 error ("invalid operands in binary operation");
3465 return true;
3466 }
3467
3468 /* First handle operations that involve different types. */
3469 switch (rhs_code)
3470 {
3471 case COMPLEX_EXPR:
3472 {
3473 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3474 || !(INTEGRAL_TYPE_P (rhs1_type)
3475 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3476 || !(INTEGRAL_TYPE_P (rhs2_type)
3477 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3478 {
3479 error ("type mismatch in complex expression");
3480 debug_generic_expr (lhs_type);
3481 debug_generic_expr (rhs1_type);
3482 debug_generic_expr (rhs2_type);
3483 return true;
3484 }
3485
3486 return false;
3487 }
3488
3489 case LSHIFT_EXPR:
3490 case RSHIFT_EXPR:
3491 case LROTATE_EXPR:
3492 case RROTATE_EXPR:
3493 {
3494 /* Shifts and rotates are ok on integral types, fixed point
3495 types and integer vector types. */
3496 if ((!INTEGRAL_TYPE_P (rhs1_type)
3497 && !FIXED_POINT_TYPE_P (rhs1_type)
3498 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3499 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3500 || (!INTEGRAL_TYPE_P (rhs2_type)
3501 /* Vector shifts of vectors are also ok. */
3502 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3503 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3504 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3505 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3506 || !useless_type_conversion_p (lhs_type, rhs1_type))
3507 {
3508 error ("type mismatch in shift expression");
3509 debug_generic_expr (lhs_type);
3510 debug_generic_expr (rhs1_type);
3511 debug_generic_expr (rhs2_type);
3512 return true;
3513 }
3514
3515 return false;
3516 }
3517
3518 case VEC_LSHIFT_EXPR:
3519 case VEC_RSHIFT_EXPR:
3520 {
3521 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3522 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3523 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3524 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3525 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3526 || (!INTEGRAL_TYPE_P (rhs2_type)
3527 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3528 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3529 || !useless_type_conversion_p (lhs_type, rhs1_type))
3530 {
3531 error ("type mismatch in vector shift expression");
3532 debug_generic_expr (lhs_type);
3533 debug_generic_expr (rhs1_type);
3534 debug_generic_expr (rhs2_type);
3535 return true;
3536 }
3537 /* For shifting a vector of non-integral components we
3538 only allow shifting by a constant multiple of the element size. */
3539 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3540 && (TREE_CODE (rhs2) != INTEGER_CST
3541 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3542 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3543 {
3544 error ("non-element sized vector shift of floating point vector");
3545 return true;
3546 }
3547
3548 return false;
3549 }
3550
3551 case WIDEN_LSHIFT_EXPR:
3552 {
3553 if (!INTEGRAL_TYPE_P (lhs_type)
3554 || !INTEGRAL_TYPE_P (rhs1_type)
3555 || TREE_CODE (rhs2) != INTEGER_CST
3556 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3557 {
3558 error ("type mismatch in widening vector shift expression");
3559 debug_generic_expr (lhs_type);
3560 debug_generic_expr (rhs1_type);
3561 debug_generic_expr (rhs2_type);
3562 return true;
3563 }
3564
3565 return false;
3566 }
3567
3568 case VEC_WIDEN_LSHIFT_HI_EXPR:
3569 case VEC_WIDEN_LSHIFT_LO_EXPR:
3570 {
3571 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3572 || TREE_CODE (lhs_type) != VECTOR_TYPE
3573 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3574 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3575 || TREE_CODE (rhs2) != INTEGER_CST
3576 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3577 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3578 {
3579 error ("type mismatch in widening vector shift expression");
3580 debug_generic_expr (lhs_type);
3581 debug_generic_expr (rhs1_type);
3582 debug_generic_expr (rhs2_type);
3583 return true;
3584 }
3585
3586 return false;
3587 }
3588
3589 case PLUS_EXPR:
3590 case MINUS_EXPR:
3591 {
3592 tree lhs_etype = lhs_type;
3593 tree rhs1_etype = rhs1_type;
3594 tree rhs2_etype = rhs2_type;
3595 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3596 {
3597 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3598 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3599 {
3600 error ("invalid non-vector operands to vector valued plus");
3601 return true;
3602 }
3603 lhs_etype = TREE_TYPE (lhs_type);
3604 rhs1_etype = TREE_TYPE (rhs1_type);
3605 rhs2_etype = TREE_TYPE (rhs2_type);
3606 }
3607 if (POINTER_TYPE_P (lhs_etype)
3608 || POINTER_TYPE_P (rhs1_etype)
3609 || POINTER_TYPE_P (rhs2_etype))
3610 {
3611 error ("invalid (pointer) operands to plus/minus");
3612 return true;
3613 }
3614
3615 /* Continue with generic binary expression handling. */
3616 break;
3617 }
3618
3619 case POINTER_PLUS_EXPR:
3620 {
3621 if (!POINTER_TYPE_P (rhs1_type)
3622 || !useless_type_conversion_p (lhs_type, rhs1_type)
3623 || !ptrofftype_p (rhs2_type))
3624 {
3625 error ("type mismatch in pointer plus expression");
3626 debug_generic_stmt (lhs_type);
3627 debug_generic_stmt (rhs1_type);
3628 debug_generic_stmt (rhs2_type);
3629 return true;
3630 }
3631
3632 return false;
3633 }
3634
3635 case TRUTH_ANDIF_EXPR:
3636 case TRUTH_ORIF_EXPR:
3637 case TRUTH_AND_EXPR:
3638 case TRUTH_OR_EXPR:
3639 case TRUTH_XOR_EXPR:
3640
3641 gcc_unreachable ();
3642
3643 case LT_EXPR:
3644 case LE_EXPR:
3645 case GT_EXPR:
3646 case GE_EXPR:
3647 case EQ_EXPR:
3648 case NE_EXPR:
3649 case UNORDERED_EXPR:
3650 case ORDERED_EXPR:
3651 case UNLT_EXPR:
3652 case UNLE_EXPR:
3653 case UNGT_EXPR:
3654 case UNGE_EXPR:
3655 case UNEQ_EXPR:
3656 case LTGT_EXPR:
3657 /* Comparisons are also binary, but the result type is not
3658 connected to the operand types. */
3659 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3660
3661 case WIDEN_MULT_EXPR:
3662 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3663 return true;
3664 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3665 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3666
3667 case WIDEN_SUM_EXPR:
3668 case VEC_WIDEN_MULT_HI_EXPR:
3669 case VEC_WIDEN_MULT_LO_EXPR:
3670 case VEC_WIDEN_MULT_EVEN_EXPR:
3671 case VEC_WIDEN_MULT_ODD_EXPR:
3672 case VEC_PACK_TRUNC_EXPR:
3673 case VEC_PACK_SAT_EXPR:
3674 case VEC_PACK_FIX_TRUNC_EXPR:
3675 /* FIXME. */
3676 return false;
3677
3678 case MULT_EXPR:
3679 case MULT_HIGHPART_EXPR:
3680 case TRUNC_DIV_EXPR:
3681 case CEIL_DIV_EXPR:
3682 case FLOOR_DIV_EXPR:
3683 case ROUND_DIV_EXPR:
3684 case TRUNC_MOD_EXPR:
3685 case CEIL_MOD_EXPR:
3686 case FLOOR_MOD_EXPR:
3687 case ROUND_MOD_EXPR:
3688 case RDIV_EXPR:
3689 case EXACT_DIV_EXPR:
3690 case MIN_EXPR:
3691 case MAX_EXPR:
3692 case BIT_IOR_EXPR:
3693 case BIT_XOR_EXPR:
3694 case BIT_AND_EXPR:
3695 /* Continue with generic binary expression handling. */
3696 break;
3697
3698 default:
3699 gcc_unreachable ();
3700 }
3701
3702 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3703 || !useless_type_conversion_p (lhs_type, rhs2_type))
3704 {
3705 error ("type mismatch in binary expression");
3706 debug_generic_stmt (lhs_type);
3707 debug_generic_stmt (rhs1_type);
3708 debug_generic_stmt (rhs2_type);
3709 return true;
3710 }
3711
3712 return false;
3713 }
3714
3715 /* Verify a gimple assignment statement STMT with a ternary rhs.
3716 Returns true if anything is wrong. */
3717
3718 static bool
3719 verify_gimple_assign_ternary (gimple stmt)
3720 {
3721 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3722 tree lhs = gimple_assign_lhs (stmt);
3723 tree lhs_type = TREE_TYPE (lhs);
3724 tree rhs1 = gimple_assign_rhs1 (stmt);
3725 tree rhs1_type = TREE_TYPE (rhs1);
3726 tree rhs2 = gimple_assign_rhs2 (stmt);
3727 tree rhs2_type = TREE_TYPE (rhs2);
3728 tree rhs3 = gimple_assign_rhs3 (stmt);
3729 tree rhs3_type = TREE_TYPE (rhs3);
3730
3731 if (!is_gimple_reg (lhs))
3732 {
3733 error ("non-register as LHS of ternary operation");
3734 return true;
3735 }
3736
3737 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3738 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3739 || !is_gimple_val (rhs2)
3740 || !is_gimple_val (rhs3))
3741 {
3742 error ("invalid operands in ternary operation");
3743 return true;
3744 }
3745
3746 /* First handle operations that involve different types. */
3747 switch (rhs_code)
3748 {
3749 case WIDEN_MULT_PLUS_EXPR:
3750 case WIDEN_MULT_MINUS_EXPR:
3751 if ((!INTEGRAL_TYPE_P (rhs1_type)
3752 && !FIXED_POINT_TYPE_P (rhs1_type))
3753 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3754 || !useless_type_conversion_p (lhs_type, rhs3_type)
3755 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3756 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3757 {
3758 error ("type mismatch in widening multiply-accumulate expression");
3759 debug_generic_expr (lhs_type);
3760 debug_generic_expr (rhs1_type);
3761 debug_generic_expr (rhs2_type);
3762 debug_generic_expr (rhs3_type);
3763 return true;
3764 }
3765 break;
3766
3767 case FMA_EXPR:
3768 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3769 || !useless_type_conversion_p (lhs_type, rhs2_type)
3770 || !useless_type_conversion_p (lhs_type, rhs3_type))
3771 {
3772 error ("type mismatch in fused multiply-add expression");
3773 debug_generic_expr (lhs_type);
3774 debug_generic_expr (rhs1_type);
3775 debug_generic_expr (rhs2_type);
3776 debug_generic_expr (rhs3_type);
3777 return true;
3778 }
3779 break;
3780
3781 case COND_EXPR:
3782 case VEC_COND_EXPR:
3783 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3784 || !useless_type_conversion_p (lhs_type, rhs3_type))
3785 {
3786 error ("type mismatch in conditional expression");
3787 debug_generic_expr (lhs_type);
3788 debug_generic_expr (rhs2_type);
3789 debug_generic_expr (rhs3_type);
3790 return true;
3791 }
3792 break;
3793
3794 case VEC_PERM_EXPR:
3795 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3796 || !useless_type_conversion_p (lhs_type, rhs2_type))
3797 {
3798 error ("type mismatch in vector permute expression");
3799 debug_generic_expr (lhs_type);
3800 debug_generic_expr (rhs1_type);
3801 debug_generic_expr (rhs2_type);
3802 debug_generic_expr (rhs3_type);
3803 return true;
3804 }
3805
3806 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3807 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3808 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3809 {
3810 error ("vector types expected in vector permute expression");
3811 debug_generic_expr (lhs_type);
3812 debug_generic_expr (rhs1_type);
3813 debug_generic_expr (rhs2_type);
3814 debug_generic_expr (rhs3_type);
3815 return true;
3816 }
3817
3818 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3819 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3820 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3821 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3822 != TYPE_VECTOR_SUBPARTS (lhs_type))
3823 {
3824 error ("vectors with different element number found "
3825 "in vector permute expression");
3826 debug_generic_expr (lhs_type);
3827 debug_generic_expr (rhs1_type);
3828 debug_generic_expr (rhs2_type);
3829 debug_generic_expr (rhs3_type);
3830 return true;
3831 }
3832
3833 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3834 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3835 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3836 {
3837 error ("invalid mask type in vector permute expression");
3838 debug_generic_expr (lhs_type);
3839 debug_generic_expr (rhs1_type);
3840 debug_generic_expr (rhs2_type);
3841 debug_generic_expr (rhs3_type);
3842 return true;
3843 }
3844
3845 return false;
3846
3847 case DOT_PROD_EXPR:
3848 case REALIGN_LOAD_EXPR:
3849 /* FIXME. */
3850 return false;
3851
3852 default:
3853 gcc_unreachable ();
3854 }
3855 return false;
3856 }
3857
3858 /* Verify a gimple assignment statement STMT with a single rhs.
3859 Returns true if anything is wrong. */
3860
3861 static bool
3862 verify_gimple_assign_single (gimple stmt)
3863 {
3864 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3865 tree lhs = gimple_assign_lhs (stmt);
3866 tree lhs_type = TREE_TYPE (lhs);
3867 tree rhs1 = gimple_assign_rhs1 (stmt);
3868 tree rhs1_type = TREE_TYPE (rhs1);
3869 bool res = false;
3870
3871 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3872 {
3873 error ("non-trivial conversion at assignment");
3874 debug_generic_expr (lhs_type);
3875 debug_generic_expr (rhs1_type);
3876 return true;
3877 }
3878
3879 if (gimple_clobber_p (stmt)
3880 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3881 {
3882 error ("non-decl/MEM_REF LHS in clobber statement");
3883 debug_generic_expr (lhs);
3884 return true;
3885 }
3886
3887 if (handled_component_p (lhs))
3888 res |= verify_types_in_gimple_reference (lhs, true);
3889
3890 /* Special codes we cannot handle via their class. */
3891 switch (rhs_code)
3892 {
3893 case ADDR_EXPR:
3894 {
3895 tree op = TREE_OPERAND (rhs1, 0);
3896 if (!is_gimple_addressable (op))
3897 {
3898 error ("invalid operand in unary expression");
3899 return true;
3900 }
3901
3902 /* Technically there is no longer a need for matching types, but
3903 gimple hygiene asks for this check. In LTO we can end up
3904 combining incompatible units and thus end up with addresses
3905 of globals that change their type to a common one. */
3906 if (!in_lto_p
3907 && !types_compatible_p (TREE_TYPE (op),
3908 TREE_TYPE (TREE_TYPE (rhs1)))
3909 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3910 TREE_TYPE (op)))
3911 {
3912 error ("type mismatch in address expression");
3913 debug_generic_stmt (TREE_TYPE (rhs1));
3914 debug_generic_stmt (TREE_TYPE (op));
3915 return true;
3916 }
3917
3918 return verify_types_in_gimple_reference (op, true);
3919 }
3920
3921 /* tcc_reference */
3922 case INDIRECT_REF:
3923 error ("INDIRECT_REF in gimple IL");
3924 return true;
3925
3926 case COMPONENT_REF:
3927 case BIT_FIELD_REF:
3928 case ARRAY_REF:
3929 case ARRAY_RANGE_REF:
3930 case VIEW_CONVERT_EXPR:
3931 case REALPART_EXPR:
3932 case IMAGPART_EXPR:
3933 case TARGET_MEM_REF:
3934 case MEM_REF:
3935 if (!is_gimple_reg (lhs)
3936 && is_gimple_reg_type (TREE_TYPE (lhs)))
3937 {
3938 error ("invalid rhs for gimple memory store");
3939 debug_generic_stmt (lhs);
3940 debug_generic_stmt (rhs1);
3941 return true;
3942 }
3943 return res || verify_types_in_gimple_reference (rhs1, false);
3944
3945 /* tcc_constant */
3946 case SSA_NAME:
3947 case INTEGER_CST:
3948 case REAL_CST:
3949 case FIXED_CST:
3950 case COMPLEX_CST:
3951 case VECTOR_CST:
3952 case STRING_CST:
3953 return res;
3954
3955 /* tcc_declaration */
3956 case CONST_DECL:
3957 return res;
3958 case VAR_DECL:
3959 case PARM_DECL:
3960 if (!is_gimple_reg (lhs)
3961 && !is_gimple_reg (rhs1)
3962 && is_gimple_reg_type (TREE_TYPE (lhs)))
3963 {
3964 error ("invalid rhs for gimple memory store");
3965 debug_generic_stmt (lhs);
3966 debug_generic_stmt (rhs1);
3967 return true;
3968 }
3969 return res;
3970
3971 case CONSTRUCTOR:
3972 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3973 {
3974 unsigned int i;
3975 tree elt_i, elt_v, elt_t = NULL_TREE;
3976
3977 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3978 return res;
3979 	    /* For vector CONSTRUCTORs we require that either it is an empty
3980 	       CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3981 	       (then the element count must be correct to cover the whole
3982 	       outer vector and the index must be NULL on all elements), or it
3983 	       is a CONSTRUCTOR of scalar elements, where as an exception we
3984 	       allow a smaller number of elements (assuming zero filling) and
3985 	       consecutive indexes as compared to NULL indexes (such
3986 	       CONSTRUCTORs can appear in the IL from FEs).  */
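	    /* An illustrative sketch (the names below are made up, not taken
	       from real IL): with V4SI x and V2SI lo, hi, both of

		 x = {lo, hi};    <- two V2SI halves, NULL indexes
		 x = {i_1, i_2};  <- two scalars, upper lanes zero-filled

	       are accepted, whereas x = {lo, i_1} mixes element kinds and is
	       rejected below as "incorrect type of vector CONSTRUCTOR
	       elements".  */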
3987 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3988 {
3989 if (elt_t == NULL_TREE)
3990 {
3991 elt_t = TREE_TYPE (elt_v);
3992 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3993 {
3995 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3996 TREE_TYPE (elt_t)))
3997 {
3998 error ("incorrect type of vector CONSTRUCTOR"
3999 " elements");
4000 debug_generic_stmt (rhs1);
4001 return true;
4002 }
4003 else if (CONSTRUCTOR_NELTS (rhs1)
4004 * TYPE_VECTOR_SUBPARTS (elt_t)
4005 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4006 {
4007 error ("incorrect number of vector CONSTRUCTOR"
4008 " elements");
4009 debug_generic_stmt (rhs1);
4010 return true;
4011 }
4012 }
4013 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4014 elt_t))
4015 {
4016 error ("incorrect type of vector CONSTRUCTOR elements");
4017 debug_generic_stmt (rhs1);
4018 return true;
4019 }
4020 else if (CONSTRUCTOR_NELTS (rhs1)
4021 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4022 {
4023 error ("incorrect number of vector CONSTRUCTOR elements");
4024 debug_generic_stmt (rhs1);
4025 return true;
4026 }
4027 }
4028 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4029 {
4030 error ("incorrect type of vector CONSTRUCTOR elements");
4031 debug_generic_stmt (rhs1);
4032 return true;
4033 }
4034 if (elt_i != NULL_TREE
4035 && (TREE_CODE (elt_t) == VECTOR_TYPE
4036 || TREE_CODE (elt_i) != INTEGER_CST
4037 || compare_tree_int (elt_i, i) != 0))
4038 {
4039 error ("vector CONSTRUCTOR with non-NULL element index");
4040 debug_generic_stmt (rhs1);
4041 return true;
4042 }
4043 }
4044 }
4045 return res;
4046 case OBJ_TYPE_REF:
4047 case ASSERT_EXPR:
4048 case WITH_SIZE_EXPR:
4049 /* FIXME. */
4050 return res;
4051
4052 default:;
4053 }
4054
4055 return res;
4056 }
4057
4058 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4059 is a problem, otherwise false. */
4060
4061 static bool
4062 verify_gimple_assign (gimple stmt)
4063 {
4064 switch (gimple_assign_rhs_class (stmt))
4065 {
4066 case GIMPLE_SINGLE_RHS:
4067 return verify_gimple_assign_single (stmt);
4068
4069 case GIMPLE_UNARY_RHS:
4070 return verify_gimple_assign_unary (stmt);
4071
4072 case GIMPLE_BINARY_RHS:
4073 return verify_gimple_assign_binary (stmt);
4074
4075 case GIMPLE_TERNARY_RHS:
4076 return verify_gimple_assign_ternary (stmt);
4077
4078 default:
4079 gcc_unreachable ();
4080 }
4081 }
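
/* For instance (an illustrative sketch; all names are made up):

     x_1 = y_2;              <- GIMPLE_SINGLE_RHS
     x_1 = -y_2;             <- GIMPLE_UNARY_RHS
     x_1 = y_2 + z_3;        <- GIMPLE_BINARY_RHS
     x_1 = y_2 ? z_3 : w_4;  <- GIMPLE_TERNARY_RHS (a COND_EXPR)  */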
4082
4083 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4084 is a problem, otherwise false. */
4085
4086 static bool
4087 verify_gimple_return (gimple stmt)
4088 {
4089 tree op = gimple_return_retval (stmt);
4090 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4091
4092 /* We cannot test for present return values as we do not fix up missing
4093 return values from the original source. */
4094 if (op == NULL)
4095 return false;
4096
4097 if (!is_gimple_val (op)
4098 && TREE_CODE (op) != RESULT_DECL)
4099 {
4100 error ("invalid operand in return statement");
4101 debug_generic_stmt (op);
4102 return true;
4103 }
4104
4105 if ((TREE_CODE (op) == RESULT_DECL
4106 && DECL_BY_REFERENCE (op))
4107 || (TREE_CODE (op) == SSA_NAME
4108 && SSA_NAME_VAR (op)
4109 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4110 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4111 op = TREE_TYPE (op);
4112
4113 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4114 {
4115 error ("invalid conversion in return statement");
4116 debug_generic_stmt (restype);
4117 debug_generic_stmt (TREE_TYPE (op));
4118 return true;
4119 }
4120
4121 return false;
4122 }
4123
4124
4125 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4126 is a problem, otherwise false. */
4127
4128 static bool
4129 verify_gimple_goto (gimple stmt)
4130 {
4131 tree dest = gimple_goto_dest (stmt);
4132
4133 /* ??? We have two canonical forms of direct goto destinations, a
4134 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4135 if (TREE_CODE (dest) != LABEL_DECL
4136 && (!is_gimple_val (dest)
4137 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4138 {
4139 error ("goto destination is neither a label nor a pointer");
4140 return true;
4141 }
4142
4143 return false;
4144 }
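
/* For instance (an illustrative sketch): a direct goto carries a bare
   LABEL_DECL or the ADDR_EXPR of one, while a computed goto such as

     goto *dest_1;

   carries a pointer-valued SSA name; anything else is rejected above.  */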
4145
4146 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4147 is a problem, otherwise false. */
4148
4149 static bool
4150 verify_gimple_switch (gimple stmt)
4151 {
4152 unsigned int i, n;
4153 tree elt, prev_upper_bound = NULL_TREE;
4154 tree index_type, elt_type = NULL_TREE;
4155
4156 if (!is_gimple_val (gimple_switch_index (stmt)))
4157 {
4158 error ("invalid operand to switch statement");
4159 debug_generic_stmt (gimple_switch_index (stmt));
4160 return true;
4161 }
4162
4163 index_type = TREE_TYPE (gimple_switch_index (stmt));
4164 if (! INTEGRAL_TYPE_P (index_type))
4165 {
4166 error ("non-integral type switch statement");
4167 debug_generic_expr (index_type);
4168 return true;
4169 }
4170
4171 elt = gimple_switch_label (stmt, 0);
4172 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4173 {
4174 error ("invalid default case label in switch statement");
4175 debug_generic_expr (elt);
4176 return true;
4177 }
4178
4179 n = gimple_switch_num_labels (stmt);
4180 for (i = 1; i < n; i++)
4181 {
4182 elt = gimple_switch_label (stmt, i);
4183
4184 if (! CASE_LOW (elt))
4185 {
4186 error ("invalid case label in switch statement");
4187 debug_generic_expr (elt);
4188 return true;
4189 }
4190 if (CASE_HIGH (elt)
4191 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4192 {
4193 error ("invalid case range in switch statement");
4194 debug_generic_expr (elt);
4195 return true;
4196 }
4197
4198 if (elt_type)
4199 {
4200 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4201 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4202 {
4203 error ("type mismatch for case label in switch statement");
4204 debug_generic_expr (elt);
4205 return true;
4206 }
4207 }
4208 else
4209 {
4210 elt_type = TREE_TYPE (CASE_LOW (elt));
4211 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4212 {
4213 error ("type precision mismatch in switch statement");
4214 return true;
4215 }
4216 }
4217
4218 if (prev_upper_bound)
4219 {
4220 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4221 {
4222 error ("case labels not sorted in switch statement");
4223 return true;
4224 }
4225 }
4226
4227 prev_upper_bound = CASE_HIGH (elt);
4228 if (! prev_upper_bound)
4229 prev_upper_bound = CASE_LOW (elt);
4230 }
4231
4232 return false;
4233 }
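
/* A well-formed GIMPLE switch thus looks roughly like

     switch (i_2) <default: <L3>, case 0: <L0>, case 3 ... 5: <L1>>

   (an illustrative sketch): the default label comes first with NULL
   CASE_LOW and CASE_HIGH, every other label has a CASE_LOW, ranges are
   non-empty, and the labels are sorted by CASE_LOW.  */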
4234
4235 /* Verify a gimple debug statement STMT.
4236 Returns true if anything is wrong. */
4237
4238 static bool
4239 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4240 {
4241 	/* There isn't much that could be wrong in a gimple debug stmt.  A
4242 	   gimple debug bind stmt, for example, maps a tree (usually a
4243 	   VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4244 	   member of an aggregate type) to another tree that can be an
4245 	   arbitrary expression.  These stmts expand into debug insns and
4246 	   are converted to debug notes by var-tracking.c.  */
4247 return false;
4248 }
4249
4250 /* Verify a gimple label statement STMT.
4251 Returns true if anything is wrong. */
4252
4253 static bool
4254 verify_gimple_label (gimple stmt)
4255 {
4256 tree decl = gimple_label_label (stmt);
4257 int uid;
4258 bool err = false;
4259
4260 if (TREE_CODE (decl) != LABEL_DECL)
4261 return true;
4262 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4263 && DECL_CONTEXT (decl) != current_function_decl)
4264 {
4265 error ("label's context is not the current function decl");
4266 err |= true;
4267 }
4268
4269 uid = LABEL_DECL_UID (decl);
4270 if (cfun->cfg
4271 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4272 {
4273 error ("incorrect entry in label_to_block_map");
4274 err |= true;
4275 }
4276
4277 uid = EH_LANDING_PAD_NR (decl);
4278 if (uid)
4279 {
4280 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4281 if (decl != lp->post_landing_pad)
4282 {
4283 error ("incorrect setting of landing pad number");
4284 err |= true;
4285 }
4286 }
4287
4288 return err;
4289 }
4290
4291 /* Verify the GIMPLE statement STMT. Returns true if there is an
4292 error, otherwise false. */
4293
4294 static bool
4295 verify_gimple_stmt (gimple stmt)
4296 {
4297 switch (gimple_code (stmt))
4298 {
4299 case GIMPLE_ASSIGN:
4300 return verify_gimple_assign (stmt);
4301
4302 case GIMPLE_LABEL:
4303 return verify_gimple_label (stmt);
4304
4305 case GIMPLE_CALL:
4306 return verify_gimple_call (stmt);
4307
4308 case GIMPLE_COND:
4309 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4310 {
4311 error ("invalid comparison code in gimple cond");
4312 return true;
4313 }
4314 if (!(!gimple_cond_true_label (stmt)
4315 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4316 || !(!gimple_cond_false_label (stmt)
4317 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4318 {
4319 error ("invalid labels in gimple cond");
4320 return true;
4321 }
4322
4323 return verify_gimple_comparison (boolean_type_node,
4324 gimple_cond_lhs (stmt),
4325 gimple_cond_rhs (stmt));
4326
4327 case GIMPLE_GOTO:
4328 return verify_gimple_goto (stmt);
4329
4330 case GIMPLE_SWITCH:
4331 return verify_gimple_switch (stmt);
4332
4333 case GIMPLE_RETURN:
4334 return verify_gimple_return (stmt);
4335
4336 case GIMPLE_ASM:
4337 return false;
4338
4339 case GIMPLE_TRANSACTION:
4340 return verify_gimple_transaction (stmt);
4341
4342 /* Tuples that do not have tree operands. */
4343 case GIMPLE_NOP:
4344 case GIMPLE_PREDICT:
4345 case GIMPLE_RESX:
4346 case GIMPLE_EH_DISPATCH:
4347 case GIMPLE_EH_MUST_NOT_THROW:
4348 return false;
4349
4350 CASE_GIMPLE_OMP:
4351 /* OpenMP directives are validated by the FE and never operated
4352 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4353 non-gimple expressions when the main index variable has had
4354 its address taken. This does not affect the loop itself
4355 	 because the header of a GIMPLE_OMP_FOR is merely used to determine
4356 	 how to set up the parallel iteration.  */
4357 return false;
4358
4359 case GIMPLE_DEBUG:
4360 return verify_gimple_debug (stmt);
4361
4362 default:
4363 gcc_unreachable ();
4364 }
4365 }
4366
4367 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4368 and false otherwise. */
4369
4370 static bool
4371 verify_gimple_phi (gimple phi)
4372 {
4373 bool err = false;
4374 unsigned i;
4375 tree phi_result = gimple_phi_result (phi);
4376 bool virtual_p;
4377
4378 if (!phi_result)
4379 {
4380 error ("invalid PHI result");
4381 return true;
4382 }
4383
4384 virtual_p = virtual_operand_p (phi_result);
4385 if (TREE_CODE (phi_result) != SSA_NAME
4386 || (virtual_p
4387 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4388 {
4389 error ("invalid PHI result");
4390 err = true;
4391 }
4392
4393 for (i = 0; i < gimple_phi_num_args (phi); i++)
4394 {
4395 tree t = gimple_phi_arg_def (phi, i);
4396
4397 if (!t)
4398 {
4399 error ("missing PHI def");
4400 err |= true;
4401 continue;
4402 }
4403 /* Addressable variables do have SSA_NAMEs but they
4404 are not considered gimple values. */
4405 else if ((TREE_CODE (t) == SSA_NAME
4406 && virtual_p != virtual_operand_p (t))
4407 || (virtual_p
4408 && (TREE_CODE (t) != SSA_NAME
4409 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4410 || (!virtual_p
4411 && !is_gimple_val (t)))
4412 {
4413 error ("invalid PHI argument");
4414 debug_generic_expr (t);
4415 err |= true;
4416 }
4417 #ifdef ENABLE_TYPES_CHECKING
4418 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4419 {
4420 error ("incompatible types in PHI argument %u", i);
4421 debug_generic_stmt (TREE_TYPE (phi_result));
4422 debug_generic_stmt (TREE_TYPE (t));
4423 err |= true;
4424 }
4425 #endif
4426 }
4427
4428 return err;
4429 }
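
/* For example (an illustrative sketch; the names are made up), both

     x_3 = PHI <x_1(2), x_2(3)>
     .MEM_4 = PHI <.MEM_1(2), .MEM_2(3)>

   are accepted: the first is a scalar PHI whose arguments are gimple
   values, the second a virtual PHI whose result and arguments are all
   names of the single virtual operand.  Mixing the two kinds fails.  */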
4430
4431 /* Verify the GIMPLE statements inside the sequence STMTS. */
4432
4433 static bool
4434 verify_gimple_in_seq_2 (gimple_seq stmts)
4435 {
4436   gimple_stmt_iterator gsi;
4437   bool err = false;
4438
4439   for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
4440     {
4441       gimple stmt = gsi_stmt (gsi);
4442
4443 switch (gimple_code (stmt))
4444 {
4445 case GIMPLE_BIND:
4446 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4447 break;
4448
4449 case GIMPLE_TRY:
4450 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4451 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4452 break;
4453
4454 case GIMPLE_EH_FILTER:
4455 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4456 break;
4457
4458 case GIMPLE_EH_ELSE:
4459 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4460 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4461 break;
4462
4463 case GIMPLE_CATCH:
4464 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4465 break;
4466
4467 case GIMPLE_TRANSACTION:
4468 err |= verify_gimple_transaction (stmt);
4469 break;
4470
4471 default:
4472 {
4473 bool err2 = verify_gimple_stmt (stmt);
4474 if (err2)
4475 debug_gimple_stmt (stmt);
4476 err |= err2;
4477 }
4478 }
4479 }
4480
4481 return err;
4482 }
4483
4484 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4485 is a problem, otherwise false. */
4486
4487 static bool
4488 verify_gimple_transaction (gimple stmt)
4489 {
4490 tree lab = gimple_transaction_label (stmt);
4491 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4492 return true;
4493 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4494 }
4495
4496
4497 /* Verify the GIMPLE statements inside the statement list STMTS. */
4498
4499 DEBUG_FUNCTION void
4500 verify_gimple_in_seq (gimple_seq stmts)
4501 {
4502 timevar_push (TV_TREE_STMT_VERIFY);
4503 if (verify_gimple_in_seq_2 (stmts))
4504 internal_error ("verify_gimple failed");
4505 timevar_pop (TV_TREE_STMT_VERIFY);
4506 }
4507
4508 /* Return true when T can be shared.  */
4509
4510 static bool
4511 tree_node_can_be_shared (tree t)
4512 {
4513 if (IS_TYPE_OR_DECL_P (t)
4514 || is_gimple_min_invariant (t)
4515 || TREE_CODE (t) == SSA_NAME
4516 || t == error_mark_node
4517 || TREE_CODE (t) == IDENTIFIER_NODE)
4518 return true;
4519
4520 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4521 return true;
4522
4526 return false;
4527 }
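
/* For example, constants, types, decls and SSA names may be referenced
   from any number of statements, whereas a COMPONENT_REF or MEM_REF
   tree must be unshared, so that rewriting the operands of one
   statement cannot silently change another.  */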
4528
4529 /* Called via walk_tree. Verify tree sharing. */
4530
4531 static tree
4532 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4533 {
4534 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4535
4536 if (tree_node_can_be_shared (*tp))
4537 {
4538 *walk_subtrees = false;
4539 return NULL;
4540 }
4541
4542 if (pointer_set_insert (visited, *tp))
4543 return *tp;
4544
4545 return NULL;
4546 }
4547
4548 /* Called via walk_gimple_stmt. Verify tree sharing. */
4549
4550 static tree
4551 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4552 {
4553 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4554 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4555 }
4556
4557 static bool eh_error_found;
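
/* Callback for htab_traverse over the EH throw statement table.  DATA is
   the pointer set of GIMPLE statements seen while walking the CFG;
   complain about any statement recorded in the table that is not in that
   set, since it must be dead.  */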
4558 static int
4559 verify_eh_throw_stmt_node (void **slot, void *data)
4560 {
4561 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4562 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4563
4564 if (!pointer_set_contains (visited, node->stmt))
4565 {
4566 error ("dead STMT in EH table");
4567 debug_gimple_stmt (node->stmt);
4568 eh_error_found = true;
4569 }
4570 return 1;
4571 }
4572
4573 /* Verify that the block of location LOC is in BLOCKS.  */
4574
4575 static bool
4576 verify_location (pointer_set_t *blocks, location_t loc)
4577 {
4578 tree block = LOCATION_BLOCK (loc);
4579 if (block != NULL_TREE
4580 && !pointer_set_contains (blocks, block))
4581 {
4582 error ("location references block not in block tree");
4583 return true;
4584 }
4585 if (block != NULL_TREE)
4586 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4587 return false;
4588 }
4589
4590 /* Called via walk_tree. Verify that expressions have no blocks. */
4591
4592 static tree
4593 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4594 {
4595 if (!EXPR_P (*tp))
4596 {
4597 *walk_subtrees = false;
4598 return NULL;
4599 }
4600
4601 location_t loc = EXPR_LOCATION (*tp);
4602 if (LOCATION_BLOCK (loc) != NULL)
4603 return *tp;
4604
4605 return NULL;
4606 }
4607
4608 /* Called via walk_tree. Verify locations of expressions. */
4609
4610 static tree
4611 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4612 {
4613 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4614
4615 if (TREE_CODE (*tp) == VAR_DECL
4616 && DECL_HAS_DEBUG_EXPR_P (*tp))
4617 {
4618 tree t = DECL_DEBUG_EXPR (*tp);
4619 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4620 if (addr)
4621 return addr;
4622 }
4623 if ((TREE_CODE (*tp) == VAR_DECL
4624 || TREE_CODE (*tp) == PARM_DECL
4625 || TREE_CODE (*tp) == RESULT_DECL)
4626 && DECL_HAS_VALUE_EXPR_P (*tp))
4627 {
4628 tree t = DECL_VALUE_EXPR (*tp);
4629 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4630 if (addr)
4631 return addr;
4632 }
4633
4634 if (!EXPR_P (*tp))
4635 {
4636 *walk_subtrees = false;
4637 return NULL;
4638 }
4639
4640 location_t loc = EXPR_LOCATION (*tp);
4641 if (verify_location (blocks, loc))
4642 return *tp;
4643
4644 return NULL;
4645 }
4646
4647 /* Called via walk_gimple_op. Verify locations of expressions. */
4648
4649 static tree
4650 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4651 {
4652 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4653 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4654 }
4655
4656 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4657
4658 static void
4659 collect_subblocks (pointer_set_t *blocks, tree block)
4660 {
4661 tree t;
4662 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4663 {
4664 pointer_set_insert (blocks, t);
4665 collect_subblocks (blocks, t);
4666 }
4667 }
4668
4669 /* Verify the GIMPLE statements in the CFG of FN. */
4670
4671 DEBUG_FUNCTION void
4672 verify_gimple_in_cfg (struct function *fn)
4673 {
4674 basic_block bb;
4675 bool err = false;
4676 struct pointer_set_t *visited, *visited_stmts, *blocks;
4677
4678 timevar_push (TV_TREE_STMT_VERIFY);
4679 visited = pointer_set_create ();
4680 visited_stmts = pointer_set_create ();
4681
4682 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4683 blocks = pointer_set_create ();
4684 if (DECL_INITIAL (fn->decl))
4685 {
4686 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4687 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4688 }
4689
4690 FOR_EACH_BB_FN (bb, fn)
4691 {
4692 gimple_stmt_iterator gsi;
4693
4694 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4695 {
4696 gimple phi = gsi_stmt (gsi);
4697 bool err2 = false;
4698 unsigned i;
4699
4700 pointer_set_insert (visited_stmts, phi);
4701
4702 if (gimple_bb (phi) != bb)
4703 {
4704 error ("gimple_bb (phi) is set to a wrong basic block");
4705 err2 = true;
4706 }
4707
4708 err2 |= verify_gimple_phi (phi);
4709
4710 /* Only PHI arguments have locations. */
4711 if (gimple_location (phi) != UNKNOWN_LOCATION)
4712 {
4713 error ("PHI node with location");
4714 err2 = true;
4715 }
4716
4717 for (i = 0; i < gimple_phi_num_args (phi); i++)
4718 {
4719 tree arg = gimple_phi_arg_def (phi, i);
4720 tree addr = walk_tree (&arg, verify_node_sharing_1,
4721 visited, NULL);
4722 if (addr)
4723 {
4724 error ("incorrect sharing of tree nodes");
4725 debug_generic_expr (addr);
4726 err2 |= true;
4727 }
4728 location_t loc = gimple_phi_arg_location (phi, i);
4729 if (virtual_operand_p (gimple_phi_result (phi))
4730 && loc != UNKNOWN_LOCATION)
4731 {
4732 error ("virtual PHI with argument locations");
4733 err2 = true;
4734 }
4735 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4736 if (addr)
4737 {
4738 debug_generic_expr (addr);
4739 err2 = true;
4740 }
4741 err2 |= verify_location (blocks, loc);
4742 }
4743
4744 if (err2)
4745 debug_gimple_stmt (phi);
4746 err |= err2;
4747 }
4748
4749 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4750 {
4751 gimple stmt = gsi_stmt (gsi);
4752 bool err2 = false;
4753 struct walk_stmt_info wi;
4754 tree addr;
4755 int lp_nr;
4756
4757 pointer_set_insert (visited_stmts, stmt);
4758
4759 if (gimple_bb (stmt) != bb)
4760 {
4761 error ("gimple_bb (stmt) is set to a wrong basic block");
4762 err2 = true;
4763 }
4764
4765 err2 |= verify_gimple_stmt (stmt);
4766 err2 |= verify_location (blocks, gimple_location (stmt));
4767
4768 memset (&wi, 0, sizeof (wi));
4769 wi.info = (void *) visited;
4770 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4771 if (addr)
4772 {
4773 error ("incorrect sharing of tree nodes");
4774 debug_generic_expr (addr);
4775 err2 |= true;
4776 }
4777
4778 memset (&wi, 0, sizeof (wi));
4779 wi.info = (void *) blocks;
4780 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4781 if (addr)
4782 {
4783 debug_generic_expr (addr);
4784 err2 |= true;
4785 }
4786
4787 /* ??? Instead of not checking these stmts at all the walker
4788 should know its context via wi. */
4789 if (!is_gimple_debug (stmt)
4790 && !is_gimple_omp (stmt))
4791 {
4792 memset (&wi, 0, sizeof (wi));
4793 addr = walk_gimple_op (stmt, verify_expr, &wi);
4794 if (addr)
4795 {
4796 debug_generic_expr (addr);
4797 inform (gimple_location (stmt), "in statement");
4798 err2 |= true;
4799 }
4800 }
4801
4802 	  /* If the statement is marked as part of an EH region, then it is
4803 	     expected that the statement could throw.  Verify that when
4804 	     optimizations simplify a statement so that we can prove that it
4805 	     cannot throw, the other data structures are updated to
4806 	     match.  */
4807 lp_nr = lookup_stmt_eh_lp (stmt);
4808 if (lp_nr != 0)
4809 {
4810 if (!stmt_could_throw_p (stmt))
4811 {
4812 error ("statement marked for throw, but doesn%'t");
4813 err2 |= true;
4814 }
4815 else if (lp_nr > 0
4816 && !gsi_one_before_end_p (gsi)
4817 && stmt_can_throw_internal (stmt))
4818 {
4819 error ("statement marked for throw in middle of block");
4820 err2 |= true;
4821 }
4822 }
4823
4824 if (err2)
4825 debug_gimple_stmt (stmt);
4826 err |= err2;
4827 }
4828 }
4829
4830 eh_error_found = false;
4831 if (get_eh_throw_stmt_table (cfun))
4832 htab_traverse (get_eh_throw_stmt_table (cfun),
4833 verify_eh_throw_stmt_node,
4834 visited_stmts);
4835
4836 if (err || eh_error_found)
4837 internal_error ("verify_gimple failed");
4838
4839 pointer_set_destroy (visited);
4840 pointer_set_destroy (visited_stmts);
4841 pointer_set_destroy (blocks);
4842 verify_histograms ();
4843 timevar_pop (TV_TREE_STMT_VERIFY);
4844 }
4845
4846
4847 /* Verifies that the flow information is OK. */
4848
4849 static int
4850 gimple_verify_flow_info (void)
4851 {
4852 int err = 0;
4853 basic_block bb;
4854 gimple_stmt_iterator gsi;
4855 gimple stmt;
4856 edge e;
4857 edge_iterator ei;
4858
4859 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4860 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4861 {
4862 error ("ENTRY_BLOCK has IL associated with it");
4863 err = 1;
4864 }
4865
4866 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4867 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4868 {
4869 error ("EXIT_BLOCK has IL associated with it");
4870 err = 1;
4871 }
4872
4873 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4874 if (e->flags & EDGE_FALLTHRU)
4875 {
4876 error ("fallthru to exit from bb %d", e->src->index);
4877 err = 1;
4878 }
4879
4880 FOR_EACH_BB (bb)
4881 {
4882 bool found_ctrl_stmt = false;
4883
4884 stmt = NULL;
4885
4886       /* Skip the labels at the start of the basic block.  */
4887 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4888 {
4889 tree label;
4890 gimple prev_stmt = stmt;
4891
4892 stmt = gsi_stmt (gsi);
4893
4894 if (gimple_code (stmt) != GIMPLE_LABEL)
4895 break;
4896
4897 label = gimple_label_label (stmt);
4898 if (prev_stmt && DECL_NONLOCAL (label))
4899 {
4900 error ("nonlocal label ");
4901 print_generic_expr (stderr, label, 0);
4902 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4903 bb->index);
4904 err = 1;
4905 }
4906
4907 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4908 {
4909 error ("EH landing pad label ");
4910 print_generic_expr (stderr, label, 0);
4911 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4912 bb->index);
4913 err = 1;
4914 }
4915
4916 if (label_to_block (label) != bb)
4917 {
4918 error ("label ");
4919 print_generic_expr (stderr, label, 0);
4920 fprintf (stderr, " to block does not match in bb %d",
4921 bb->index);
4922 err = 1;
4923 }
4924
4925 if (decl_function_context (label) != current_function_decl)
4926 {
4927 error ("label ");
4928 print_generic_expr (stderr, label, 0);
4929 fprintf (stderr, " has incorrect context in bb %d",
4930 bb->index);
4931 err = 1;
4932 }
4933 }
4934
4935       /* Verify that the body of basic block BB is free of control flow.  */
4936 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4937 {
4938 gimple stmt = gsi_stmt (gsi);
4939
4940 if (found_ctrl_stmt)
4941 {
4942 error ("control flow in the middle of basic block %d",
4943 bb->index);
4944 err = 1;
4945 }
4946
4947 if (stmt_ends_bb_p (stmt))
4948 found_ctrl_stmt = true;
4949
4950 if (gimple_code (stmt) == GIMPLE_LABEL)
4951 {
4952 error ("label ");
4953 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4954 fprintf (stderr, " in the middle of basic block %d", bb->index);
4955 err = 1;
4956 }
4957 }
4958
4959 gsi = gsi_last_bb (bb);
4960 if (gsi_end_p (gsi))
4961 continue;
4962
4963 stmt = gsi_stmt (gsi);
4964
4965 if (gimple_code (stmt) == GIMPLE_LABEL)
4966 continue;
4967
4968 err |= verify_eh_edges (stmt);
4969
4970 if (is_ctrl_stmt (stmt))
4971 {
4972 FOR_EACH_EDGE (e, ei, bb->succs)
4973 if (e->flags & EDGE_FALLTHRU)
4974 {
4975 error ("fallthru edge after a control statement in bb %d",
4976 bb->index);
4977 err = 1;
4978 }
4979 }
4980
4981 if (gimple_code (stmt) != GIMPLE_COND)
4982 {
4983 	  /* Verify that no edges with EDGE_TRUE_VALUE or EDGE_FALSE_VALUE
4984 	     set follow anything other than a GIMPLE_COND.  */
4985 FOR_EACH_EDGE (e, ei, bb->succs)
4986 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4987 {
4988 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4989 bb->index);
4990 err = 1;
4991 }
4992 }
4993
4994 switch (gimple_code (stmt))
4995 {
4996 case GIMPLE_COND:
4997 {
4998 edge true_edge;
4999 edge false_edge;
5000
5001 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5002
5003 if (!true_edge
5004 || !false_edge
5005 || !(true_edge->flags & EDGE_TRUE_VALUE)
5006 || !(false_edge->flags & EDGE_FALSE_VALUE)
5007 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5008 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5009 || EDGE_COUNT (bb->succs) >= 3)
5010 {
5011 error ("wrong outgoing edge flags at end of bb %d",
5012 bb->index);
5013 err = 1;
5014 }
5015 }
5016 break;
5017
5018 case GIMPLE_GOTO:
5019 if (simple_goto_p (stmt))
5020 {
5021 error ("explicit goto at end of bb %d", bb->index);
5022 err = 1;
5023 }
5024 else
5025 {
5026 /* FIXME. We should double check that the labels in the
5027 destination blocks have their address taken. */
5028 FOR_EACH_EDGE (e, ei, bb->succs)
5029 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5030 | EDGE_FALSE_VALUE))
5031 || !(e->flags & EDGE_ABNORMAL))
5032 {
5033 error ("wrong outgoing edge flags at end of bb %d",
5034 bb->index);
5035 err = 1;
5036 }
5037 }
5038 break;
5039
5040 case GIMPLE_CALL:
5041 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5042 break;
5043 /* ... fallthru ... */
5044 case GIMPLE_RETURN:
5045 if (!single_succ_p (bb)
5046 || (single_succ_edge (bb)->flags
5047 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5048 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5049 {
5050 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5051 err = 1;
5052 }
5053 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5054 {
5055 error ("return edge does not point to exit in bb %d",
5056 bb->index);
5057 err = 1;
5058 }
5059 break;
5060
5061 case GIMPLE_SWITCH:
5062 {
5063 tree prev;
5064 edge e;
5065 size_t i, n;
5066
5067 n = gimple_switch_num_labels (stmt);
5068
5069 /* Mark all the destination basic blocks. */
5070 for (i = 0; i < n; ++i)
5071 {
5072 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5073 basic_block label_bb = label_to_block (lab);
5074 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5075 label_bb->aux = (void *)1;
5076 }
5077
5078 /* Verify that the case labels are sorted. */
5079 prev = gimple_switch_label (stmt, 0);
5080 for (i = 1; i < n; ++i)
5081 {
5082 tree c = gimple_switch_label (stmt, i);
5083 if (!CASE_LOW (c))
5084 {
5085 error ("found default case not at the start of "
5086 "case vector");
5087 err = 1;
5088 continue;
5089 }
5090 if (CASE_LOW (prev)
5091 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5092 {
5093 error ("case labels not sorted: ");
5094 print_generic_expr (stderr, prev, 0);
5095 		    fprintf (stderr, " is greater than ");
5096 		    print_generic_expr (stderr, c, 0);
5097 		    fprintf (stderr, " but comes before it.\n");
5098 err = 1;
5099 }
5100 prev = c;
5101 }
5102 /* VRP will remove the default case if it can prove it will
5103 never be executed. So do not verify there always exists
5104 a default case here. */
5105
5106 FOR_EACH_EDGE (e, ei, bb->succs)
5107 {
5108 if (!e->dest->aux)
5109 {
5110 error ("extra outgoing edge %d->%d",
5111 bb->index, e->dest->index);
5112 err = 1;
5113 }
5114
5115 e->dest->aux = (void *)2;
5116 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5117 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5118 {
5119 error ("wrong outgoing edge flags at end of bb %d",
5120 bb->index);
5121 err = 1;
5122 }
5123 }
5124
5125 /* Check that we have all of them. */
5126 for (i = 0; i < n; ++i)
5127 {
5128 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5129 basic_block label_bb = label_to_block (lab);
5130
5131 if (label_bb->aux != (void *)2)
5132 {
5133 error ("missing edge %i->%i", bb->index, label_bb->index);
5134 err = 1;
5135 }
5136 }
5137
5138 FOR_EACH_EDGE (e, ei, bb->succs)
5139 e->dest->aux = (void *)0;
5140 }
5141 break;
5142
5143 case GIMPLE_EH_DISPATCH:
5144 err |= verify_eh_dispatch_edge (stmt);
5145 break;
5146
5147 default:
5148 break;
5149 }
5150 }
5151
5152 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5153 verify_dominators (CDI_DOMINATORS);
5154
5155 return err;
5156 }
5157
5158
5159 /* Updates phi nodes after creating a forwarder block joined
5160 by edge FALLTHRU. */
5161
5162 static void
5163 gimple_make_forwarder_block (edge fallthru)
5164 {
5165 edge e;
5166 edge_iterator ei;
5167 basic_block dummy, bb;
5168 tree var;
5169 gimple_stmt_iterator gsi;
5170
5171 dummy = fallthru->src;
5172 bb = fallthru->dest;
5173
5174 if (single_pred_p (bb))
5175 return;
5176
5177 /* If we redirected a branch we must create new PHI nodes at the
5178 start of BB. */
5179 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5180 {
5181 gimple phi, new_phi;
5182
5183 phi = gsi_stmt (gsi);
5184 var = gimple_phi_result (phi);
5185 new_phi = create_phi_node (var, bb);
5186 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5187 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5188 UNKNOWN_LOCATION);
5189 }
5190
5191 /* Add the arguments we have stored on edges. */
5192 FOR_EACH_EDGE (e, ei, bb->preds)
5193 {
5194 if (e == fallthru)
5195 continue;
5196
5197 flush_pending_stmts (e);
5198 }
5199 }
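
/* For example (an illustrative sketch): if the forwarder DUMMY took over
   the predecessors on edges 2 and 3, a PHI node that used to read

     x_1 = PHI <a_2(2), b_3(3), c_4(5)>

   becomes the pair

     DUMMY: x_5 = PHI <a_2(2), b_3(3)>
     BB:    x_1 = PHI <x_5(fallthru), c_4(5)>

   where x_5 is the freshly created SSA name.  */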
5200
5201
5202 /* Return a non-special label at the head of basic block BB.
5203 Create one if it doesn't exist. */
5204
5205 tree
5206 gimple_block_label (basic_block bb)
5207 {
5208 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5209 bool first = true;
5210 tree label;
5211 gimple stmt;
5212
5213 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5214 {
5215 stmt = gsi_stmt (i);
5216 if (gimple_code (stmt) != GIMPLE_LABEL)
5217 break;
5218 label = gimple_label_label (stmt);
5219 if (!DECL_NONLOCAL (label))
5220 {
5221 if (!first)
5222 gsi_move_before (&i, &s);
5223 return label;
5224 }
5225 }
5226
5227 label = create_artificial_label (UNKNOWN_LOCATION);
5228 stmt = gimple_build_label (label);
5229 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5230 return label;
5231 }
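
/* A typical use (sketch; ELT and BB stand for a case and its new
   destination): retargeting a GIMPLE_SWITCH case to block BB requires
   BB's label,

     CASE_LABEL (elt) = gimple_block_label (bb);

   as gimple_redirect_edge_and_branch does below.  */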
5232
5233
5234 /* Attempt to perform edge redirection by replacing a possibly complex
5235 jump instruction by a goto or by removing the jump completely.
5236 This can apply only if all edges now point to the same block. The
5237 parameters and return values are equivalent to
5238 redirect_edge_and_branch. */
5239
5240 static edge
5241 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5242 {
5243 basic_block src = e->src;
5244 gimple_stmt_iterator i;
5245 gimple stmt;
5246
5247 /* We can replace or remove a complex jump only when we have exactly
5248 two edges. */
5249 if (EDGE_COUNT (src->succs) != 2
5250 /* Verify that all targets will be TARGET. Specifically, the
5251 edge that is not E must also go to TARGET. */
5252 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5253 return NULL;
5254
5255 i = gsi_last_bb (src);
5256 if (gsi_end_p (i))
5257 return NULL;
5258
5259 stmt = gsi_stmt (i);
5260
5261 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5262 {
5263 gsi_remove (&i, true);
5264 e = ssa_redirect_edge (e, target);
5265 e->flags = EDGE_FALLTHRU;
5266 return e;
5267 }
5268
5269 return NULL;
5270 }
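
/* For instance (an illustrative sketch): given

     if (x_1 > 0) goto <bb 4>; else goto <bb 5>;

   redirecting the true edge to bb 5, where the false edge already
   points, lets us delete the GIMPLE_COND and fall through to bb 5.  */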
5271
5272
5273 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5274 edge representing the redirected branch. */
5275
5276 static edge
5277 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5278 {
5279 basic_block bb = e->src;
5280 gimple_stmt_iterator gsi;
5281 edge ret;
5282 gimple stmt;
5283
5284 if (e->flags & EDGE_ABNORMAL)
5285 return NULL;
5286
5287 if (e->dest == dest)
5288 return NULL;
5289
5290 if (e->flags & EDGE_EH)
5291 return redirect_eh_edge (e, dest);
5292
5293 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5294 {
5295 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5296 if (ret)
5297 return ret;
5298 }
5299
5300 gsi = gsi_last_bb (bb);
5301 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5302
5303 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5304 {
5305 case GIMPLE_COND:
5306 /* For COND_EXPR, we only need to redirect the edge. */
5307 break;
5308
5309 case GIMPLE_GOTO:
5310 /* No non-abnormal edges should lead from a non-simple goto, and
5311 simple ones should be represented implicitly. */
5312 gcc_unreachable ();
5313
5314 case GIMPLE_SWITCH:
5315 {
5316 tree label = gimple_block_label (dest);
5317 tree cases = get_cases_for_edge (e, stmt);
5318
5319 /* If we have a list of cases associated with E, then use it
5320 as it's a lot faster than walking the entire case vector. */
5321 if (cases)
5322 {
5323 edge e2 = find_edge (e->src, dest);
5324 tree last, first;
5325
5326 first = cases;
5327 while (cases)
5328 {
5329 last = cases;
5330 CASE_LABEL (cases) = label;
5331 cases = CASE_CHAIN (cases);
5332 }
5333
5334 /* If there was already an edge in the CFG, then we need
5335 to move all the cases associated with E to E2. */
5336 if (e2)
5337 {
5338 tree cases2 = get_cases_for_edge (e2, stmt);
5339
5340 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5341 CASE_CHAIN (cases2) = first;
5342 }
5343 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5344 }
5345 else
5346 {
5347 size_t i, n = gimple_switch_num_labels (stmt);
5348
5349 for (i = 0; i < n; i++)
5350 {
5351 tree elt = gimple_switch_label (stmt, i);
5352 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5353 CASE_LABEL (elt) = label;
5354 }
5355 }
5356 }
5357 break;
5358
5359 case GIMPLE_ASM:
5360 {
5361 int i, n = gimple_asm_nlabels (stmt);
5362 tree label = NULL;
5363
5364 for (i = 0; i < n; ++i)
5365 {
5366 tree cons = gimple_asm_label_op (stmt, i);
5367 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5368 {
5369 if (!label)
5370 label = gimple_block_label (dest);
5371 TREE_VALUE (cons) = label;
5372 }
5373 }
5374
5375 /* If we didn't find any label matching the former edge in the
5376 asm labels, we must be redirecting the fallthrough
5377 edge. */
5378 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5379 }
5380 break;
5381
5382 case GIMPLE_RETURN:
5383 gsi_remove (&gsi, true);
5384 e->flags |= EDGE_FALLTHRU;
5385 break;
5386
5387 case GIMPLE_OMP_RETURN:
5388 case GIMPLE_OMP_CONTINUE:
5389 case GIMPLE_OMP_SECTIONS_SWITCH:
5390 case GIMPLE_OMP_FOR:
5391 /* The edges from OMP constructs can be simply redirected. */
5392 break;
5393
5394 case GIMPLE_EH_DISPATCH:
5395 if (!(e->flags & EDGE_FALLTHRU))
5396 redirect_eh_dispatch_edge (stmt, e, dest);
5397 break;
5398
5399 case GIMPLE_TRANSACTION:
5400 /* The ABORT edge has a stored label associated with it, otherwise
5401 the edges are simply redirectable. */
5402 if (e->flags == 0)
5403 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5404 break;
5405
5406 default:
5407 /* Otherwise it must be a fallthru edge, and we don't need to
5408 do anything besides redirecting it. */
5409 gcc_assert (e->flags & EDGE_FALLTHRU);
5410 break;
5411 }
5412
5413 /* Update/insert PHI nodes as necessary. */
5414
5415 /* Now update the edges in the CFG. */
5416 e = ssa_redirect_edge (e, dest);
5417
5418 return e;
5419 }
5420
5421 /* Returns true if it is possible to remove edge E by redirecting
5422 it to the destination of the other edge from E->src. */
5423
5424 static bool
5425 gimple_can_remove_branch_p (const_edge e)
5426 {
5427 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5428 return false;
5429
5430 return true;
5431 }
5432
5433 /* Simple wrapper, as we can always redirect fallthru edges. */
5434
5435 static basic_block
5436 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5437 {
5438 e = gimple_redirect_edge_and_branch (e, dest);
5439 gcc_assert (e);
5440
5441 return NULL;
5442 }
5443
5444
5445 /* Splits basic block BB after statement STMT (but at least after the
5446 labels). If STMT is NULL, BB is split just after the labels. */
5447
5448 static basic_block
5449 gimple_split_block (basic_block bb, void *stmt)
5450 {
5451 gimple_stmt_iterator gsi;
5452 gimple_stmt_iterator gsi_tgt;
5453 gimple act;
5454 gimple_seq list;
5455 basic_block new_bb;
5456 edge e;
5457 edge_iterator ei;
5458
5459 new_bb = create_empty_bb (bb);
5460
5461 /* Redirect the outgoing edges. */
5462 new_bb->succs = bb->succs;
5463 bb->succs = NULL;
5464 FOR_EACH_EDGE (e, ei, new_bb->succs)
5465 e->src = new_bb;
5466
5467 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5468 stmt = NULL;
5469
5470 /* Move everything from GSI to the new basic block. */
5471 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5472 {
5473 act = gsi_stmt (gsi);
5474 if (gimple_code (act) == GIMPLE_LABEL)
5475 continue;
5476
5477 if (!stmt)
5478 break;
5479
5480 if (stmt == act)
5481 {
5482 gsi_next (&gsi);
5483 break;
5484 }
5485 }
5486
5487 if (gsi_end_p (gsi))
5488 return new_bb;
5489
5490   /* Split the statement list - avoid re-creating containers as this
5491 brings ugly quadratic memory consumption in the inliner.
5492 (We are still quadratic since we need to update stmt BB pointers,
5493 sadly.) */
5494 gsi_split_seq_before (&gsi, &list);
5495 set_bb_seq (new_bb, list);
5496 for (gsi_tgt = gsi_start (list);
5497 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5498 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5499
5500 return new_bb;
5501 }
5502
5503
5504 /* Moves basic block BB after block AFTER. */
5505
5506 static bool
5507 gimple_move_block_after (basic_block bb, basic_block after)
5508 {
5509 if (bb->prev_bb == after)
5510 return true;
5511
5512 unlink_block (bb);
5513 link_block (bb, after);
5514
5515 return true;
5516 }
5517
5518
5519 /* Return TRUE if block BB has no executable statements, otherwise return
5520 FALSE. */
5521
5522 static bool
5523 gimple_empty_block_p (basic_block bb)
5524 {
5525 /* BB must have no executable statements. */
5526 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5527 if (phi_nodes (bb))
5528 return false;
5529 if (gsi_end_p (gsi))
5530 return true;
5531 if (is_gimple_debug (gsi_stmt (gsi)))
5532 gsi_next_nondebug (&gsi);
5533 return gsi_end_p (gsi);
5534 }
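
/* For example, a block holding only labels and debug bind stmts counts
   as empty here, while a block with a PHI node or any real statement
   does not.  */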
5535
5536
5537 /* Split a basic block if it ends with a conditional branch (GIMPLE_COND
5538    or GIMPLE_SWITCH) and the other part of the block is not empty.  */
5539
5540 static basic_block
5541 gimple_split_block_before_cond_jump (basic_block bb)
5542 {
5543 gimple last, split_point;
5544 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5545 if (gsi_end_p (gsi))
5546 return NULL;
5547 last = gsi_stmt (gsi);
5548 if (gimple_code (last) != GIMPLE_COND
5549 && gimple_code (last) != GIMPLE_SWITCH)
5550 return NULL;
5551 gsi_prev_nondebug (&gsi);
5552 split_point = gsi_stmt (gsi);
5553 return split_block (bb, split_point)->dest;
5554 }
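
/* For instance (an illustrative sketch), a block

     a_1 = b_2 + c_3;
     if (a_1 > 0) goto <bb 4>; else goto <bb 5>;

   is split so that the GIMPLE_COND starts a block of its own, with the
   preceding statements left in the original block.  */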
5555
5556
5557 /* Return true if basic block BB can be duplicated.  */
5558
5559 static bool
5560 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5561 {
5562 return true;
5563 }
5564
5565 /* Create a duplicate of the basic block BB. NOTE: This does not
5566 preserve SSA form. */
5567
5568 static basic_block
5569 gimple_duplicate_bb (basic_block bb)
5570 {
5571 basic_block new_bb;
5572 gimple_stmt_iterator gsi, gsi_tgt;
5573 gimple_seq phis = phi_nodes (bb);
5574 gimple phi, stmt, copy;
5575
5576 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5577
5578 /* Copy the PHI nodes. We ignore PHI node arguments here because
5579      the incoming edges have not been set up yet.  */
5580 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5581 {
5582 phi = gsi_stmt (gsi);
5583 copy = create_phi_node (NULL_TREE, new_bb);
5584 create_new_def_for (gimple_phi_result (phi), copy,
5585 gimple_phi_result_ptr (copy));
5586 gimple_set_uid (copy, gimple_uid (phi));
5587 }
5588
5589 gsi_tgt = gsi_start_bb (new_bb);
5590 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5591 {
5592 def_operand_p def_p;
5593 ssa_op_iter op_iter;
5594 tree lhs;
5595
5596 stmt = gsi_stmt (gsi);
5597 if (gimple_code (stmt) == GIMPLE_LABEL)
5598 continue;
5599
5600 /* Don't duplicate label debug stmts. */
5601 if (gimple_debug_bind_p (stmt)
5602 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5603 == LABEL_DECL)
5604 continue;
5605
5606 /* Create a new copy of STMT and duplicate STMT's virtual
5607 operands. */
5608 copy = gimple_copy (stmt);
5609 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5610
5611 maybe_duplicate_eh_stmt (copy, stmt);
5612 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5613
5614 /* When copying around a stmt writing into a local non-user
5615 	 aggregate, make sure it won't share a stack slot with other
5616 vars. */
5617 lhs = gimple_get_lhs (stmt);
5618 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5619 {
5620 tree base = get_base_address (lhs);
5621 if (base
5622 && (TREE_CODE (base) == VAR_DECL
5623 || TREE_CODE (base) == RESULT_DECL)
5624 && DECL_IGNORED_P (base)
5625 && !TREE_STATIC (base)
5626 && !DECL_EXTERNAL (base)
5627 && (TREE_CODE (base) != VAR_DECL
5628 || !DECL_HAS_VALUE_EXPR_P (base)))
5629 DECL_NONSHAREABLE (base) = 1;
5630 }
5631
5632 /* Create new names for all the definitions created by COPY and
5633 add replacement mappings for each new name. */
5634 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5635 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5636 }
5637
5638 return new_bb;
5639 }
5640
5641 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5642
5643 static void
5644 add_phi_args_after_copy_edge (edge e_copy)
5645 {
5646 basic_block bb, bb_copy = e_copy->src, dest;
5647 edge e;
5648 edge_iterator ei;
5649 gimple phi, phi_copy;
5650 tree def;
5651 gimple_stmt_iterator psi, psi_copy;
5652
5653 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5654 return;
5655
5656 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5657
5658 if (e_copy->dest->flags & BB_DUPLICATED)
5659 dest = get_bb_original (e_copy->dest);
5660 else
5661 dest = e_copy->dest;
5662
5663 e = find_edge (bb, dest);
5664 if (!e)
5665 {
5666 /* During loop unrolling the target of the latch edge is copied.
5667 	 In this case we are not looking for the edge to DEST, but for
5668 	 the edge to the duplicated block whose original was DEST.  */
5669 FOR_EACH_EDGE (e, ei, bb->succs)
5670 {
5671 if ((e->dest->flags & BB_DUPLICATED)
5672 && get_bb_original (e->dest) == dest)
5673 break;
5674 }
5675
5676 gcc_assert (e != NULL);
5677 }
5678
5679 for (psi = gsi_start_phis (e->dest),
5680 psi_copy = gsi_start_phis (e_copy->dest);
5681 !gsi_end_p (psi);
5682 gsi_next (&psi), gsi_next (&psi_copy))
5683 {
5684 phi = gsi_stmt (psi);
5685 phi_copy = gsi_stmt (psi_copy);
5686 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5687 add_phi_arg (phi_copy, def, e_copy,
5688 gimple_phi_arg_location_from_edge (phi, e));
5689 }
5690 }
5691
5692
5693 /* Basic block BB_COPY was created by code duplication. Add phi node
5694 arguments for edges going out of BB_COPY. The blocks that were
5695 duplicated have BB_DUPLICATED set. */
5696
5697 void
5698 add_phi_args_after_copy_bb (basic_block bb_copy)
5699 {
5700 edge e_copy;
5701 edge_iterator ei;
5702
5703 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5704 {
5705 add_phi_args_after_copy_edge (e_copy);
5706 }
5707 }
5708
5709 /* Blocks in the REGION_COPY array of length N_REGION were created by
5710    duplication of basic blocks.  Add phi node arguments for edges
5711    going from these blocks.  If E_COPY is not NULL, also add
5712    phi node arguments for its destination.  */
5713
5714 void
5715 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5716 edge e_copy)
5717 {
5718 unsigned i;
5719
5720 for (i = 0; i < n_region; i++)
5721 region_copy[i]->flags |= BB_DUPLICATED;
5722
5723 for (i = 0; i < n_region; i++)
5724 add_phi_args_after_copy_bb (region_copy[i]);
5725 if (e_copy)
5726 add_phi_args_after_copy_edge (e_copy);
5727
5728 for (i = 0; i < n_region; i++)
5729 region_copy[i]->flags &= ~BB_DUPLICATED;
5730 }
5731
5732 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5733 important exit edge EXIT. By important we mean that no SSA name defined
5734    inside the region is live over the other exit edges of the region.  All entry
5735 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5736 to the duplicate of the region. Dominance and loop information is
5737 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5738 UPDATE_DOMINANCE is false then we assume that the caller will update the
5739 dominance information after calling this function. The new basic
5740    blocks are stored to REGION_COPY in the same order as in REGION,
5741 provided that REGION_COPY is not NULL.
5742 The function returns false if it is unable to copy the region,
5743 true otherwise. */
5744
5745 bool
5746 gimple_duplicate_sese_region (edge entry, edge exit,
5747 basic_block *region, unsigned n_region,
5748 basic_block *region_copy,
5749 bool update_dominance)
5750 {
5751 unsigned i;
5752 bool free_region_copy = false, copying_header = false;
5753 struct loop *loop = entry->dest->loop_father;
5754 edge exit_copy;
5755 vec<basic_block> doms;
5756 edge redirected;
5757 int total_freq = 0, entry_freq = 0;
5758 gcov_type total_count = 0, entry_count = 0;
5759
5760 if (!can_copy_bbs_p (region, n_region))
5761 return false;
5762
5763 /* Some sanity checking. Note that we do not check for all possible
5764      misuses of this function.  I.e. if you ask to copy something weird,
5765 it will work, but the state of structures probably will not be
5766 correct. */
5767 for (i = 0; i < n_region; i++)
5768 {
5769 /* We do not handle subloops, i.e. all the blocks must belong to the
5770 same loop. */
5771 if (region[i]->loop_father != loop)
5772 return false;
5773
5774 if (region[i] != entry->dest
5775 && region[i] == loop->header)
5776 return false;
5777 }
5778
5779 set_loop_copy (loop, loop);
5780
5781 /* In case the function is used for loop header copying (which is the primary
5782      use), ensure that EXIT and its copy will be the new latch and entry edges.  */
5783 if (loop->header == entry->dest)
5784 {
5785 copying_header = true;
5786 set_loop_copy (loop, loop_outer (loop));
5787
5788 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5789 return false;
5790
5791 for (i = 0; i < n_region; i++)
5792 if (region[i] != exit->src
5793 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5794 return false;
5795 }
5796
5797 if (!region_copy)
5798 {
5799 region_copy = XNEWVEC (basic_block, n_region);
5800 free_region_copy = true;
5801 }
5802
5803 initialize_original_copy_tables ();
5804
5805 /* Record blocks outside the region that are dominated by something
5806 inside. */
5807 if (update_dominance)
5808 {
5809 doms.create (0);
5810 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5811 }
5812
5813 if (entry->dest->count)
5814 {
5815 total_count = entry->dest->count;
5816 entry_count = entry->count;
5817 /* Fix up corner cases, to avoid division by zero or creation of negative
5818 frequencies. */
5819 if (entry_count > total_count)
5820 entry_count = total_count;
5821 }
5822 else
5823 {
5824 total_freq = entry->dest->frequency;
5825 entry_freq = EDGE_FREQUENCY (entry);
5826 /* Fix up corner cases, to avoid division by zero or creation of negative
5827 frequencies. */
5828 if (total_freq == 0)
5829 total_freq = 1;
5830 else if (entry_freq > total_freq)
5831 entry_freq = total_freq;
5832 }
5833
5834 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5835 split_edge_bb_loc (entry), update_dominance);
5836 if (total_count)
5837 {
5838 scale_bbs_frequencies_gcov_type (region, n_region,
5839 total_count - entry_count,
5840 total_count);
5841 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5842 total_count);
5843 }
5844 else
5845 {
5846 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5847 total_freq);
5848 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5849 }
5850
5851 if (copying_header)
5852 {
5853 loop->header = exit->dest;
5854 loop->latch = exit->src;
5855 }
5856
5857 /* Redirect the entry and add the phi node arguments. */
5858 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5859 gcc_assert (redirected != NULL);
5860 flush_pending_stmts (entry);
5861
5862 /* Concerning updating of dominators: We must recount dominators
5863      for the entry block and its copy.  Anything that is outside of the
5864 region, but was dominated by something inside needs recounting as
5865 well. */
5866 if (update_dominance)
5867 {
5868 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5869 doms.safe_push (get_bb_original (entry->dest));
5870 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5871 doms.release ();
5872 }
5873
5874 /* Add the other PHI node arguments. */
5875 add_phi_args_after_copy (region_copy, n_region, NULL);
5876
5877 if (free_region_copy)
5878 free (region_copy);
5879
5880 free_original_copy_tables ();
5881 return true;
5882 }
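
/* For the primary use, loop header copying, the effect is roughly
   (an illustrative sketch)

     while (cond)
       body;

   =>

     if (cond)
       {
	 body;
	 while (cond)
	   body;
       }

   with the duplicated header acting as the entry test.  */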
5883
5884 /* Check if BB is part of the region defined by the N_REGION blocks in BBS.  */
5885 static bool
5886 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5887 {
5888 unsigned int n;
5889
5890 for (n = 0; n < n_region; n++)
5891 {
5892 if (bb == bbs[n])
5893 return true;
5894 }
5895 return false;
5896 }
5897
5898 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5899    are stored to REGION_COPY in the same order as they appear
5900 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5901 the region, EXIT an exit from it. The condition guarding EXIT
5902 is moved to ENTRY. Returns true if duplication succeeds, false
5903 otherwise.
5904
5905 For example,
5906
5907 some_code;
5908 if (cond)
5909 A;
5910 else
5911 B;
5912
5913 is transformed to
5914
5915 if (cond)
5916 {
5917 some_code;
5918 A;
5919 }
5920 else
5921 {
5922 some_code;
5923 B;
5924 }
5925 */
5926
5927 bool
5928 gimple_duplicate_sese_tail (edge entry, edge exit,
5929 			    basic_block *region, unsigned n_region,
5930 			    basic_block *region_copy)
5931 {
5932 unsigned i;
5933 bool free_region_copy = false;
5934 struct loop *loop = exit->dest->loop_father;
5935 struct loop *orig_loop = entry->dest->loop_father;
5936 basic_block switch_bb, entry_bb, nentry_bb;
5937 vec<basic_block> doms;
5938 int total_freq = 0, exit_freq = 0;
5939 gcov_type total_count = 0, exit_count = 0;
5940 edge exits[2], nexits[2], e;
5941 gimple_stmt_iterator gsi;
5942 gimple cond_stmt;
5943 edge sorig, snew;
5944 basic_block exit_bb;
5945 gimple_stmt_iterator psi;
5946 gimple phi;
5947 tree def;
5948 struct loop *target, *aloop, *cloop;
5949
5950 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5951 exits[0] = exit;
5952 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5953
5954 if (!can_copy_bbs_p (region, n_region))
5955 return false;
5956
5957 initialize_original_copy_tables ();
5958 set_loop_copy (orig_loop, loop);
5959
5960   target = loop;
5961 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5962 {
5963 if (bb_part_of_region_p (aloop->header, region, n_region))
5964 {
5965 cloop = duplicate_loop (aloop, target);
5966 duplicate_subloops (aloop, cloop);
5967 }
5968 }
5969
5970 if (!region_copy)
5971 {
5972 region_copy = XNEWVEC (basic_block, n_region);
5973 free_region_copy = true;
5974 }
5975
5976 gcc_assert (!need_ssa_update_p (cfun));
5977
5978 /* Record blocks outside the region that are dominated by something
5979 inside. */
5980 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5981
5982 if (exit->src->count)
5983 {
5984 total_count = exit->src->count;
5985 exit_count = exit->count;
5986 /* Fix up corner cases, to avoid division by zero or creation of negative
5987 frequencies. */
5988 if (exit_count > total_count)
5989 exit_count = total_count;
5990 }
5991 else
5992 {
5993 total_freq = exit->src->frequency;
5994 exit_freq = EDGE_FREQUENCY (exit);
5995 /* Fix up corner cases, to avoid division by zero or creation of negative
5996 frequencies. */
5997 if (total_freq == 0)
5998 total_freq = 1;
5999 if (exit_freq > total_freq)
6000 exit_freq = total_freq;
6001 }
6002
6003 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6004 split_edge_bb_loc (exit), true);
6005 if (total_count)
6006 {
6007 scale_bbs_frequencies_gcov_type (region, n_region,
6008 total_count - exit_count,
6009 total_count);
6010 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6011 total_count);
6012 }
6013 else
6014 {
6015 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6016 total_freq);
6017 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6018 }
6019
6020   /* Create the switch block, and put the exit condition into it.  */
6021 entry_bb = entry->dest;
6022 nentry_bb = get_bb_copy (entry_bb);
6023 if (!last_stmt (entry->src)
6024 || !stmt_ends_bb_p (last_stmt (entry->src)))
6025 switch_bb = entry->src;
6026 else
6027 switch_bb = split_edge (entry);
6028 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6029
6030 gsi = gsi_last_bb (switch_bb);
6031 cond_stmt = last_stmt (exit->src);
6032 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6033 cond_stmt = gimple_copy (cond_stmt);
6034
6035 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6036
6037 sorig = single_succ_edge (switch_bb);
6038 sorig->flags = exits[1]->flags;
6039 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6040
6041 /* Register the new edge from SWITCH_BB in loop exit lists. */
6042 rescan_loop_exit (snew, true, false);
6043
6044 /* Add the PHI node arguments. */
6045 add_phi_args_after_copy (region_copy, n_region, snew);
6046
6047 /* Get rid of now superfluous conditions and associated edges (and phi node
6048 arguments). */
6049 exit_bb = exit->dest;
6050
6051 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6052 PENDING_STMT (e) = NULL;
6053
6054 /* The latch of ORIG_LOOP was copied, and so was the backedge
6055 to the original header. We redirect this backedge to EXIT_BB. */
6056 for (i = 0; i < n_region; i++)
6057 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6058 {
6059 gcc_assert (single_succ_edge (region_copy[i]));
6060 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6061 PENDING_STMT (e) = NULL;
6062 for (psi = gsi_start_phis (exit_bb);
6063 !gsi_end_p (psi);
6064 gsi_next (&psi))
6065 {
6066 phi = gsi_stmt (psi);
6067 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6068 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6069 }
6070 }
6071 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6072 PENDING_STMT (e) = NULL;
6073
6074 /* Anything that is outside of the region, but was dominated by something
6075 inside, needs its dominance info updated. */
6076 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6077 doms.release ();
6078 /* Update the SSA web. */
6079 update_ssa (TODO_update_ssa);
6080
6081 if (free_region_copy)
6082 free (region_copy);
6083
6084 free_original_copy_tables ();
6085 return true;
6086 }
6087
6088 /* Add all the blocks dominated by ENTRY to the array BBS_P, EXIT
6089 included; do not recurse below EXIT. This function silently
6090 assumes that ENTRY strictly dominates EXIT. */
6091
6092 void
6093 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6094 vec<basic_block> *bbs_p)
6095 {
6096 basic_block son;
6097
6098 for (son = first_dom_son (CDI_DOMINATORS, entry);
6099 son;
6100 son = next_dom_son (CDI_DOMINATORS, son))
6101 {
6102 bbs_p->safe_push (son);
6103 if (son != exit)
6104 gather_blocks_in_sese_region (son, exit, bbs_p);
6105 }
6106 }
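/* A minimal usage sketch (illustrative only, guarded out of the build):
   collect the blocks of an SESE region the same way
   move_sese_region_to_fn does below.  Note that ENTRY must be pushed
   by hand, since the traversal only records dominated sons.  */
#if 0
static vec<basic_block>
collect_sese_blocks_example (basic_block entry, basic_block exit)
{
  vec<basic_block> bbs;
  bbs.create (0);
  bbs.safe_push (entry);	/* ENTRY is not added by the walker itself.  */
  gather_blocks_in_sese_region (entry, exit, &bbs);
  return bbs;
}
#endif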
6107
6108 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6109 The duplicates are recorded in VARS_MAP. */
6110
6111 static void
6112 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6113 tree to_context)
6114 {
6115 tree t = *tp, new_t;
6116 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6117 void **loc;
6118
6119 if (DECL_CONTEXT (t) == to_context)
6120 return;
6121
6122 loc = pointer_map_contains (vars_map, t);
6123
6124 if (!loc)
6125 {
6126 loc = pointer_map_insert (vars_map, t);
6127
6128 if (SSA_VAR_P (t))
6129 {
6130 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6131 add_local_decl (f, new_t);
6132 }
6133 else
6134 {
6135 gcc_assert (TREE_CODE (t) == CONST_DECL);
6136 new_t = copy_node (t);
6137 }
6138 DECL_CONTEXT (new_t) = to_context;
6139
6140 *loc = new_t;
6141 }
6142 else
6143 new_t = (tree) *loc;
6144
6145 *tp = new_t;
6146 }
6147
6148
6149 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6150 VARS_MAP maps old ssa names and var_decls to the new ones. */
6151
6152 static tree
6153 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6154 tree to_context)
6155 {
6156 void **loc;
6157 tree new_name;
6158
6159 gcc_assert (!virtual_operand_p (name));
6160
6161 loc = pointer_map_contains (vars_map, name);
6162
6163 if (!loc)
6164 {
6165 tree decl = SSA_NAME_VAR (name);
6166 if (decl)
6167 {
6168 replace_by_duplicate_decl (&decl, vars_map, to_context);
6169 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6170 decl, SSA_NAME_DEF_STMT (name));
6171 if (SSA_NAME_IS_DEFAULT_DEF (name))
6172 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6173 decl, new_name);
6174 }
6175 else
6176 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6177 name, SSA_NAME_DEF_STMT (name));
6178
6179 loc = pointer_map_insert (vars_map, name);
6180 *loc = new_name;
6181 }
6182 else
6183 new_name = (tree) *loc;
6184
6185 return new_name;
6186 }
6187
6188 struct move_stmt_d
6189 {
6190 tree orig_block;
6191 tree new_block;
6192 tree from_context;
6193 tree to_context;
6194 struct pointer_map_t *vars_map;
6195 htab_t new_label_map;
6196 struct pointer_map_t *eh_map;
6197 bool remap_decls_p;
6198 };
6199
6200 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6201 contained in *TP if it was previously ORIG_BLOCK, and change the
6202 DECL_CONTEXT of every local variable referenced in *TP. */
6203
6204 static tree
6205 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6206 {
6207 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6208 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6209 tree t = *tp;
6210
6211 if (EXPR_P (t))
6212 {
6213 tree block = TREE_BLOCK (t);
6214 if (block == p->orig_block
6215 || (p->orig_block == NULL_TREE
6216 && block != NULL_TREE))
6217 TREE_SET_BLOCK (t, p->new_block);
6218 #ifdef ENABLE_CHECKING
6219 else if (block != NULL_TREE)
6220 {
6221 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6222 block = BLOCK_SUPERCONTEXT (block);
6223 gcc_assert (block == p->orig_block);
6224 }
6225 #endif
6226 }
6227 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6228 {
6229 if (TREE_CODE (t) == SSA_NAME)
6230 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6231 else if (TREE_CODE (t) == LABEL_DECL)
6232 {
6233 if (p->new_label_map)
6234 {
6235 struct tree_map in, *out;
6236 in.base.from = t;
6237 out = (struct tree_map *)
6238 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6239 if (out)
6240 *tp = t = out->to;
6241 }
6242
6243 DECL_CONTEXT (t) = p->to_context;
6244 }
6245 else if (p->remap_decls_p)
6246 {
6247 /* Replace T with its duplicate. T should no longer appear in the
6248 parent function, so this looks wasteful; however, it may appear
6249 in referenced_vars, and more importantly, as virtual operands of
6250 statements, and in alias lists of other variables. It would be
6251 quite difficult to expunge it from all those places. ??? It might
6252 suffice to do this for addressable variables. */
6253 if ((TREE_CODE (t) == VAR_DECL
6254 && !is_global_var (t))
6255 || TREE_CODE (t) == CONST_DECL)
6256 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6257 }
6258 *walk_subtrees = 0;
6259 }
6260 else if (TYPE_P (t))
6261 *walk_subtrees = 0;
6262
6263 return NULL_TREE;
6264 }
6265
6266 /* Helper for move_stmt_r. Given an EH region number for the source
6267 function, map that to the duplicate EH region number in the dest. */
6268
6269 static int
6270 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6271 {
6272 eh_region old_r, new_r;
6273 void **slot;
6274
6275 old_r = get_eh_region_from_number (old_nr);
6276 slot = pointer_map_contains (p->eh_map, old_r);
6277 new_r = (eh_region) *slot;
6278
6279 return new_r->index;
6280 }
6281
6282 /* Similar, but operate on INTEGER_CSTs. */
6283
6284 static tree
6285 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6286 {
6287 int old_nr, new_nr;
6288
6289 old_nr = tree_to_shwi (old_t_nr);
6290 new_nr = move_stmt_eh_region_nr (old_nr, p);
6291
6292 return build_int_cst (integer_type_node, new_nr);
6293 }
6294
6295 /* Like move_stmt_op, but for gimple statements.
6296
6297 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6298 contained in the current statement in *GSI_P and change the
6299 DECL_CONTEXT of every local variable referenced in the current
6300 statement. */
6301
6302 static tree
6303 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6304 struct walk_stmt_info *wi)
6305 {
6306 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6307 gimple stmt = gsi_stmt (*gsi_p);
6308 tree block = gimple_block (stmt);
6309
6310 if (block == p->orig_block
6311 || (p->orig_block == NULL_TREE
6312 && block != NULL_TREE))
6313 gimple_set_block (stmt, p->new_block);
6314
6315 switch (gimple_code (stmt))
6316 {
6317 case GIMPLE_CALL:
6318 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6319 {
6320 tree r, fndecl = gimple_call_fndecl (stmt);
6321 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6322 switch (DECL_FUNCTION_CODE (fndecl))
6323 {
6324 case BUILT_IN_EH_COPY_VALUES:
6325 r = gimple_call_arg (stmt, 1);
6326 r = move_stmt_eh_region_tree_nr (r, p);
6327 gimple_call_set_arg (stmt, 1, r);
6328 /* FALLTHRU */
6329
6330 case BUILT_IN_EH_POINTER:
6331 case BUILT_IN_EH_FILTER:
6332 r = gimple_call_arg (stmt, 0);
6333 r = move_stmt_eh_region_tree_nr (r, p);
6334 gimple_call_set_arg (stmt, 0, r);
6335 break;
6336
6337 default:
6338 break;
6339 }
6340 }
6341 break;
6342
6343 case GIMPLE_RESX:
6344 {
6345 int r = gimple_resx_region (stmt);
6346 r = move_stmt_eh_region_nr (r, p);
6347 gimple_resx_set_region (stmt, r);
6348 }
6349 break;
6350
6351 case GIMPLE_EH_DISPATCH:
6352 {
6353 int r = gimple_eh_dispatch_region (stmt);
6354 r = move_stmt_eh_region_nr (r, p);
6355 gimple_eh_dispatch_set_region (stmt, r);
6356 }
6357 break;
6358
6359 case GIMPLE_OMP_RETURN:
6360 case GIMPLE_OMP_CONTINUE:
6361 break;
6362 default:
6363 if (is_gimple_omp (stmt))
6364 {
6365 /* Do not remap variables inside OMP directives. Variables
6366 referenced in clauses and directive header belong to the
6367 parent function and should not be moved into the child
6368 function. */
6369 bool save_remap_decls_p = p->remap_decls_p;
6370 p->remap_decls_p = false;
6371 *handled_ops_p = true;
6372
6373 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6374 move_stmt_op, wi);
6375
6376 p->remap_decls_p = save_remap_decls_p;
6377 }
6378 break;
6379 }
6380
6381 return NULL_TREE;
6382 }
6383
6384 /* Move basic block BB from function CFUN to function DEST_FN. The
6385 block is moved out of the original linked list and placed after
6386 block AFTER in the new list. Also, the block is removed from the
6387 original array of blocks and placed in DEST_FN's array of blocks.
6388 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6389 updated to reflect the moved edges.
6390 
6391 The local variables are remapped to new instances; VARS_MAP is used
6392 to record the mapping.
6393
6394 static void
6395 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6396 basic_block after, bool update_edge_count_p,
6397 struct move_stmt_d *d)
6398 {
6399 struct control_flow_graph *cfg;
6400 edge_iterator ei;
6401 edge e;
6402 gimple_stmt_iterator si;
6403 unsigned old_len, new_len;
6404
6405 /* Remove BB from dominance structures. */
6406 delete_from_dominance_info (CDI_DOMINATORS, bb);
6407
6408 /* Move BB from its current loop to the copy in the new function. */
6409 if (current_loops)
6410 {
6411 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6412 if (new_loop)
6413 bb->loop_father = new_loop;
6414 }
6415
6416 /* Link BB to the new linked list. */
6417 move_block_after (bb, after);
6418
6419 /* Update the edge count in the corresponding flowgraphs. */
6420 if (update_edge_count_p)
6421 FOR_EACH_EDGE (e, ei, bb->succs)
6422 {
6423 cfun->cfg->x_n_edges--;
6424 dest_cfun->cfg->x_n_edges++;
6425 }
6426
6427 /* Remove BB from the original basic block array. */
6428 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6429 cfun->cfg->x_n_basic_blocks--;
6430
6431 /* Grow DEST_CFUN's basic block array if needed. */
6432 cfg = dest_cfun->cfg;
6433 cfg->x_n_basic_blocks++;
6434 if (bb->index >= cfg->x_last_basic_block)
6435 cfg->x_last_basic_block = bb->index + 1;
6436
6437 old_len = vec_safe_length (cfg->x_basic_block_info);
6438 if ((unsigned) cfg->x_last_basic_block >= old_len)
6439 {
6440 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6441 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6442 }
6443
6444 (*cfg->x_basic_block_info)[bb->index] = bb;
6445
6446 /* Remap the variables in phi nodes. */
6447 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6448 {
6449 gimple phi = gsi_stmt (si);
6450 use_operand_p use;
6451 tree op = PHI_RESULT (phi);
6452 ssa_op_iter oi;
6453 unsigned i;
6454
6455 if (virtual_operand_p (op))
6456 {
6457 /* Remove the phi nodes for virtual operands (alias analysis will be
6458 run for the new function, anyway). */
6459 remove_phi_node (&si, true);
6460 continue;
6461 }
6462
6463 SET_PHI_RESULT (phi,
6464 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6465 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6466 {
6467 op = USE_FROM_PTR (use);
6468 if (TREE_CODE (op) == SSA_NAME)
6469 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6470 }
6471
6472 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6473 {
6474 location_t locus = gimple_phi_arg_location (phi, i);
6475 tree block = LOCATION_BLOCK (locus);
6476
6477 if (locus == UNKNOWN_LOCATION)
6478 continue;
6479 if (d->orig_block == NULL_TREE || block == d->orig_block)
6480 {
6481 if (d->new_block == NULL_TREE)
6482 locus = LOCATION_LOCUS (locus);
6483 else
6484 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6485 gimple_phi_arg_set_location (phi, i, locus);
6486 }
6487 }
6488
6489 gsi_next (&si);
6490 }
6491
6492 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6493 {
6494 gimple stmt = gsi_stmt (si);
6495 struct walk_stmt_info wi;
6496
6497 memset (&wi, 0, sizeof (wi));
6498 wi.info = d;
6499 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6500
6501 if (gimple_code (stmt) == GIMPLE_LABEL)
6502 {
6503 tree label = gimple_label_label (stmt);
6504 int uid = LABEL_DECL_UID (label);
6505
6506 gcc_assert (uid > -1);
6507
6508 old_len = vec_safe_length (cfg->x_label_to_block_map);
6509 if (old_len <= (unsigned) uid)
6510 {
6511 new_len = 3 * uid / 2 + 1;
6512 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6513 }
6514
6515 (*cfg->x_label_to_block_map)[uid] = bb;
6516 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6517
6518 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6519
6520 if (uid >= dest_cfun->cfg->last_label_uid)
6521 dest_cfun->cfg->last_label_uid = uid + 1;
6522 }
6523
6524 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6525 remove_stmt_from_eh_lp_fn (cfun, stmt);
6526
6527 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6528 gimple_remove_stmt_histograms (cfun, stmt);
6529
6530 /* We cannot leave any operands allocated from the operand caches of
6531 the current function. */
6532 free_stmt_operands (stmt);
6533 push_cfun (dest_cfun);
6534 update_stmt (stmt);
6535 pop_cfun ();
6536 }
6537
6538 FOR_EACH_EDGE (e, ei, bb->succs)
6539 if (e->goto_locus != UNKNOWN_LOCATION)
6540 {
6541 tree block = LOCATION_BLOCK (e->goto_locus);
6542 if (d->orig_block == NULL_TREE
6543 || block == d->orig_block)
6544 e->goto_locus = d->new_block ?
6545 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6546 LOCATION_LOCUS (e->goto_locus);
6547 }
6548 }
6549
6550 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6551 the outermost EH region. Use REGION as the incoming base EH region. */
6552
6553 static eh_region
6554 find_outermost_region_in_block (struct function *src_cfun,
6555 basic_block bb, eh_region region)
6556 {
6557 gimple_stmt_iterator si;
6558
6559 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6560 {
6561 gimple stmt = gsi_stmt (si);
6562 eh_region stmt_region;
6563 int lp_nr;
6564
6565 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6566 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6567 if (stmt_region)
6568 {
6569 if (region == NULL)
6570 region = stmt_region;
6571 else if (stmt_region != region)
6572 {
6573 region = eh_region_outermost (src_cfun, stmt_region, region);
6574 gcc_assert (region != NULL);
6575 }
6576 }
6577 }
6578
6579 return region;
6580 }
6581
6582 static tree
6583 new_label_mapper (tree decl, void *data)
6584 {
6585 htab_t hash = (htab_t) data;
6586 struct tree_map *m;
6587 void **slot;
6588
6589 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6590
6591 m = XNEW (struct tree_map);
6592 m->hash = DECL_UID (decl);
6593 m->base.from = decl;
6594 m->to = create_artificial_label (UNKNOWN_LOCATION);
6595 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6596 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6597 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6598
6599 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6600 gcc_assert (*slot == NULL);
6601
6602 *slot = m;
6603
6604 return m->to;
6605 }
6606
6607 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6608 subblocks. */
6609
6610 static void
6611 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6612 tree to_context)
6613 {
6614 tree *tp, t;
6615
6616 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6617 {
6618 t = *tp;
6619 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6620 continue;
6621 replace_by_duplicate_decl (&t, vars_map, to_context);
6622 if (t != *tp)
6623 {
6624 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6625 {
6626 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6627 DECL_HAS_VALUE_EXPR_P (t) = 1;
6628 }
6629 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6630 *tp = t;
6631 }
6632 }
6633
6634 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6635 replace_block_vars_by_duplicates (block, vars_map, to_context);
6636 }
6637
6638 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6639 from FN1 to FN2. */
6640
6641 static void
6642 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6643 struct loop *loop)
6644 {
6645 /* Discard it from the old loop array. */
6646 (*get_loops (fn1))[loop->num] = NULL;
6647
6648 /* Place it in the new loop array, assigning it a new number. */
6649 loop->num = number_of_loops (fn2);
6650 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6651
6652 /* Recurse to children. */
6653 for (loop = loop->inner; loop; loop = loop->next)
6654 fixup_loop_arrays_after_move (fn1, fn2, loop);
6655 }
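/* Illustrative check (hypothetical helper, guarded out): after
   fixup_loop_arrays_after_move, the loop's old slot in FN1 is cleared
   and LOOP answers to its new number in FN2's loop array.  */
#if 0
static void
fixup_loop_arrays_example (struct function *fn1, struct function *fn2,
			   struct loop *loop, int old_num)
{
  gcc_assert ((*get_loops (fn1))[old_num] == NULL);
  gcc_assert ((*get_loops (fn2))[loop->num] == loop);
}
#endif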
6656
6657 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6658 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6659 single basic block in the original CFG and the new basic block is
6660 returned. DEST_CFUN must not have a CFG yet.
6661
6662 Note that the region need not be a pure SESE region. Blocks inside
6663 the region may contain calls to abort/exit. The only restriction
6664 is that ENTRY_BB should be the only entry point and it must
6665 dominate EXIT_BB.
6666
6667 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6668 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6669 to the new function.
6670
6671 All local variables referenced in the region are assumed to be in
6672 the corresponding BLOCK_VARS and unexpanded variable lists
6673 associated with DEST_CFUN. */
6674
6675 basic_block
6676 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6677 basic_block exit_bb, tree orig_block)
6678 {
6679 vec<basic_block> bbs, dom_bbs;
6680 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6681 basic_block after, bb, *entry_pred, *exit_succ, abb;
6682 struct function *saved_cfun = cfun;
6683 int *entry_flag, *exit_flag;
6684 unsigned *entry_prob, *exit_prob;
6685 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6686 edge e;
6687 edge_iterator ei;
6688 htab_t new_label_map;
6689 struct pointer_map_t *vars_map, *eh_map;
6690 struct loop *loop = entry_bb->loop_father;
6691 struct loop *loop0 = get_loop (saved_cfun, 0);
6692 struct move_stmt_d d;
6693
6694 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6695 region. */
6696 gcc_assert (entry_bb != exit_bb
6697 && (!exit_bb
6698 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6699
6700 /* Collect all the blocks in the region. Manually add ENTRY_BB
6701 because it won't be added by dfs_enumerate_from. */
6702 bbs.create (0);
6703 bbs.safe_push (entry_bb);
6704 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6705
6706 /* The blocks that used to be dominated by something in BBS will now be
6707 dominated by the new block. */
6708 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6709 bbs.address (),
6710 bbs.length ());
6711
6712 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6713 the predecessor edges to ENTRY_BB and the successor edges to
6714 EXIT_BB so that we can re-attach them to the new basic block that
6715 will replace the region. */
6716 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6717 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6718 entry_flag = XNEWVEC (int, num_entry_edges);
6719 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6720 i = 0;
6721 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6722 {
6723 entry_prob[i] = e->probability;
6724 entry_flag[i] = e->flags;
6725 entry_pred[i++] = e->src;
6726 remove_edge (e);
6727 }
6728
6729 if (exit_bb)
6730 {
6731 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6732 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6733 exit_flag = XNEWVEC (int, num_exit_edges);
6734 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6735 i = 0;
6736 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6737 {
6738 exit_prob[i] = e->probability;
6739 exit_flag[i] = e->flags;
6740 exit_succ[i++] = e->dest;
6741 remove_edge (e);
6742 }
6743 }
6744 else
6745 {
6746 num_exit_edges = 0;
6747 exit_succ = NULL;
6748 exit_flag = NULL;
6749 exit_prob = NULL;
6750 }
6751
6752 /* Switch context to the child function to initialize DEST_FN's CFG. */
6753 gcc_assert (dest_cfun->cfg == NULL);
6754 push_cfun (dest_cfun);
6755
6756 init_empty_tree_cfg ();
6757
6758 /* Initialize EH information for the new function. */
6759 eh_map = NULL;
6760 new_label_map = NULL;
6761 if (saved_cfun->eh)
6762 {
6763 eh_region region = NULL;
6764
6765 FOR_EACH_VEC_ELT (bbs, i, bb)
6766 region = find_outermost_region_in_block (saved_cfun, bb, region);
6767
6768 init_eh_for_function ();
6769 if (region != NULL)
6770 {
6771 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6772 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6773 new_label_mapper, new_label_map);
6774 }
6775 }
6776
6777 /* Initialize an empty loop tree. */
6778 struct loops *loops = ggc_alloc_cleared_loops ();
6779 init_loops_structure (dest_cfun, loops, 1);
6780 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6781 set_loops_for_fn (dest_cfun, loops);
6782
6783 /* Move the outlined loop tree part. */
6784 num_nodes = bbs.length ();
6785 FOR_EACH_VEC_ELT (bbs, i, bb)
6786 {
6787 if (bb->loop_father->header == bb)
6788 {
6789 struct loop *this_loop = bb->loop_father;
6790 struct loop *outer = loop_outer (this_loop);
6791 if (outer == loop
6792 /* If the SESE region contains some bbs ending with
6793 a noreturn call, those are considered to belong
6794 to the outermost loop in saved_cfun, rather than
6795 the entry_bb's loop_father. */
6796 || outer == loop0)
6797 {
6798 if (outer != loop)
6799 num_nodes -= this_loop->num_nodes;
6800 flow_loop_tree_node_remove (bb->loop_father);
6801 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6802 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6803 }
6804 }
6805 else if (bb->loop_father == loop0 && loop0 != loop)
6806 num_nodes--;
6807
6808 /* Remove loop exits from the outlined region. */
6809 if (loops_for_fn (saved_cfun)->exits)
6810 FOR_EACH_EDGE (e, ei, bb->succs)
6811 {
6812 void **slot = htab_find_slot_with_hash
6813 (loops_for_fn (saved_cfun)->exits, e,
6814 htab_hash_pointer (e), NO_INSERT);
6815 if (slot)
6816 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6817 }
6818 }
6819
6820
6821 /* Adjust the number of blocks in the tree root of the outlined part. */
6822 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6823
6824 /* Setup a mapping to be used by move_block_to_fn. */
6825 loop->aux = current_loops->tree_root;
6826 loop0->aux = current_loops->tree_root;
6827
6828 pop_cfun ();
6829
6830 /* Move blocks from BBS into DEST_CFUN. */
6831 gcc_assert (bbs.length () >= 2);
6832 after = dest_cfun->cfg->x_entry_block_ptr;
6833 vars_map = pointer_map_create ();
6834
6835 memset (&d, 0, sizeof (d));
6836 d.orig_block = orig_block;
6837 d.new_block = DECL_INITIAL (dest_cfun->decl);
6838 d.from_context = cfun->decl;
6839 d.to_context = dest_cfun->decl;
6840 d.vars_map = vars_map;
6841 d.new_label_map = new_label_map;
6842 d.eh_map = eh_map;
6843 d.remap_decls_p = true;
6844
6845 FOR_EACH_VEC_ELT (bbs, i, bb)
6846 {
6847 /* No need to update edge counts on the last block. It has
6848 already been updated earlier when we detached the region from
6849 the original CFG. */
6850 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6851 after = bb;
6852 }
6853
6854 loop->aux = NULL;
6855 loop0->aux = NULL;
6856 /* Loop sizes are no longer correct, fix them up. */
6857 loop->num_nodes -= num_nodes;
6858 for (struct loop *outer = loop_outer (loop);
6859 outer; outer = loop_outer (outer))
6860 outer->num_nodes -= num_nodes;
6861 loop0->num_nodes -= bbs.length () - num_nodes;
6862
6863 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6864 {
6865 struct loop *aloop;
6866 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6867 if (aloop != NULL)
6868 {
6869 if (aloop->simduid)
6870 {
6871 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6872 d.to_context);
6873 dest_cfun->has_simduid_loops = true;
6874 }
6875 if (aloop->force_vect)
6876 dest_cfun->has_force_vect_loops = true;
6877 }
6878 }
6879
6880 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6881 if (orig_block)
6882 {
6883 tree block;
6884 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6885 == NULL_TREE);
6886 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6887 = BLOCK_SUBBLOCKS (orig_block);
6888 for (block = BLOCK_SUBBLOCKS (orig_block);
6889 block; block = BLOCK_CHAIN (block))
6890 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6891 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6892 }
6893
6894 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6895 vars_map, dest_cfun->decl);
6896
6897 if (new_label_map)
6898 htab_delete (new_label_map);
6899 if (eh_map)
6900 pointer_map_destroy (eh_map);
6901 pointer_map_destroy (vars_map);
6902
6903 /* Rewire the entry and exit blocks. The successor to the entry
6904 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6905 the child function. Similarly, the predecessor of DEST_FN's
6906 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6907 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6908 various CFG manipulation function get to the right CFG.
6909
6910 FIXME, this is silly. The CFG ought to become a parameter to
6911 these helpers. */
6912 push_cfun (dest_cfun);
6913 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
6914 if (exit_bb)
6915 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
6916 pop_cfun ();
6917
6918 /* Back in the original function, the SESE region has disappeared,
6919 create a new basic block in its place. */
6920 bb = create_empty_bb (entry_pred[0]);
6921 if (current_loops)
6922 add_bb_to_loop (bb, loop);
6923 for (i = 0; i < num_entry_edges; i++)
6924 {
6925 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6926 e->probability = entry_prob[i];
6927 }
6928
6929 for (i = 0; i < num_exit_edges; i++)
6930 {
6931 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6932 e->probability = exit_prob[i];
6933 }
6934
6935 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6936 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6937 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6938 dom_bbs.release ();
6939
6940 if (exit_bb)
6941 {
6942 free (exit_prob);
6943 free (exit_flag);
6944 free (exit_succ);
6945 }
6946 free (entry_prob);
6947 free (entry_flag);
6948 free (entry_pred);
6949 bbs.release ();
6950
6951 return bb;
6952 }
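/* Illustrative caller sketch (hypothetical names, guarded out):
   outline the region between ENTRY_BB and EXIT_BB into CHILD_FN, much
   as an outlining pass such as the OMP expander would.  The child's
   struct function must exist but must not have a CFG yet.  */
#if 0
static basic_block
outline_region_example (tree child_fn, basic_block entry_bb,
			basic_block exit_bb)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb,
				 NULL_TREE);
}
#endif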
6953
6954
6955 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
6956 dumpfile.h).  */
6957
6958 void
6959 dump_function_to_file (tree fndecl, FILE *file, int flags)
6960 {
6961 tree arg, var, old_current_fndecl = current_function_decl;
6962 struct function *dsf;
6963 bool ignore_topmost_bind = false, any_var = false;
6964 basic_block bb;
6965 tree chain;
6966 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6967 && decl_is_tm_clone (fndecl));
6968 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6969
6970 current_function_decl = fndecl;
6971 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6972
6973 arg = DECL_ARGUMENTS (fndecl);
6974 while (arg)
6975 {
6976 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6977 fprintf (file, " ");
6978 print_generic_expr (file, arg, dump_flags);
6979 if (flags & TDF_VERBOSE)
6980 print_node (file, "", arg, 4);
6981 if (DECL_CHAIN (arg))
6982 fprintf (file, ", ");
6983 arg = DECL_CHAIN (arg);
6984 }
6985 fprintf (file, ")\n");
6986
6987 if (flags & TDF_VERBOSE)
6988 print_node (file, "", fndecl, 2);
6989
6990 dsf = DECL_STRUCT_FUNCTION (fndecl);
6991 if (dsf && (flags & TDF_EH))
6992 dump_eh_tree (file, dsf);
6993
6994 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
6995 {
6996 dump_node (fndecl, TDF_SLIM | flags, file);
6997 current_function_decl = old_current_fndecl;
6998 return;
6999 }
7000
7001 /* When GIMPLE is lowered, the variables are no longer available in
7002 BIND_EXPRs, so display them separately. */
7003 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7004 {
7005 unsigned ix;
7006 ignore_topmost_bind = true;
7007
7008 fprintf (file, "{\n");
7009 if (!vec_safe_is_empty (fun->local_decls))
7010 FOR_EACH_LOCAL_DECL (fun, ix, var)
7011 {
7012 print_generic_decl (file, var, flags);
7013 if (flags & TDF_VERBOSE)
7014 print_node (file, "", var, 4);
7015 fprintf (file, "\n");
7016
7017 any_var = true;
7018 }
7019 if (gimple_in_ssa_p (cfun))
7020 for (ix = 1; ix < num_ssa_names; ++ix)
7021 {
7022 tree name = ssa_name (ix);
7023 if (name && !SSA_NAME_VAR (name))
7024 {
7025 fprintf (file, " ");
7026 print_generic_expr (file, TREE_TYPE (name), flags);
7027 fprintf (file, " ");
7028 print_generic_expr (file, name, flags);
7029 fprintf (file, ";\n");
7030
7031 any_var = true;
7032 }
7033 }
7034 }
7035
7036 if (fun && fun->decl == fndecl
7037 && fun->cfg
7038 && basic_block_info_for_function (fun))
7039 {
7040 /* If the CFG has been built, emit a CFG-based dump. */
7041 if (!ignore_topmost_bind)
7042 fprintf (file, "{\n");
7043
7044 if (any_var && n_basic_blocks_for_fn (fun))
7045 fprintf (file, "\n");
7046
7047 FOR_EACH_BB_FN (bb, fun)
7048 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7049
7050 fprintf (file, "}\n");
7051 }
7052 else if (DECL_SAVED_TREE (fndecl) == NULL)
7053 {
7054 /* The function is now in GIMPLE form but the CFG has not been
7055 built yet. Emit the single sequence of GIMPLE statements
7056 that make up its body. */
7057 gimple_seq body = gimple_body (fndecl);
7058
7059 if (gimple_seq_first_stmt (body)
7060 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7061 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7062 print_gimple_seq (file, body, 0, flags);
7063 else
7064 {
7065 if (!ignore_topmost_bind)
7066 fprintf (file, "{\n");
7067
7068 if (any_var)
7069 fprintf (file, "\n");
7070
7071 print_gimple_seq (file, body, 2, flags);
7072 fprintf (file, "}\n");
7073 }
7074 }
7075 else
7076 {
7077 int indent;
7078
7079 /* Make a tree based dump. */
7080 chain = DECL_SAVED_TREE (fndecl);
7081 if (chain && TREE_CODE (chain) == BIND_EXPR)
7082 {
7083 if (ignore_topmost_bind)
7084 {
7085 chain = BIND_EXPR_BODY (chain);
7086 indent = 2;
7087 }
7088 else
7089 indent = 0;
7090 }
7091 else
7092 {
7093 if (!ignore_topmost_bind)
7094 fprintf (file, "{\n");
7095 indent = 2;
7096 }
7097
7098 if (any_var)
7099 fprintf (file, "\n");
7100
7101 print_generic_stmt_indented (file, chain, flags, indent);
7102 if (ignore_topmost_bind)
7103 fprintf (file, "}\n");
7104 }
7105
7106 if (flags & TDF_ENUMERATE_LOCALS)
7107 dump_enumerated_decls (file, flags);
7108 fprintf (file, "\n\n");
7109
7110 current_function_decl = old_current_fndecl;
7111 }
7112
7113 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7114
7115 DEBUG_FUNCTION void
7116 debug_function (tree fn, int flags)
7117 {
7118 dump_function_to_file (fn, stderr, flags);
7119 }
7120
7121
7122 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7123
7124 static void
7125 print_pred_bbs (FILE *file, basic_block bb)
7126 {
7127 edge e;
7128 edge_iterator ei;
7129
7130 FOR_EACH_EDGE (e, ei, bb->preds)
7131 fprintf (file, "bb_%d ", e->src->index);
7132 }
7133
7134
7135 /* Print on FILE the indexes for the successors of basic_block BB. */
7136
7137 static void
7138 print_succ_bbs (FILE *file, basic_block bb)
7139 {
7140 edge e;
7141 edge_iterator ei;
7142
7143 FOR_EACH_EDGE (e, ei, bb->succs)
7144 fprintf (file, "bb_%d ", e->dest->index);
7145 }
7146
7147 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7148
7149 void
7150 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7151 {
7152 char *s_indent = (char *) alloca ((size_t) indent + 1);
7153 memset ((void *) s_indent, ' ', (size_t) indent);
7154 s_indent[indent] = '\0';
7155
7156 /* Print basic_block's header. */
7157 if (verbosity >= 2)
7158 {
7159 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7160 print_pred_bbs (file, bb);
7161 fprintf (file, "}, succs = {");
7162 print_succ_bbs (file, bb);
7163 fprintf (file, "})\n");
7164 }
7165
7166 /* Print basic_block's body. */
7167 if (verbosity >= 3)
7168 {
7169 fprintf (file, "%s {\n", s_indent);
7170 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7171 fprintf (file, "%s }\n", s_indent);
7172 }
7173 }
7174
7175 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7176
7177 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7178 the VERBOSITY level, this outputs the contents of the loop, or just
7179 its structure. */
7180
7181 static void
7182 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7183 {
7184 char *s_indent;
7185 basic_block bb;
7186
7187 if (loop == NULL)
7188 return;
7189
7190 s_indent = (char *) alloca ((size_t) indent + 1);
7191 memset ((void *) s_indent, ' ', (size_t) indent);
7192 s_indent[indent] = '\0';
7193
7194 /* Print loop's header. */
7195 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7196 if (loop->header)
7197 fprintf (file, "header = %d", loop->header->index);
7198 else
7199 {
7200 fprintf (file, "deleted)\n");
7201 return;
7202 }
7203 if (loop->latch)
7204 fprintf (file, ", latch = %d", loop->latch->index);
7205 else
7206 fprintf (file, ", multiple latches");
7207 fprintf (file, ", niter = ");
7208 print_generic_expr (file, loop->nb_iterations, 0);
7209
7210 if (loop->any_upper_bound)
7211 {
7212 fprintf (file, ", upper_bound = ");
7213 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7214 }
7215
7216 if (loop->any_estimate)
7217 {
7218 fprintf (file, ", estimate = ");
7219 dump_double_int (file, loop->nb_iterations_estimate, true);
7220 }
7221 fprintf (file, ")\n");
7222
7223 /* Print loop's body. */
7224 if (verbosity >= 1)
7225 {
7226 fprintf (file, "%s{\n", s_indent);
7227 FOR_EACH_BB (bb)
7228 if (bb->loop_father == loop)
7229 print_loops_bb (file, bb, indent, verbosity);
7230
7231 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7232 fprintf (file, "%s}\n", s_indent);
7233 }
7234 }
7235
7236 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7237 spaces. Depending on the VERBOSITY level, this outputs the contents
7238 of the loop, or just its structure. */
7239
7240 static void
7241 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7242 int verbosity)
7243 {
7244 if (loop == NULL)
7245 return;
7246
7247 print_loop (file, loop, indent, verbosity);
7248 print_loop_and_siblings (file, loop->next, indent, verbosity);
7249 }
7250
7251 /* Follow a CFG edge from the entry point of the program, and on entry
7252 of a loop, pretty print the loop structure on FILE. */
7253
7254 void
7255 print_loops (FILE *file, int verbosity)
7256 {
7257 basic_block bb;
7258
7259 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7260 if (bb && bb->loop_father)
7261 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7262 }
7263
7264 /* Dump a loop. */
7265
7266 DEBUG_FUNCTION void
7267 debug (struct loop &ref)
7268 {
7269 print_loop (stderr, &ref, 0, /*verbosity*/0);
7270 }
7271
7272 DEBUG_FUNCTION void
7273 debug (struct loop *ptr)
7274 {
7275 if (ptr)
7276 debug (*ptr);
7277 else
7278 fprintf (stderr, "<nil>\n");
7279 }
7280
7281 /* Dump a loop verbosely. */
7282
7283 DEBUG_FUNCTION void
7284 debug_verbose (struct loop &ref)
7285 {
7286 print_loop (stderr, &ref, 0, /*verbosity*/3);
7287 }
7288
7289 DEBUG_FUNCTION void
7290 debug_verbose (struct loop *ptr)
7291 {
7292 if (ptr)
7293 debug_verbose (*ptr);
7294 else
7295 fprintf (stderr, "<nil>\n");
7296 }
7297
7298
7299 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7300
7301 DEBUG_FUNCTION void
7302 debug_loops (int verbosity)
7303 {
7304 print_loops (stderr, verbosity);
7305 }
7306
7307 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7308
7309 DEBUG_FUNCTION void
7310 debug_loop (struct loop *loop, int verbosity)
7311 {
7312 print_loop (stderr, loop, 0, verbosity);
7313 }
7314
7315 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7316 level. */
7317
7318 DEBUG_FUNCTION void
7319 debug_loop_num (unsigned num, int verbosity)
7320 {
7321 debug_loop (get_loop (cfun, num), verbosity);
7322 }
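/* For example, from a gdb session one would typically invoke these as
   (gdb) call debug_loops (1)
   (gdb) call debug_loop_num (2, 3)
   to print the whole loop tree, or loop number 2 with its body.  */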
7323
7324 /* Return true if BB ends with a call, possibly followed by some
7325 instructions that must stay with the call. Return false
7326 otherwise. */
7327
7328 static bool
7329 gimple_block_ends_with_call_p (basic_block bb)
7330 {
7331 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7332 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7333 }
7334
7335
7336 /* Return true if BB ends with a conditional branch. Return false
7337 otherwise. */
7338
7339 static bool
7340 gimple_block_ends_with_condjump_p (const_basic_block bb)
7341 {
7342 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7343 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7344 }
7345
7346
7347 /* Return true if we need to add fake edge to exit at statement T.
7348 Helper function for gimple_flow_call_edges_add. */
7349
7350 static bool
7351 need_fake_edge_p (gimple t)
7352 {
7353 tree fndecl = NULL_TREE;
7354 int call_flags = 0;
7355
7356 /* NORETURN and LONGJMP calls already have an edge to exit.
7357 CONST and PURE calls do not need one.
7358 We don't currently check for CONST and PURE here, although
7359 it would be a good idea, because those attributes are
7360 figured out from the RTL in mark_constant_function, and
7361 the counter incrementation code from -fprofile-arcs
7362 leads to different results from -fbranch-probabilities. */
7363 if (is_gimple_call (t))
7364 {
7365 fndecl = gimple_call_fndecl (t);
7366 call_flags = gimple_call_flags (t);
7367 }
7368
7369 if (is_gimple_call (t)
7370 && fndecl
7371 && DECL_BUILT_IN (fndecl)
7372 && (call_flags & ECF_NOTHROW)
7373 && !(call_flags & ECF_RETURNS_TWICE)
7374 /* fork() doesn't really return twice, but the effect of
7375 wrapping it in __gcov_fork() which calls __gcov_flush()
7376 and clears the counters before forking has the same
7377 effect as returning twice. Force a fake edge. */
7378 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7379 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7380 return false;
7381
7382 if (is_gimple_call (t))
7383 {
7384 edge_iterator ei;
7385 edge e;
7386 basic_block bb;
7387
7388 if (!(call_flags & ECF_NORETURN))
7389 return true;
7390
7391 bb = gimple_bb (t);
7392 FOR_EACH_EDGE (e, ei, bb->succs)
7393 if ((e->flags & EDGE_FAKE) == 0)
7394 return true;
7395 }
7396
7397 if (gimple_code (t) == GIMPLE_ASM
7398 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7399 return true;
7400
7401 return false;
7402 }
7403
7404
7405 /* Add fake edges to the function exit for any non-constant and
7406 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7407 for volatile inline assembly, in the bitmap of blocks specified by
7408 BLOCKS, or in the whole CFG if BLOCKS is zero. Return the number of blocks
7409 that were split.
7410
7411 The goal is to expose cases in which entering a basic block does
7412 not imply that all subsequent instructions must be executed. */
7413
7414 static int
7415 gimple_flow_call_edges_add (sbitmap blocks)
7416 {
7417 int i;
7418 int blocks_split = 0;
7419 int last_bb = last_basic_block;
7420 bool check_last_block = false;
7421
7422 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7423 return 0;
7424
7425 if (! blocks)
7426 check_last_block = true;
7427 else
7428 check_last_block = bitmap_bit_p (blocks,
7429 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7430
7431 /* In the last basic block, before epilogue generation, there will be
7432 a fallthru edge to EXIT. Special care is required if the last insn
7433 of the last basic block is a call because make_edge folds duplicate
7434 edges, which would result in the fallthru edge also being marked
7435 fake, which would result in the fallthru edge being removed by
7436 remove_fake_edges, which would result in an invalid CFG.
7437
7438 Moreover, we can't elide the outgoing fake edge, since the block
7439 profiler needs to take this into account in order to solve the minimal
7440 spanning tree in the case that the call doesn't return.
7441
7442 Handle this by adding a dummy instruction in a new last basic block. */
7443 if (check_last_block)
7444 {
7445 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7446 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7447 gimple t = NULL;
7448
7449 if (!gsi_end_p (gsi))
7450 t = gsi_stmt (gsi);
7451
7452 if (t && need_fake_edge_p (t))
7453 {
7454 edge e;
7455
7456 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7457 if (e)
7458 {
7459 gsi_insert_on_edge (e, gimple_build_nop ());
7460 gsi_commit_edge_inserts ();
7461 }
7462 }
7463 }
7464
7465 /* Now add fake edges to the function exit for any non-constant
7466 calls since there is no way that we can determine if they will
7467 return or not... */
7468 for (i = 0; i < last_bb; i++)
7469 {
7470 basic_block bb = BASIC_BLOCK (i);
7471 gimple_stmt_iterator gsi;
7472 gimple stmt, last_stmt;
7473
7474 if (!bb)
7475 continue;
7476
7477 if (blocks && !bitmap_bit_p (blocks, i))
7478 continue;
7479
7480 gsi = gsi_last_nondebug_bb (bb);
7481 if (!gsi_end_p (gsi))
7482 {
7483 last_stmt = gsi_stmt (gsi);
7484 do
7485 {
7486 stmt = gsi_stmt (gsi);
7487 if (need_fake_edge_p (stmt))
7488 {
7489 edge e;
7490
7491 /* The handling above of the final block before the
7492 epilogue should be enough to verify that there is
7493 no edge to the exit block in CFG already.
7494 Calling make_edge in such case would cause us to
7495 mark that edge as fake and remove it later. */
7496 #ifdef ENABLE_CHECKING
7497 if (stmt == last_stmt)
7498 {
7499 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7500 gcc_assert (e == NULL);
7501 }
7502 #endif
7503
7504 /* Note that the following may create a new basic block
7505 and renumber the existing basic blocks. */
7506 if (stmt != last_stmt)
7507 {
7508 e = split_block (bb, stmt);
7509 if (e)
7510 blocks_split++;
7511 }
7512 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7513 }
7514 gsi_prev (&gsi);
7515 }
7516 while (!gsi_end_p (gsi));
7517 }
7518 }
7519
7520 if (blocks_split)
7521 verify_flow_info ();
7522
7523 return blocks_split;
7524 }
7525
7526 /* Removes edge E and all the blocks dominated by it, and updates dominance
7527 information. The IL in E->src needs to be updated separately.
7528 If dominance info is not available, only the edge E is removed. */
7529
7530 void
7531 remove_edge_and_dominated_blocks (edge e)
7532 {
7533 vec<basic_block> bbs_to_remove = vNULL;
7534 vec<basic_block> bbs_to_fix_dom = vNULL;
7535 bitmap df, df_idom;
7536 edge f;
7537 edge_iterator ei;
7538 bool none_removed = false;
7539 unsigned i;
7540 basic_block bb, dbb;
7541 bitmap_iterator bi;
7542
7543 if (!dom_info_available_p (CDI_DOMINATORS))
7544 {
7545 remove_edge (e);
7546 return;
7547 }
7548
7549 /* No updating is needed for edges to exit. */
7550 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7551 {
7552 if (cfgcleanup_altered_bbs)
7553 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7554 remove_edge (e);
7555 return;
7556 }
7557
7558 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7559 that is not dominated by E->dest, then this set is empty. Otherwise,
7560 all the basic blocks dominated by E->dest are removed.
7561
7562 Also, to DF_IDOM we store the immediate dominators of the blocks in
7563 the dominance frontier of E (i.e., of the successors of the
7564 removed blocks, if there are any, and of E->dest otherwise). */
7565 FOR_EACH_EDGE (f, ei, e->dest->preds)
7566 {
7567 if (f == e)
7568 continue;
7569
7570 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7571 {
7572 none_removed = true;
7573 break;
7574 }
7575 }
7576
7577 df = BITMAP_ALLOC (NULL);
7578 df_idom = BITMAP_ALLOC (NULL);
7579
7580 if (none_removed)
7581 bitmap_set_bit (df_idom,
7582 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7583 else
7584 {
7585 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7586 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7587 {
7588 FOR_EACH_EDGE (f, ei, bb->succs)
7589 {
7590 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7591 bitmap_set_bit (df, f->dest->index);
7592 }
7593 }
7594 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7595 bitmap_clear_bit (df, bb->index);
7596
7597 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7598 {
7599 bb = BASIC_BLOCK (i);
7600 bitmap_set_bit (df_idom,
7601 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7602 }
7603 }
7604
7605 if (cfgcleanup_altered_bbs)
7606 {
7607 /* Record the set of the altered basic blocks. */
7608 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7609 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7610 }
7611
7612 /* Remove E and the cancelled blocks. */
7613 if (none_removed)
7614 remove_edge (e);
7615 else
7616 {
7617 /* Walk backwards so as to get a chance to substitute all
7618 released DEFs into debug stmts. See
7619 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7620 details. */
7621 for (i = bbs_to_remove.length (); i-- > 0; )
7622 delete_basic_block (bbs_to_remove[i]);
7623 }
7624
7625 /* Update the dominance information. The immediate dominator may change only
7626 for blocks whose immediate dominator belongs to DF_IDOM:
7627
7628 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7629 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7630 Z dominates X after the removal. Before removal, there exists a path P
7631 from Y to X that avoids Z. Let F be the last edge on P that is
7632 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7633 dominates W, and because of P, Z does not dominate W), and W belongs to
7634 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7635 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7636 {
7637 bb = BASIC_BLOCK (i);
7638 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7639 dbb;
7640 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7641 bbs_to_fix_dom.safe_push (dbb);
7642 }
7643
7644 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7645
7646 BITMAP_FREE (df);
7647 BITMAP_FREE (df_idom);
7648 bbs_to_remove.release ();
7649 bbs_to_fix_dom.release ();
7650 }
7651
7652 /* Purge dead EH edges from basic block BB. */
7653
7654 bool
7655 gimple_purge_dead_eh_edges (basic_block bb)
7656 {
7657 bool changed = false;
7658 edge e;
7659 edge_iterator ei;
7660 gimple stmt = last_stmt (bb);
7661
7662 if (stmt && stmt_can_throw_internal (stmt))
7663 return false;
7664
7665 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7666 {
7667 if (e->flags & EDGE_EH)
7668 {
7669 remove_edge_and_dominated_blocks (e);
7670 changed = true;
7671 }
7672 else
7673 ei_next (&ei);
7674 }
7675
7676 return changed;
7677 }
7678
7679 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7680
7681 bool
7682 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7683 {
7684 bool changed = false;
7685 unsigned i;
7686 bitmap_iterator bi;
7687
7688 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7689 {
7690 basic_block bb = BASIC_BLOCK (i);
7691
7692 /* Earlier gimple_purge_dead_eh_edges could have removed
7693 this basic block already. */
7694 gcc_assert (bb || changed);
7695 if (bb != NULL)
7696 changed |= gimple_purge_dead_eh_edges (bb);
7697 }
7698
7699 return changed;
7700 }
7701
7702 /* Purge dead abnormal call edges from basic block BB. */
7703
7704 bool
7705 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7706 {
7707 bool changed = false;
7708 edge e;
7709 edge_iterator ei;
7710 gimple stmt = last_stmt (bb);
7711
7712 if (!cfun->has_nonlocal_label
7713 && !cfun->calls_setjmp)
7714 return false;
7715
7716 if (stmt && stmt_can_make_abnormal_goto (stmt))
7717 return false;
7718
7719 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7720 {
7721 if (e->flags & EDGE_ABNORMAL)
7722 {
7723 if (e->flags & EDGE_FALLTHRU)
7724 e->flags &= ~EDGE_ABNORMAL;
7725 else
7726 remove_edge_and_dominated_blocks (e);
7727 changed = true;
7728 }
7729 else
7730 ei_next (&ei);
7731 }
7732
7733 return changed;
7734 }
7735
7736 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7737
7738 bool
7739 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7740 {
7741 bool changed = false;
7742 unsigned i;
7743 bitmap_iterator bi;
7744
7745 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7746 {
7747 basic_block bb = BASIC_BLOCK (i);
7748
7749 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7750 this basic block already. */
7751 gcc_assert (bb || changed);
7752 if (bb != NULL)
7753 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7754 }
7755
7756 return changed;
7757 }
7758
7759 /* This function is called whenever a new edge is created or
7760 redirected. */
7761
7762 static void
7763 gimple_execute_on_growing_pred (edge e)
7764 {
7765 basic_block bb = e->dest;
7766
7767 if (!gimple_seq_empty_p (phi_nodes (bb)))
7768 reserve_phi_args_for_new_edge (bb);
7769 }
7770
7771 /* This function is called immediately before edge E is removed from
7772 the edge vector E->dest->preds. */
7773
7774 static void
7775 gimple_execute_on_shrinking_pred (edge e)
7776 {
7777 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7778 remove_phi_args (e);
7779 }
7780
7781 /*---------------------------------------------------------------------------
7782 Helper functions for Loop versioning
7783 ---------------------------------------------------------------------------*/
7784
7785 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7786 of 'first'. Both of them are dominated by 'new_head' basic block. When
7787 'new_head' was created by splitting 'second's incoming edge, it received
7788 phi arguments on that edge from split_edge(). Later, an additional edge
7789 'e' was created to connect 'new_head' and 'first'. This routine adds to
7790 edge 'e' the phi args that the edge from 'new_head' to 'second' received
7791 as part of the edge splitting. */
7792
7793 static void
7794 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7795 basic_block new_head, edge e)
7796 {
7797 gimple phi1, phi2;
7798 gimple_stmt_iterator psi1, psi2;
7799 tree def;
7800 edge e2 = find_edge (new_head, second);
7801
7802 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7803 edge, we should always have an edge from NEW_HEAD to SECOND. */
7804 gcc_assert (e2 != NULL);
7805
7806 /* Browse all 'second' basic block phi nodes and add phi args to
7807 edge 'e' for 'first' head. PHI args are always in correct order. */
7808
7809 for (psi2 = gsi_start_phis (second),
7810 psi1 = gsi_start_phis (first);
7811 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7812 gsi_next (&psi2), gsi_next (&psi1))
7813 {
7814 phi1 = gsi_stmt (psi1);
7815 phi2 = gsi_stmt (psi2);
7816 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7817 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7818 }
7819 }
7820
7821
7822 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7823 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7824 the destination of the ELSE part. */
7825
7826 static void
7827 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7828 basic_block second_head ATTRIBUTE_UNUSED,
7829 basic_block cond_bb, void *cond_e)
7830 {
7831 gimple_stmt_iterator gsi;
7832 gimple new_cond_expr;
7833 tree cond_expr = (tree) cond_e;
7834 edge e0;
7835
7836 /* Build the new conditional expression. */
7837 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7838 NULL_TREE, NULL_TREE);
7839
7840 /* Add new cond in cond_bb. */
7841 gsi = gsi_last_bb (cond_bb);
7842 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7843
7844 /* Adjust edges appropriately to connect new head with first head
7845 as well as second head. */
7846 e0 = single_succ_edge (cond_bb);
7847 e0->flags &= ~EDGE_FALLTHRU;
7848 e0->flags |= EDGE_FALSE_VALUE;
7849 }
7850
7851
7852 /* Do book-keeping of basic block BB for the profile consistency checker.
7853 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
7854 do post-pass accounting. Store the counts in RECORD. */
7855 static void
7856 gimple_account_profile_record (basic_block bb, int after_pass,
7857 struct profile_record *record)
7858 {
7859 gimple_stmt_iterator i;
7860 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7861 {
7862 record->size[after_pass]
7863 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7864 if (profile_status == PROFILE_READ)
7865 record->time[after_pass]
7866 += estimate_num_insns (gsi_stmt (i),
7867 &eni_time_weights) * bb->count;
7868 else if (profile_status == PROFILE_GUESSED)
7869 record->time[after_pass]
7870 += estimate_num_insns (gsi_stmt (i),
7871 &eni_time_weights) * bb->frequency;
7872 }
7873 }
7874
7875 struct cfg_hooks gimple_cfg_hooks = {
7876 "gimple",
7877 gimple_verify_flow_info,
7878 gimple_dump_bb, /* dump_bb */
7879 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7880 create_bb, /* create_basic_block */
7881 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7882 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7883 gimple_can_remove_branch_p, /* can_remove_branch_p */
7884 remove_bb, /* delete_basic_block */
7885 gimple_split_block, /* split_block */
7886 gimple_move_block_after, /* move_block_after */
7887 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7888 gimple_merge_blocks, /* merge_blocks */
7889 gimple_predict_edge, /* predict_edge */
7890 gimple_predicted_by_p, /* predicted_by_p */
7891 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7892 gimple_duplicate_bb, /* duplicate_block */
7893 gimple_split_edge, /* split_edge */
7894 gimple_make_forwarder_block, /* make_forwarder_block */
7895 NULL, /* tidy_fallthru_edge */
7896 NULL, /* force_nonfallthru */
7897 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7898 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7899 gimple_flow_call_edges_add, /* flow_call_edges_add */
7900 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7901 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7902 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7903 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7904 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
7905 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7906 flush_pending_stmts, /* flush_pending_stmts */
7907 gimple_empty_block_p, /* block_empty_p */
7908 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7909 gimple_account_profile_record,
7910 };
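/* Illustrative sketch (guarded out): the generic CFG manipulation
   routines in cfghooks.c dispatch through the table above, so once
   gimple_register_cfg_hooks () has installed it, a generic call such
   as split_edge (e) ends up in gimple_split_edge.  */
#if 0
static basic_block
split_edge_via_hooks_example (edge e)
{
  gimple_register_cfg_hooks ();	/* Install gimple_cfg_hooks.  */
  return split_edge (e);	/* Dispatches to gimple_split_edge.  */
}
#endif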
7911
7912
7913 /* Split all critical edges. */
7914
7915 static unsigned int
7916 split_critical_edges (void)
7917 {
7918 basic_block bb;
7919 edge e;
7920 edge_iterator ei;
7921
7922 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7923 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7924 mappings around the calls to split_edge. */
7925 start_recording_case_labels ();
7926 FOR_ALL_BB (bb)
7927 {
7928 FOR_EACH_EDGE (e, ei, bb->succs)
7929 {
7930 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7931 split_edge (e);
7932 /* PRE inserts statements on edges and expects that, because
7933 split_critical_edges was run beforehand, committing those
7934 insertions will not split any more edges. So, in addition to
7935 critical edges, split every edge whose source ends in a control
7936 flow statement such as RESX and whose destination cannot absorb
7937 the insertion (it has other predecessors, PHI nodes, or is the
7938 exit block). This matches the logic in gimple_find_edge_insert_loc. */
7939 else if ((!single_pred_p (e->dest)
7940 || !gimple_seq_empty_p (phi_nodes (e->dest))
7941 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7942 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
7943 && !(e->flags & EDGE_ABNORMAL))
7944 {
7945 gimple_stmt_iterator gsi;
7946
7947 gsi = gsi_last_bb (e->src);
7948 if (!gsi_end_p (gsi)
7949 && stmt_ends_bb_p (gsi_stmt (gsi))
7950 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7951 && !gimple_call_builtin_p (gsi_stmt (gsi),
7952 BUILT_IN_RETURN)))
7953 split_edge (e);
7954 }
7955 }
7956 }
7957 end_recording_case_labels ();
7958 return 0;
7959 }
7960
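/* Example (illustrative): if block A ends in a conditional with
   successors C and D, and D also has predecessor B, then edge A->D is
   critical: A has multiple successors and D has multiple predecessors.
   split_edge (A->D) creates a fresh empty block N with A->N->D, giving
   statements inserted "on the edge" a place to live.  */
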
7961 namespace {
7962
7963 const pass_data pass_data_split_crit_edges =
7964 {
7965 GIMPLE_PASS, /* type */
7966 "crited", /* name */
7967 OPTGROUP_NONE, /* optinfo_flags */
7968 false, /* has_gate */
7969 true, /* has_execute */
7970 TV_TREE_SPLIT_EDGES, /* tv_id */
7971 PROP_cfg, /* properties_required */
7972 PROP_no_crit_edges, /* properties_provided */
7973 0, /* properties_destroyed */
7974 0, /* todo_flags_start */
7975 TODO_verify_flow, /* todo_flags_finish */
7976 };
7977
7978 class pass_split_crit_edges : public gimple_opt_pass
7979 {
7980 public:
7981 pass_split_crit_edges (gcc::context *ctxt)
7982 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
7983 {}
7984
7985 /* opt_pass methods: */
7986 unsigned int execute () { return split_critical_edges (); }
7987
7988 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
7989 }; // class pass_split_crit_edges
7990
7991 } // anon namespace
7992
7993 gimple_opt_pass *
7994 make_pass_split_crit_edges (gcc::context *ctxt)
7995 {
7996 return new pass_split_crit_edges (ctxt);
7997 }
7998
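/* Usage note (a sketch; the real registration lives in passes.def):
   the pass manager instantiates the pass through this factory,
   roughly

     NEXT_PASS (pass_split_crit_edges);

   and then invokes execute () per function.  */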
7999
8000 /* Build a ternary operation and gimplify it. Emit code before GSI.
8001 Return the gimple_val holding the result. */
8002
8003 tree
8004 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8005 tree type, tree a, tree b, tree c)
8006 {
8007 tree ret;
8008 location_t loc = gimple_location (gsi_stmt (*gsi));
8009
8010 ret = fold_build3_loc (loc, code, type, a, b, c);
8011 STRIP_NOPS (ret);
8012
8013 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8014 GSI_SAME_STMT);
8015 }
8016
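/* Example (illustrative): to materialize COND ? A : B before the
   statement at *GSI:

     tree t = gimplify_build3 (&gsi, COND_EXPR, type, cond, a, b);

   T is a gimple value (an SSA name or a constant) valid at that
   point.  */
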
8017 /* Build a binary operation and gimplify it. Emit code before GSI.
8018 Return the gimple_val holding the result. */
8019
8020 tree
8021 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8022 tree type, tree a, tree b)
8023 {
8024 tree ret;
8025
8026 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8027 STRIP_NOPS (ret);
8028
8029 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8030 GSI_SAME_STMT);
8031 }
8032
8033 /* Build a unary operation and gimplify it. Emit code before GSI.
8034 Return the gimple_val holding the result. */
8035
8036 tree
8037 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8038 tree a)
8039 {
8040 tree ret;
8041
8042 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8043 STRIP_NOPS (ret);
8044
8045 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8046 GSI_SAME_STMT);
8047 }
8048
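/* Example (illustrative): the three helpers compose; to compute
   -(A + B) before *GSI:

     tree s = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree n = gimplify_build1 (&gsi, NEGATE_EXPR, type, s);

   Each helper folds first, so constant operands may produce a
   constant result without emitting any statement.  */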
8049
8050 \f
8051 /* Warn about 'noreturn' functions that do return, and about control
8052 reaching the end of a non-void function. */
8053 static unsigned int
8054 execute_warn_function_return (void)
8055 {
8056 source_location location;
8057 gimple last;
8058 edge e;
8059 edge_iterator ei;
8060
8061 if (!targetm.warn_func_return (cfun->decl))
8062 return 0;
8063
8064 /* If a 'noreturn' (TREE_THIS_VOLATILE) function has a path to EXIT, it does return. */
8065 if (TREE_THIS_VOLATILE (cfun->decl)
8066 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0)
8067 {
8068 location = UNKNOWN_LOCATION;
8069 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
8070 {
8071 last = last_stmt (e->src);
8072 if ((gimple_code (last) == GIMPLE_RETURN
8073 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8074 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8075 break;
8076 }
8077 if (location == UNKNOWN_LOCATION)
8078 location = cfun->function_end_locus;
8079 warning_at (location, 0, "%<noreturn%> function does return");
8080 }
8081
8082 /* If we see "return;" in some basic block, then we do reach the end
8083 without returning a value. */
8084 else if (warn_return_type
8085 && !TREE_NO_WARNING (cfun->decl)
8086 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0
8087 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
8088 {
8089 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
8090 {
8091 gimple last = last_stmt (e->src);
8092 if (gimple_code (last) == GIMPLE_RETURN
8093 && gimple_return_retval (last) == NULL
8094 && !gimple_no_warning_p (last))
8095 {
8096 location = gimple_location (last);
8097 if (location == UNKNOWN_LOCATION)
8098 location = cfun->function_end_locus;
8099 warning_at (location, OPT_Wreturn_type,
"control reaches end of non-void function");
8100 TREE_NO_WARNING (cfun->decl) = 1;
8101 break;
8102 }
8103 }
8104 }
8105 return 0;
8106 }
8107
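/* Examples of code diagnosed above (illustrative):

     __attribute__ ((noreturn)) void f (void) { }
   warns "'noreturn' function does return", and

     int g (void) { }
   warns "control reaches end of non-void function" under
   -Wreturn-type.  */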
8108
8109 /* Given a basic block B which ends with a conditional and has
8110 precisely two successors, determine which of the edges is taken if
8111 the conditional is true and which is taken if the conditional is
8112 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8113
8114 void
8115 extract_true_false_edges_from_block (basic_block b,
8116 edge *true_edge,
8117 edge *false_edge)
8118 {
8119 edge e = EDGE_SUCC (b, 0);
8120
8121 if (e->flags & EDGE_TRUE_VALUE)
8122 {
8123 *true_edge = e;
8124 *false_edge = EDGE_SUCC (b, 1);
8125 }
8126 else
8127 {
8128 *false_edge = e;
8129 *true_edge = EDGE_SUCC (b, 1);
8130 }
8131 }
8132
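/* Example (illustrative): for a block B ending in a GIMPLE_COND,

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (b, &true_edge, &false_edge);

   This relies on B having exactly two successor edges, one of them
   flagged EDGE_TRUE_VALUE.  */
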
8133 namespace {
8134
8135 const pass_data pass_data_warn_function_return =
8136 {
8137 GIMPLE_PASS, /* type */
8138 "*warn_function_return", /* name */
8139 OPTGROUP_NONE, /* optinfo_flags */
8140 false, /* has_gate */
8141 true, /* has_execute */
8142 TV_NONE, /* tv_id */
8143 PROP_cfg, /* properties_required */
8144 0, /* properties_provided */
8145 0, /* properties_destroyed */
8146 0, /* todo_flags_start */
8147 0, /* todo_flags_finish */
8148 };
8149
8150 class pass_warn_function_return : public gimple_opt_pass
8151 {
8152 public:
8153 pass_warn_function_return (gcc::context *ctxt)
8154 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8155 {}
8156
8157 /* opt_pass methods: */
8158 unsigned int execute () { return execute_warn_function_return (); }
8159
8160 }; // class pass_warn_function_return
8161
8162 } // anon namespace
8163
8164 gimple_opt_pass *
8165 make_pass_warn_function_return (gcc::context *ctxt)
8166 {
8167 return new pass_warn_function_return (ctxt);
8168 }
8169
8170 /* Walk a gimplified function and warn about calls whose return value is
8171 ignored and whose callee has __attribute__((warn_unused_result)). This
8172 runs before inlining, so inlined call sites need not be considered. */
8173
8174 static void
8175 do_warn_unused_result (gimple_seq seq)
8176 {
8177 tree fdecl, ftype;
8178 gimple_stmt_iterator i;
8179
8180 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8181 {
8182 gimple g = gsi_stmt (i);
8183
8184 switch (gimple_code (g))
8185 {
8186 case GIMPLE_BIND:
8187 do_warn_unused_result (gimple_bind_body (g));
8188 break;
8189 case GIMPLE_TRY:
8190 do_warn_unused_result (gimple_try_eval (g));
8191 do_warn_unused_result (gimple_try_cleanup (g));
8192 break;
8193 case GIMPLE_CATCH:
8194 do_warn_unused_result (gimple_catch_handler (g));
8195 break;
8196 case GIMPLE_EH_FILTER:
8197 do_warn_unused_result (gimple_eh_filter_failure (g));
8198 break;
8199
8200 case GIMPLE_CALL:
8201 if (gimple_call_lhs (g))
8202 break;
8203 if (gimple_call_internal_p (g))
8204 break;
8205
8206 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8207 LHS. All calls whose value is ignored should be
8208 represented like this. Look for the attribute. */
8209 fdecl = gimple_call_fndecl (g);
8210 ftype = gimple_call_fntype (g);
8211
8212 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8213 {
8214 location_t loc = gimple_location (g);
8215
8216 if (fdecl)
8217 warning_at (loc, OPT_Wunused_result,
8218 "ignoring return value of %qD, "
8219 "declared with attribute warn_unused_result",
8220 fdecl);
8221 else
8222 warning_at (loc, OPT_Wunused_result,
8223 "ignoring return value of function "
8224 "declared with attribute warn_unused_result");
8225 }
8226 break;
8227
8228 default:
8229 /* Not a container, not a call, or a call whose value is used. */
8230 break;
8231 }
8232 }
8233 }
8234
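/* Example of code the walk above diagnoses (illustrative):

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   The call in g warns "ignoring return value of 'f', declared with
   attribute warn_unused_result"; assigning the result, e.g.
   "int r = f ();", suppresses it.  */
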
8235 static unsigned int
8236 run_warn_unused_result (void)
8237 {
8238 do_warn_unused_result (gimple_body (current_function_decl));
8239 return 0;
8240 }
8241
8242 static bool
8243 gate_warn_unused_result (void)
8244 {
8245 return flag_warn_unused_result;
8246 }
8247
8248 namespace {
8249
8250 const pass_data pass_data_warn_unused_result =
8251 {
8252 GIMPLE_PASS, /* type */
8253 "*warn_unused_result", /* name */
8254 OPTGROUP_NONE, /* optinfo_flags */
8255 true, /* has_gate */
8256 true, /* has_execute */
8257 TV_NONE, /* tv_id */
8258 PROP_gimple_any, /* properties_required */
8259 0, /* properties_provided */
8260 0, /* properties_destroyed */
8261 0, /* todo_flags_start */
8262 0, /* todo_flags_finish */
8263 };
8264
8265 class pass_warn_unused_result : public gimple_opt_pass
8266 {
8267 public:
8268 pass_warn_unused_result (gcc::context *ctxt)
8269 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8270 {}
8271
8272 /* opt_pass methods: */
8273 bool gate () { return gate_warn_unused_result (); }
8274 unsigned int execute () { return run_warn_unused_result (); }
8275
8276 }; // class pass_warn_unused_result
8277
8278 } // anon namespace
8279
8280 gimple_opt_pass *
8281 make_pass_warn_unused_result (gcc::context *ctxt)
8282 {
8283 return new pass_warn_unused_result (ctxt);
8284 }
8285
8286 /* IPA passes, compilation of earlier functions, or inlining
8287 might have changed some properties, such as marking functions
8288 nothrow, pure, const or noreturn.
8289 Remove redundant edges and basic blocks, and create new ones if necessary.
8290
8291 This pass can't be executed as a standalone pass from the pass manager,
8292 because in between inlining and this fixup verify_flow_info would fail. */
8293
8294 unsigned int
8295 execute_fixup_cfg (void)
8296 {
8297 basic_block bb;
8298 gimple_stmt_iterator gsi;
8299 int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
8300 gcov_type count_scale;
8301 edge e;
8302 edge_iterator ei;
8303
8304 count_scale
8305 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
8306 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8307
8308 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8309 cgraph_get_node (current_function_decl)->count;
8310 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8311 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8312 count_scale);
8313
8314 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8315 e->count = apply_scale (e->count, count_scale);
8316
8317 FOR_EACH_BB (bb)
8318 {
8319 bb->count = apply_scale (bb->count, count_scale);
8320 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8321 {
8322 gimple stmt = gsi_stmt (gsi);
8323 tree decl = is_gimple_call (stmt)
8324 ? gimple_call_fndecl (stmt)
8325 : NULL;
8326 if (decl)
8327 {
8328 int flags = gimple_call_flags (stmt);
8329 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8330 {
8331 if (gimple_purge_dead_abnormal_call_edges (bb))
8332 todo |= TODO_cleanup_cfg;
8333
8334 if (gimple_in_ssa_p (cfun))
8335 {
8336 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8337 update_stmt (stmt);
8338 }
8339 }
8340
8341 if (flags & ECF_NORETURN
8342 && fixup_noreturn_call (stmt))
8343 todo |= TODO_cleanup_cfg;
8344 }
8345
8346 if (maybe_clean_eh_stmt (stmt)
8347 && gimple_purge_dead_eh_edges (bb))
8348 todo |= TODO_cleanup_cfg;
8349 }
8350
8351 FOR_EACH_EDGE (e, ei, bb->succs)
8352 e->count = apply_scale (e->count, count_scale);
8353
8354 /* If we have a basic block with no successors that does not
8355 end with a control statement or a noreturn call, end it with
8356 a call to __builtin_unreachable. This situation can occur
8357 when inlining a noreturn call that does in fact return. */
8358 if (EDGE_COUNT (bb->succs) == 0)
8359 {
8360 gimple stmt = last_stmt (bb);
8361 if (!stmt
8362 || (!is_ctrl_stmt (stmt)
8363 && (!is_gimple_call (stmt)
8364 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8365 {
8366 stmt = gimple_build_call
8367 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8368 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8369 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8370 }
8371 }
8372 }
8373 if (count_scale != REG_BR_PROB_BASE)
8374 compute_function_frequency ();
8375
8376 /* We just processed all calls. */
8377 if (cfun->gimple_df)
8378 vec_free (MODIFIED_NORETURN_CALLS (cfun));
8379
8380 /* Dump a textual representation of the flowgraph. */
8381 if (dump_file)
8382 gimple_dump_cfg (dump_file, dump_flags);
8383
8384 if (current_loops
8385 && (todo & TODO_cleanup_cfg))
8386 loops_state_set (LOOPS_NEED_FIXUP);
8387
8388 return todo;
8389 }
8390
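/* Note on the scaling above (semantics only, ignoring rounding):

     count_scale = node->count * REG_BR_PROB_BASE / entry_block_count
     apply_scale (count, count_scale)
       = count * count_scale / REG_BR_PROB_BASE

   so every block and edge count is rescaled to match the profile
   recorded in the function's cgraph node.  */
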
8391 namespace {
8392
8393 const pass_data pass_data_fixup_cfg =
8394 {
8395 GIMPLE_PASS, /* type */
8396 "*free_cfg_annotations", /* name */
8397 OPTGROUP_NONE, /* optinfo_flags */
8398 false, /* has_gate */
8399 true, /* has_execute */
8400 TV_NONE, /* tv_id */
8401 PROP_cfg, /* properties_required */
8402 0, /* properties_provided */
8403 0, /* properties_destroyed */
8404 0, /* todo_flags_start */
8405 0, /* todo_flags_finish */
8406 };
8407
8408 class pass_fixup_cfg : public gimple_opt_pass
8409 {
8410 public:
8411 pass_fixup_cfg (gcc::context *ctxt)
8412 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8413 {}
8414
8415 /* opt_pass methods: */
8416 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8417 unsigned int execute () { return execute_fixup_cfg (); }
8418
8419 }; // class pass_fixup_cfg
8420
8421 } // anon namespace
8422
8423 gimple_opt_pass *
8424 make_pass_fixup_cfg (gcc::context *ctxt)
8425 {
8426 return new pass_fixup_cfg (ctxt);
8427 }
8428
8429 /* Garbage collection support for edge_def. */
8430
8431 extern void gt_ggc_mx (tree&);
8432 extern void gt_ggc_mx (gimple&);
8433 extern void gt_ggc_mx (rtx&);
8434 extern void gt_ggc_mx (basic_block&);
8435
8436 void
8437 gt_ggc_mx (edge_def *e)
8438 {
8439 tree block = LOCATION_BLOCK (e->goto_locus);
8440 gt_ggc_mx (e->src);
8441 gt_ggc_mx (e->dest);
8442 if (current_ir_type () == IR_GIMPLE)
8443 gt_ggc_mx (e->insns.g);
8444 else
8445 gt_ggc_mx (e->insns.r);
8446 gt_ggc_mx (block);
8447 }
8448
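/* Informational note: these are user-provided markers (edge_def
   presumably being a GTY((user)) type), called from gengtype-generated
   code.  The goto_locus block is marked explicitly because it is
   encoded in a location_t rather than stored as a pointer.  */
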
8449 /* PCH support for edge_def. */
8450
8451 extern void gt_pch_nx (tree&);
8452 extern void gt_pch_nx (gimple&);
8453 extern void gt_pch_nx (rtx&);
8454 extern void gt_pch_nx (basic_block&);
8455
8456 void
8457 gt_pch_nx (edge_def *e)
8458 {
8459 tree block = LOCATION_BLOCK (e->goto_locus);
8460 gt_pch_nx (e->src);
8461 gt_pch_nx (e->dest);
8462 if (current_ir_type () == IR_GIMPLE)
8463 gt_pch_nx (e->insns.g);
8464 else
8465 gt_pch_nx (e->insns.r);
8466 gt_pch_nx (block);
8467 }
8468
8469 void
8470 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8471 {
8472 tree block = LOCATION_BLOCK (e->goto_locus);
8473 op (&(e->src), cookie);
8474 op (&(e->dest), cookie);
8475 if (current_ir_type () == IR_GIMPLE)
8476 op (&(e->insns.g), cookie);
8477 else
8478 op (&(e->insns.r), cookie);
8479 op (&(block), cookie);
8480 }