/* Control flow functions for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
30 #include "tm_p.h"
31 #include "basic-block.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "ggc.h"
35 #include "gimple-pretty-print.h"
36 #include "gimple.h"
37 #include "gimple-iterator.h"
38 #include "gimplify-me.h"
39 #include "gimple-walk.h"
40 #include "gimple-ssa.h"
41 #include "cgraph.h"
42 #include "tree-cfg.h"
43 #include "tree-phinodes.h"
44 #include "ssa-iterators.h"
45 #include "stringpool.h"
46 #include "tree-ssanames.h"
47 #include "tree-ssa-loop-manip.h"
48 #include "tree-ssa-loop-niter.h"
49 #include "tree-into-ssa.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "tree-dump.h"
54 #include "tree-pass.h"
55 #include "diagnostic-core.h"
56 #include "except.h"
57 #include "cfgloop.h"
58 #include "tree-ssa-propagate.h"
59 #include "value-prof.h"
60 #include "pointer-set.h"
61 #include "tree-inline.h"
62 #include "target.h"
63 #include "tree-ssa-live.h"
64 #include "omp-low.h"
65 #include "tree-cfgcleanup.h"
66
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

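/* Note that both the hash and equality functions below deliberately
   look only at LOCATION_LINE, so every location on the same source
   line shares a single discriminator counter.  */
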
/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table <locus_discrim_hasher> discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (basic_block_info->length () < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}


/* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
   it and set loop->safelen to INT_MAX.  We assume that the annotation
   comes immediately before the condition.  */
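
/* A sketch of the rewrite (conceptual, not literal GIMPLE syntax):

     x = ANNOTATE (cond, annot_expr_ivdep_kind);   =>   x = cond;
     if (x) ...                                         if (x) ...

   with the enclosing loop's safelen raised to INT_MAX.  */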

static void
replace_loop_annotate ()
{
  struct loop *loop;
  loop_iterator li;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (li, loop, 0)
    {
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          gsi_prev_nondebug (&gsi);
          if (gsi_end_p (gsi))
            continue;
          stmt = gsi_stmt (gsi);
          if (gimple_code (stmt) != GIMPLE_CALL)
            continue;
          if (!gimple_call_internal_p (stmt)
              || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
            continue;
          if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
              != annot_expr_ivdep_kind)
            continue;
          stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                      gimple_call_arg (stmt, 0));
          gsi_replace (&gsi, stmt, true);
          loop->safelen = INT_MAX;
        }
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB (bb)
    {
      gsi = gsi_last_bb (bb);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        gsi_prev_nondebug (&gsi);
      if (gsi_end_p (gsi))
        continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;
      if (!gimple_call_internal_p (stmt)
          || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
        continue;
      if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
          != annot_expr_ivdep_kind)
        continue;
      warning_at (gimple_location (stmt), 0, "ignoring %<GCC ivdep%> "
                  "annotation");
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

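/* Main routine of the pass: build the CFG for the current function's
   gimple body and hand the statements over to the CFG (the body itself
   is dropped afterwards), then run an initial CFG cleanup, set up the
   loop structures and process any loop annotations.  */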
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
        other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
        {
          gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
          gimple stmt;

          if (gsi_end_p (gsi))
            return false;
          stmt = gsi_stmt (gsi);
          if (is_gimple_debug (stmt))
            {
              gsi_next_nondebug (&gsi);
              if (gsi_end_p (gsi))
                return false;
              stmt = gsi_stmt (gsi);
            }
          return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
        }
    }
  return false;
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

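/* A sketch of the factoring (conceptual):

     goto *p;                 gotovar = p;  goto <factored>;
     ...                 =>   ...
     goto *q;                 gotovar = q;  goto <factored>;
                              <factored>:  goto *gotovar;

   so only the single factored block needs edges to every possible
   destination.  */
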
static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for its goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
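          /* E.g., conceptually, "lhs = call ();" where the call may
             transfer control abnormally becomes
             "tmp = call (); lhs = tmp;", so LHS still holds its old
             value if the call exits abnormally.  */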
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            CASE_GIMPLE_OMP:
              fallthru = make_gimple_omp_edges (bb, &cur_region);
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

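/* For example, when a single source line expands to more than one basic
   block, the additional blocks get discriminators 1, 2, ... so that a
   sample-based profiler can attribute counts to each block separately.  */
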
static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
           &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple label_stmt = gsi_stmt (gsi);
          tree target;

          if (gimple_code (label_stmt) != GIMPLE_LABEL)
            break;

          target = gimple_label_label (label_stmt);

          /* Make an edge to every label block that has been marked as a
             potential target for a computed goto or a non-local goto.  */
          if ((FORCED_LABEL (target) && !for_call)
              || (DECL_NONLOCAL (target) && for_call))
            {
              make_edge (bb, target_bb, EDGE_ABNORMAL);
              break;
            }
        }
      if (!gsi_end_p (gsi)
          && is_gimple_debug (gsi_stmt (gsi)))
        gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
        {
          /* Make an edge to every setjmp-like call.  */
          gimple call_stmt = gsi_stmt (gsi);
          if (is_gimple_call (call_stmt)
              && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
            make_edge (bb, target_bb, EDGE_ABNORMAL);
        }
    }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user-forced labels, nor, when not optimizing,
         any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change something from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a GIMPLE_CALL.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}

/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (dump_file, bb, 0, dump_flags);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
1924
1925
1926 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1927 predicate VAL, return the edge that will be taken out of the block.
1928 If VAL does not match a unique edge, NULL is returned. */
1929
1930 edge
1931 find_taken_edge (basic_block bb, tree val)
1932 {
1933 gimple stmt;
1934
1935 stmt = last_stmt (bb);
1936
1937 gcc_assert (stmt);
1938 gcc_assert (is_ctrl_stmt (stmt));
1939
1940 if (val == NULL)
1941 return NULL;
1942
1943 if (!is_gimple_min_invariant (val))
1944 return NULL;
1945
1946 if (gimple_code (stmt) == GIMPLE_COND)
1947 return find_taken_edge_cond_expr (bb, val);
1948
1949 if (gimple_code (stmt) == GIMPLE_SWITCH)
1950 return find_taken_edge_switch_expr (bb, val);
1951
1952 if (computed_goto_p (stmt))
1953 {
1954 /* Only optimize if the argument is a label, if the argument is
1955 not a label then we can not construct a proper CFG.
1956
1957 It may be the case that we only need to allow the LABEL_REF to
1958 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1959 appear inside a LABEL_EXPR just to be safe. */
1960 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1961 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1962 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1963 return NULL;
1964 }
1965
1966 gcc_unreachable ();
1967 }
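/* An illustrative example (names made up): for a block BB ending in

     if (i_1 > 3) goto <bb 4>; else goto <bb 5>;

   the call

     edge e = find_taken_edge (bb, integer_zero_node);

   returns the edge to <bb 5>, because the constant predicate value 0
   selects the FALSE edge; a non-constant or NULL VAL yields NULL.  */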
1968
1969 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1970 statement, determine which of the outgoing edges will be taken out of the
1971 block. Return NULL if any edge may be taken. */
1972
1973 static edge
1974 find_taken_edge_computed_goto (basic_block bb, tree val)
1975 {
1976 basic_block dest;
1977 edge e = NULL;
1978
1979 dest = label_to_block (val);
1980 if (dest)
1981 {
1982 e = find_edge (bb, dest);
1983 gcc_assert (e != NULL);
1984 }
1985
1986 return e;
1987 }
1988
1989 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1990 statement, determine which of the two edges will be taken out of the
1991 block. Return NULL if either edge may be taken. */
1992
1993 static edge
1994 find_taken_edge_cond_expr (basic_block bb, tree val)
1995 {
1996 edge true_edge, false_edge;
1997
1998 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1999
2000 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2001 return (integer_zerop (val) ? false_edge : true_edge);
2002 }
2003
2004 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2005 statement, determine which edge will be taken out of the block. Return
2006 NULL if any edge may be taken. */
2007
2008 static edge
2009 find_taken_edge_switch_expr (basic_block bb, tree val)
2010 {
2011 basic_block dest_bb;
2012 edge e;
2013 gimple switch_stmt;
2014 tree taken_case;
2015
2016 switch_stmt = last_stmt (bb);
2017 taken_case = find_case_label_for_value (switch_stmt, val);
2018 dest_bb = label_to_block (CASE_LABEL (taken_case));
2019
2020 e = find_edge (bb, dest_bb);
2021 gcc_assert (e);
2022 return e;
2023 }
2024
2025
2026 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2027 We can make optimal use here of the fact that the case labels are
2028 sorted: We can do a binary search for a case matching VAL. */
2029
2030 static tree
2031 find_case_label_for_value (gimple switch_stmt, tree val)
2032 {
2033 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2034 tree default_case = gimple_switch_default_label (switch_stmt);
2035
2036 for (low = 0, high = n; high - low > 1; )
2037 {
2038 size_t i = (high + low) / 2;
2039 tree t = gimple_switch_label (switch_stmt, i);
2040 int cmp;
2041
2042 /* Cache the result of comparing CASE_LOW and val. */
2043 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2044
2045 if (cmp > 0)
2046 high = i;
2047 else
2048 low = i;
2049
2050 if (CASE_HIGH (t) == NULL)
2051 {
2052 /* A single-valued case label. */
2053 if (cmp == 0)
2054 return t;
2055 }
2056 else
2057 {
2058 /* A case range. We can only handle integer ranges. */
2059 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2060 return t;
2061 }
2062 }
2063
2064 return default_case;
2065 }
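/* A worked example of the binary search above (labels made up): for

     switch (x_1) { case 1: ...; case 4 ... 7: ...; default: ...; }

   the case labels are sorted by CASE_LOW, so looking up VAL = 5
   probes the range label, sees CASE_LOW (4) <= 5 <= CASE_HIGH (7)
   and returns it; VAL = 3 matches no label and the default case is
   returned.  */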
2066
2067
2068 /* Dump a basic block on stderr. */
2069
2070 void
2071 gimple_debug_bb (basic_block bb)
2072 {
2073 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2074 }
2075
2076
2077 /* Dump basic block with index N on stderr. */
2078
2079 basic_block
2080 gimple_debug_bb_n (int n)
2081 {
2082 gimple_debug_bb (BASIC_BLOCK (n));
2083 return BASIC_BLOCK (n);
2084 }
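/* These entry points are mainly for interactive use; e.g. from gdb
   (an illustrative session, assuming a cc1 being debugged):

     (gdb) call gimple_debug_bb_n (3)

   pretty-prints basic block 3 of the current function to stderr.  */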
2085
2086
2087 /* Dump the CFG on stderr.
2088
2089 FLAGS are the same used by the tree dumping functions
2090 (see TDF_* in dumpfile.h). */
2091
2092 void
2093 gimple_debug_cfg (int flags)
2094 {
2095 gimple_dump_cfg (stderr, flags);
2096 }
2097
2098
2099 /* Dump the program showing basic block boundaries on the given FILE.
2100
2101 FLAGS are the same used by the tree dumping functions (see TDF_* in
2102 tree.h). */
2103
2104 void
2105 gimple_dump_cfg (FILE *file, int flags)
2106 {
2107 if (flags & TDF_DETAILS)
2108 {
2109 dump_function_header (file, current_function_decl, flags);
2110 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2111 n_basic_blocks_for_fn (cfun), n_edges, last_basic_block);
2112
2113 brief_dump_cfg (file, flags | TDF_COMMENT);
2114 fprintf (file, "\n");
2115 }
2116
2117 if (flags & TDF_STATS)
2118 dump_cfg_stats (file);
2119
2120 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2121 }
2122
2123
2124 /* Dump CFG statistics on FILE. */
2125
2126 void
2127 dump_cfg_stats (FILE *file)
2128 {
2129 static long max_num_merged_labels = 0;
2130 unsigned long size, total = 0;
2131 long num_edges;
2132 basic_block bb;
2133 const char * const fmt_str = "%-30s%-13s%12s\n";
2134 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2135 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2136 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2137 const char *funcname = current_function_name ();
2138
2139 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2140
2141 fprintf (file, "---------------------------------------------------------\n");
2142 fprintf (file, fmt_str, "", " Number of ", "Memory");
2143 fprintf (file, fmt_str, "", " instances ", "used ");
2144 fprintf (file, "---------------------------------------------------------\n");
2145
2146 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2147 total += size;
2148 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2149 SCALE (size), LABEL (size));
2150
2151 num_edges = 0;
2152 FOR_EACH_BB (bb)
2153 num_edges += EDGE_COUNT (bb->succs);
2154 size = num_edges * sizeof (struct edge_def);
2155 total += size;
2156 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2157
2158 fprintf (file, "---------------------------------------------------------\n");
2159 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2160 LABEL (total));
2161 fprintf (file, "---------------------------------------------------------\n");
2162 fprintf (file, "\n");
2163
2164 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2165 max_num_merged_labels = cfg_stats.num_merged_labels;
2166
2167 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2168 cfg_stats.num_merged_labels, max_num_merged_labels);
2169
2170 fprintf (file, "\n");
2171 }
2172
2173
2174 /* Dump CFG statistics on stderr. Keep extern so that it's always
2175 linked in the final executable. */
2176
2177 DEBUG_FUNCTION void
2178 debug_cfg_stats (void)
2179 {
2180 dump_cfg_stats (stderr);
2181 }
2182
2183 /*---------------------------------------------------------------------------
2184 Miscellaneous helpers
2185 ---------------------------------------------------------------------------*/
2186
2187 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2188 flow. Transfers of control flow associated with EH are excluded. */
2189
2190 static bool
2191 call_can_make_abnormal_goto (gimple t)
2192 {
2193 /* If the function has no non-local labels and does not call setjmp,
2194 then a call cannot make an abnormal transfer of control. */
2195 if (!cfun->has_nonlocal_label
2196 && !cfun->calls_setjmp)
2197 return false;
2198
2199 /* Likewise if the call has no side effects. */
2200 if (!gimple_has_side_effects (t))
2201 return false;
2202
2203 /* Likewise if the called function is leaf. */
2204 if (gimple_call_flags (t) & ECF_LEAF)
2205 return false;
2206
2207 return true;
2208 }
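/* An illustrative source-level example (names made up):

     void f (void)
     {
       __label__ l;
       void g (void) { goto l; }    <-- nonlocal goto out of g
       h (g);                       <-- may transfer to l abnormally
      l:;
     }

   Here cfun->has_nonlocal_label is set, the call to h has side
   effects and h is not declared leaf, so this function returns true
   for the call.  */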
2209
2210
2211 /* Return true if T can make an abnormal transfer of control flow.
2212 Transfers of control flow associated with EH are excluded. */
2213
2214 bool
2215 stmt_can_make_abnormal_goto (gimple t)
2216 {
2217 if (computed_goto_p (t))
2218 return true;
2219 if (is_gimple_call (t))
2220 return call_can_make_abnormal_goto (t);
2221 return false;
2222 }
2223
2224
2225 /* Return true if T represents a stmt that always transfers control. */
2226
2227 bool
2228 is_ctrl_stmt (gimple t)
2229 {
2230 switch (gimple_code (t))
2231 {
2232 case GIMPLE_COND:
2233 case GIMPLE_SWITCH:
2234 case GIMPLE_GOTO:
2235 case GIMPLE_RETURN:
2236 case GIMPLE_RESX:
2237 return true;
2238 default:
2239 return false;
2240 }
2241 }
2242
2243
2244 /* Return true if T is a statement that may alter the flow of control
2245 (e.g., a call to a non-returning function). */
2246
2247 bool
2248 is_ctrl_altering_stmt (gimple t)
2249 {
2250 gcc_assert (t);
2251
2252 switch (gimple_code (t))
2253 {
2254 case GIMPLE_CALL:
2255 {
2256 int flags = gimple_call_flags (t);
2257
2258 /* A call alters control flow if it can make an abnormal goto. */
2259 if (call_can_make_abnormal_goto (t))
2260 return true;
2261
2262 /* A call also alters control flow if it does not return. */
2263 if (flags & ECF_NORETURN)
2264 return true;
2265
2266 /* TM ending statements have backedges out of the transaction.
2267 Return true so we split the basic block containing them.
2268 Note that the TM_BUILTIN test is merely an optimization. */
2269 if ((flags & ECF_TM_BUILTIN)
2270 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2271 return true;
2272
2273 /* BUILT_IN_RETURN call is same as return statement. */
2274 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2275 return true;
2276 }
2277 break;
2278
2279 case GIMPLE_EH_DISPATCH:
2280 /* EH_DISPATCH branches to the individual catch handlers at
2281 this level of a try or allowed-exceptions region. It can
2282 fallthru to the next statement as well. */
2283 return true;
2284
2285 case GIMPLE_ASM:
2286 if (gimple_asm_nlabels (t) > 0)
2287 return true;
2288 break;
2289
2290 CASE_GIMPLE_OMP:
2291 /* OpenMP directives alter control flow. */
2292 return true;
2293
2294 case GIMPLE_TRANSACTION:
2295 /* A transaction start alters control flow. */
2296 return true;
2297
2298 default:
2299 break;
2300 }
2301
2302 /* If a statement can throw, it alters control flow. */
2303 return stmt_can_throw_internal (t);
2304 }
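/* For instance, a call to a noreturn function such as

     abort ();

   is ECF_NORETURN and therefore control-altering, so it must end its
   basic block; likewise any statement for which
   stmt_can_throw_internal returns true.  */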
2305
2306
2307 /* Return true if T is a simple local goto. */
2308
2309 bool
2310 simple_goto_p (gimple t)
2311 {
2312 return (gimple_code (t) == GIMPLE_GOTO
2313 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2314 }
2315
2316
2317 /* Return true if STMT should start a new basic block. PREV_STMT is
2318 the statement preceding STMT. It is used when STMT is a label or a
2319 case label. Labels should only start a new basic block if their
2320 previous statement wasn't a label. Otherwise, sequences of labels
2321 would generate unnecessary basic blocks that only contain a single
2322 label. */
2323
2324 static inline bool
2325 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2326 {
2327 if (stmt == NULL)
2328 return false;
2329
2330 /* Labels start a new basic block only if the preceding statement
2331 wasn't a label of the same type. This prevents the creation of
2332 consecutive blocks that have nothing but a single label. */
2333 if (gimple_code (stmt) == GIMPLE_LABEL)
2334 {
2335 /* Nonlocal and computed GOTO targets always start a new block. */
2336 if (DECL_NONLOCAL (gimple_label_label (stmt))
2337 || FORCED_LABEL (gimple_label_label (stmt)))
2338 return true;
2339
2340 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2341 {
2342 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2343 return true;
2344
2345 cfg_stats.num_merged_labels++;
2346 return false;
2347 }
2348 else
2349 return true;
2350 }
2351 else if (gimple_code (stmt) == GIMPLE_CALL
2352 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2353 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2354 start a new block. */
2355 return true;
2356
2357 return false;
2358 }
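/* An illustrative example (labels made up): in

     L1:
     L2:
       x_1 = 1;

   only L1 starts a new basic block; L2 is coalesced into the same
   block (and counted in cfg_stats.num_merged_labels), unless L2 is
   nonlocal or forced, or L1 is nonlocal.  */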
2359
2360
2361 /* Return true if T should end a basic block. */
2362
2363 bool
2364 stmt_ends_bb_p (gimple t)
2365 {
2366 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2367 }
2368
2369 /* Remove block annotations and other data structures. */
2370
2371 void
2372 delete_tree_cfg_annotations (void)
2373 {
2374 vec_free (label_to_block_map);
2375 }
2376
2377
2378 /* Return the first statement in basic block BB. */
2379
2380 gimple
2381 first_stmt (basic_block bb)
2382 {
2383 gimple_stmt_iterator i = gsi_start_bb (bb);
2384 gimple stmt = NULL;
2385
2386 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2387 {
2388 gsi_next (&i);
2389 stmt = NULL;
2390 }
2391 return stmt;
2392 }
2393
2394 /* Return the first non-label statement in basic block BB. */
2395
2396 static gimple
2397 first_non_label_stmt (basic_block bb)
2398 {
2399 gimple_stmt_iterator i = gsi_start_bb (bb);
2400 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2401 gsi_next (&i);
2402 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2403 }
2404
2405 /* Return the last statement in basic block BB. */
2406
2407 gimple
2408 last_stmt (basic_block bb)
2409 {
2410 gimple_stmt_iterator i = gsi_last_bb (bb);
2411 gimple stmt = NULL;
2412
2413 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2414 {
2415 gsi_prev (&i);
2416 stmt = NULL;
2417 }
2418 return stmt;
2419 }
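/* An illustrative example (names made up): for a block ending in

     x_2 = x_1 + 1;
     # DEBUG x => x_2

   last_stmt returns the assignment, skipping the trailing debug
   statement.  */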
2420
2421 /* Return the last statement of an otherwise empty block. Return NULL
2422 if the block is totally empty, or if it contains more than one
2423 statement. */
2424
2425 gimple
2426 last_and_only_stmt (basic_block bb)
2427 {
2428 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2429 gimple last, prev;
2430
2431 if (gsi_end_p (i))
2432 return NULL;
2433
2434 last = gsi_stmt (i);
2435 gsi_prev_nondebug (&i);
2436 if (gsi_end_p (i))
2437 return last;
2438
2439 /* Empty statements should no longer appear in the instruction stream.
2440 Everything that might have appeared before should be deleted by
2441 remove_useless_stmts, and the optimizers should just gsi_remove
2442 instead of smashing with build_empty_stmt.
2443
2444 Thus the only thing that should appear here in a block containing
2445 one executable statement is a label. */
2446 prev = gsi_stmt (i);
2447 if (gimple_code (prev) == GIMPLE_LABEL)
2448 return last;
2449 else
2450 return NULL;
2451 }
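/* For example (names made up), a block consisting of

     L1:
       return x_1;

   yields the GIMPLE_RETURN, since the only other statement is a
   label; a block with two executable statements yields NULL.  */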
2452
2453 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2454
2455 static void
2456 reinstall_phi_args (edge new_edge, edge old_edge)
2457 {
2458 edge_var_map_vector *v;
2459 edge_var_map *vm;
2460 int i;
2461 gimple_stmt_iterator phis;
2462
2463 v = redirect_edge_var_map_vector (old_edge);
2464 if (!v)
2465 return;
2466
2467 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2468 v->iterate (i, &vm) && !gsi_end_p (phis);
2469 i++, gsi_next (&phis))
2470 {
2471 gimple phi = gsi_stmt (phis);
2472 tree result = redirect_edge_var_map_result (vm);
2473 tree arg = redirect_edge_var_map_def (vm);
2474
2475 gcc_assert (result == gimple_phi_result (phi));
2476
2477 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2478 }
2479
2480 redirect_edge_var_map_clear (old_edge);
2481 }
2482
2483 /* Returns the basic block after which the new basic block created
2484 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2485 near its "logical" location. This is of most help to humans looking
2486 at debugging dumps. */
2487
2488 static basic_block
2489 split_edge_bb_loc (edge edge_in)
2490 {
2491 basic_block dest = edge_in->dest;
2492 basic_block dest_prev = dest->prev_bb;
2493
2494 if (dest_prev)
2495 {
2496 edge e = find_edge (dest_prev, dest);
2497 if (e && !(e->flags & EDGE_COMPLEX))
2498 return edge_in->src;
2499 }
2500 return dest_prev;
2501 }
2502
2503 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2504 Abort on abnormal edges. */
2505
2506 static basic_block
2507 gimple_split_edge (edge edge_in)
2508 {
2509 basic_block new_bb, after_bb, dest;
2510 edge new_edge, e;
2511
2512 /* Abnormal edges cannot be split. */
2513 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2514
2515 dest = edge_in->dest;
2516
2517 after_bb = split_edge_bb_loc (edge_in);
2518
2519 new_bb = create_empty_bb (after_bb);
2520 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2521 new_bb->count = edge_in->count;
2522 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2523 new_edge->probability = REG_BR_PROB_BASE;
2524 new_edge->count = edge_in->count;
2525
2526 e = redirect_edge_and_branch (edge_in, new_bb);
2527 gcc_assert (e == edge_in);
2528 reinstall_phi_args (new_edge, e);
2529
2530 return new_bb;
2531 }
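/* An illustrative example (block numbers made up): splitting the
   critical edge <bb 2> -> <bb 4> in

     <bb 2>: if (x_1 != 0) goto <bb 4>; else goto <bb 3>;

   creates a new empty <bb 5>, redirects the branch so that <bb 2>
   targets <bb 5>, and adds a fallthru edge <bb 5> -> <bb 4>; PHI
   arguments queued on the old edge are reinstalled on the new
   fallthru edge by reinstall_phi_args.  */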
2532
2533
2534 /* Verify properties of the address expression T with base object BASE. */
2535
2536 static tree
2537 verify_address (tree t, tree base)
2538 {
2539 bool old_constant;
2540 bool old_side_effects;
2541 bool new_constant;
2542 bool new_side_effects;
2543
2544 old_constant = TREE_CONSTANT (t);
2545 old_side_effects = TREE_SIDE_EFFECTS (t);
2546
2547 recompute_tree_invariant_for_addr_expr (t);
2548 new_side_effects = TREE_SIDE_EFFECTS (t);
2549 new_constant = TREE_CONSTANT (t);
2550
2551 if (old_constant != new_constant)
2552 {
2553 error ("constant not recomputed when ADDR_EXPR changed");
2554 return t;
2555 }
2556 if (old_side_effects != new_side_effects)
2557 {
2558 error ("side effects not recomputed when ADDR_EXPR changed");
2559 return t;
2560 }
2561
2562 if (!(TREE_CODE (base) == VAR_DECL
2563 || TREE_CODE (base) == PARM_DECL
2564 || TREE_CODE (base) == RESULT_DECL))
2565 return NULL_TREE;
2566
2567 if (DECL_GIMPLE_REG_P (base))
2568 {
2569 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2570 return base;
2571 }
2572
2573 return NULL_TREE;
2574 }
2575
2576 /* Callback for walk_tree; check that all elements with address taken are
2577 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2578 inside a PHI node. */
2579
2580 static tree
2581 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2582 {
2583 tree t = *tp, x;
2584
2585 if (TYPE_P (t))
2586 *walk_subtrees = 0;
2587
2588 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2589 #define CHECK_OP(N, MSG) \
2590 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2591 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2592
2593 switch (TREE_CODE (t))
2594 {
2595 case SSA_NAME:
2596 if (SSA_NAME_IN_FREE_LIST (t))
2597 {
2598 error ("SSA name in freelist but still referenced");
2599 return *tp;
2600 }
2601 break;
2602
2603 case INDIRECT_REF:
2604 error ("INDIRECT_REF in gimple IL");
2605 return t;
2606
2607 case MEM_REF:
2608 x = TREE_OPERAND (t, 0);
2609 if (!POINTER_TYPE_P (TREE_TYPE (x))
2610 || !is_gimple_mem_ref_addr (x))
2611 {
2612 error ("invalid first operand of MEM_REF");
2613 return x;
2614 }
2615 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2616 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2617 {
2618 error ("invalid offset operand of MEM_REF");
2619 return TREE_OPERAND (t, 1);
2620 }
2621 if (TREE_CODE (x) == ADDR_EXPR
2622 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2623 return x;
2624 *walk_subtrees = 0;
2625 break;
2626
2627 case ASSERT_EXPR:
2628 x = fold (ASSERT_EXPR_COND (t));
2629 if (x == boolean_false_node)
2630 {
2631 error ("ASSERT_EXPR with an always-false condition");
2632 return *tp;
2633 }
2634 break;
2635
2636 case MODIFY_EXPR:
2637 error ("MODIFY_EXPR not expected while having tuples");
2638 return *tp;
2639
2640 case ADDR_EXPR:
2641 {
2642 tree tem;
2643
2644 gcc_assert (is_gimple_address (t));
2645
2646 /* Skip any references (they will be checked when we recurse down the
2647 tree) and ensure that any variable used as a prefix is marked
2648 addressable. */
2649 for (x = TREE_OPERAND (t, 0);
2650 handled_component_p (x);
2651 x = TREE_OPERAND (x, 0))
2652 ;
2653
2654 if ((tem = verify_address (t, x)))
2655 return tem;
2656
2657 if (!(TREE_CODE (x) == VAR_DECL
2658 || TREE_CODE (x) == PARM_DECL
2659 || TREE_CODE (x) == RESULT_DECL))
2660 return NULL;
2661
2662 if (!TREE_ADDRESSABLE (x))
2663 {
2664 error ("address taken, but ADDRESSABLE bit not set");
2665 return x;
2666 }
2667
2668 break;
2669 }
2670
2671 case COND_EXPR:
2672 x = COND_EXPR_COND (t);
2673 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2674 {
2675 error ("non-integral used in condition");
2676 return x;
2677 }
2678 if (!is_gimple_condexpr (x))
2679 {
2680 error ("invalid conditional operand");
2681 return x;
2682 }
2683 break;
2684
2685 case NON_LVALUE_EXPR:
2686 case TRUTH_NOT_EXPR:
2687 gcc_unreachable ();
2688
2689 CASE_CONVERT:
2690 case FIX_TRUNC_EXPR:
2691 case FLOAT_EXPR:
2692 case NEGATE_EXPR:
2693 case ABS_EXPR:
2694 case BIT_NOT_EXPR:
2695 CHECK_OP (0, "invalid operand to unary operator");
2696 break;
2697
2698 case REALPART_EXPR:
2699 case IMAGPART_EXPR:
2700 case BIT_FIELD_REF:
2701 if (!is_gimple_reg_type (TREE_TYPE (t)))
2702 {
2703 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2704 return t;
2705 }
2706
2707 if (TREE_CODE (t) == BIT_FIELD_REF)
2708 {
2709 if (!tree_fits_uhwi_p (TREE_OPERAND (t, 1))
2710 || !tree_fits_uhwi_p (TREE_OPERAND (t, 2)))
2711 {
2712 error ("invalid position or size operand to BIT_FIELD_REF");
2713 return t;
2714 }
2715 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2716 && (TYPE_PRECISION (TREE_TYPE (t))
2717 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2718 {
2719 error ("integral result type precision does not match "
2720 "field size of BIT_FIELD_REF");
2721 return t;
2722 }
2723 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2724 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2725 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2726 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2727 {
2728 error ("mode precision of non-integral result does not "
2729 "match field size of BIT_FIELD_REF");
2730 return t;
2731 }
2732 }
2733 t = TREE_OPERAND (t, 0);
2734
2735 /* Fall-through. */
2736 case COMPONENT_REF:
2737 case ARRAY_REF:
2738 case ARRAY_RANGE_REF:
2739 case VIEW_CONVERT_EXPR:
2740 /* We have a nest of references. Verify that each of the operands
2741 that determine where to reference is either a constant or a variable,
2742 verify that the base is valid, and then show we've already checked
2743 the subtrees. */
2744 while (handled_component_p (t))
2745 {
2746 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2747 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2748 else if (TREE_CODE (t) == ARRAY_REF
2749 || TREE_CODE (t) == ARRAY_RANGE_REF)
2750 {
2751 CHECK_OP (1, "invalid array index");
2752 if (TREE_OPERAND (t, 2))
2753 CHECK_OP (2, "invalid array lower bound");
2754 if (TREE_OPERAND (t, 3))
2755 CHECK_OP (3, "invalid array stride");
2756 }
2757 else if (TREE_CODE (t) == BIT_FIELD_REF
2758 || TREE_CODE (t) == REALPART_EXPR
2759 || TREE_CODE (t) == IMAGPART_EXPR)
2760 {
2761 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2762 "REALPART_EXPR");
2763 return t;
2764 }
2765
2766 t = TREE_OPERAND (t, 0);
2767 }
2768
2769 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2770 {
2771 error ("invalid reference prefix");
2772 return t;
2773 }
2774 *walk_subtrees = 0;
2775 break;
2776 case PLUS_EXPR:
2777 case MINUS_EXPR:
2778 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2779 should be done using POINTER_PLUS_EXPR. */
2780 if (POINTER_TYPE_P (TREE_TYPE (t)))
2781 {
2782 error ("invalid operand to plus/minus, type is a pointer");
2783 return t;
2784 }
2785 CHECK_OP (0, "invalid operand to binary operator");
2786 CHECK_OP (1, "invalid operand to binary operator");
2787 break;
2788
2789 case POINTER_PLUS_EXPR:
2790 /* Check to make sure the first operand is a pointer or reference type. */
2791 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2792 {
2793 error ("invalid operand to pointer plus, first operand is not a pointer");
2794 return t;
2795 }
2796 /* Check to make sure the second operand is a ptrofftype. */
2797 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2798 {
2799 error ("invalid operand to pointer plus, second operand is not an "
2800 "integer type of appropriate width");
2801 return t;
2802 }
2803 /* FALLTHROUGH */
2804 case LT_EXPR:
2805 case LE_EXPR:
2806 case GT_EXPR:
2807 case GE_EXPR:
2808 case EQ_EXPR:
2809 case NE_EXPR:
2810 case UNORDERED_EXPR:
2811 case ORDERED_EXPR:
2812 case UNLT_EXPR:
2813 case UNLE_EXPR:
2814 case UNGT_EXPR:
2815 case UNGE_EXPR:
2816 case UNEQ_EXPR:
2817 case LTGT_EXPR:
2818 case MULT_EXPR:
2819 case TRUNC_DIV_EXPR:
2820 case CEIL_DIV_EXPR:
2821 case FLOOR_DIV_EXPR:
2822 case ROUND_DIV_EXPR:
2823 case TRUNC_MOD_EXPR:
2824 case CEIL_MOD_EXPR:
2825 case FLOOR_MOD_EXPR:
2826 case ROUND_MOD_EXPR:
2827 case RDIV_EXPR:
2828 case EXACT_DIV_EXPR:
2829 case MIN_EXPR:
2830 case MAX_EXPR:
2831 case LSHIFT_EXPR:
2832 case RSHIFT_EXPR:
2833 case LROTATE_EXPR:
2834 case RROTATE_EXPR:
2835 case BIT_IOR_EXPR:
2836 case BIT_XOR_EXPR:
2837 case BIT_AND_EXPR:
2838 CHECK_OP (0, "invalid operand to binary operator");
2839 CHECK_OP (1, "invalid operand to binary operator");
2840 break;
2841
2842 case CONSTRUCTOR:
2843 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2844 *walk_subtrees = 0;
2845 break;
2846
2847 case CASE_LABEL_EXPR:
2848 if (CASE_CHAIN (t))
2849 {
2850 error ("invalid CASE_CHAIN");
2851 return t;
2852 }
2853 break;
2854
2855 default:
2856 break;
2857 }
2858 return NULL;
2859
2860 #undef CHECK_OP
2861 }
2862
2863
2864 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2865 Returns true if there is an error, otherwise false. */
2866
2867 static bool
2868 verify_types_in_gimple_min_lval (tree expr)
2869 {
2870 tree op;
2871
2872 if (is_gimple_id (expr))
2873 return false;
2874
2875 if (TREE_CODE (expr) != TARGET_MEM_REF
2876 && TREE_CODE (expr) != MEM_REF)
2877 {
2878 error ("invalid expression for min lvalue");
2879 return true;
2880 }
2881
2882 /* TARGET_MEM_REFs are strange beasts. */
2883 if (TREE_CODE (expr) == TARGET_MEM_REF)
2884 return false;
2885
2886 op = TREE_OPERAND (expr, 0);
2887 if (!is_gimple_val (op))
2888 {
2889 error ("invalid operand in indirect reference");
2890 debug_generic_stmt (op);
2891 return true;
2892 }
2893 /* Memory references now generally can involve a value conversion. */
2894
2895 return false;
2896 }
2897
2898 /* Verify if EXPR is a valid GIMPLE reference expression. If
2899 REQUIRE_LVALUE is true, verifies it is an lvalue. Returns true
2900 if there is an error, otherwise false. */
2901
2902 static bool
2903 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2904 {
2905 while (handled_component_p (expr))
2906 {
2907 tree op = TREE_OPERAND (expr, 0);
2908
2909 if (TREE_CODE (expr) == ARRAY_REF
2910 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2911 {
2912 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2913 || (TREE_OPERAND (expr, 2)
2914 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2915 || (TREE_OPERAND (expr, 3)
2916 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2917 {
2918 error ("invalid operands to array reference");
2919 debug_generic_stmt (expr);
2920 return true;
2921 }
2922 }
2923
2924 /* Verify if the reference array element types are compatible. */
2925 if (TREE_CODE (expr) == ARRAY_REF
2926 && !useless_type_conversion_p (TREE_TYPE (expr),
2927 TREE_TYPE (TREE_TYPE (op))))
2928 {
2929 error ("type mismatch in array reference");
2930 debug_generic_stmt (TREE_TYPE (expr));
2931 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2932 return true;
2933 }
2934 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2935 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2936 TREE_TYPE (TREE_TYPE (op))))
2937 {
2938 error ("type mismatch in array range reference");
2939 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2940 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2941 return true;
2942 }
2943
2944 if ((TREE_CODE (expr) == REALPART_EXPR
2945 || TREE_CODE (expr) == IMAGPART_EXPR)
2946 && !useless_type_conversion_p (TREE_TYPE (expr),
2947 TREE_TYPE (TREE_TYPE (op))))
2948 {
2949 error ("type mismatch in real/imagpart reference");
2950 debug_generic_stmt (TREE_TYPE (expr));
2951 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2952 return true;
2953 }
2954
2955 if (TREE_CODE (expr) == COMPONENT_REF
2956 && !useless_type_conversion_p (TREE_TYPE (expr),
2957 TREE_TYPE (TREE_OPERAND (expr, 1))))
2958 {
2959 error ("type mismatch in component reference");
2960 debug_generic_stmt (TREE_TYPE (expr));
2961 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2962 return true;
2963 }
2964
2965 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2966 {
2967 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2968 that their operand is not an SSA name or an invariant when
2969 requiring an lvalue (this usually means there is an SRA or IPA-SRA
2970 bug). Otherwise there is nothing to verify; gross mismatches at
2971 most invoke undefined behavior. */
2972 if (require_lvalue
2973 && (TREE_CODE (op) == SSA_NAME
2974 || is_gimple_min_invariant (op)))
2975 {
2976 error ("conversion of an SSA_NAME on the left hand side");
2977 debug_generic_stmt (expr);
2978 return true;
2979 }
2980 else if (TREE_CODE (op) == SSA_NAME
2981 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2982 {
2983 error ("conversion of register to a different size");
2984 debug_generic_stmt (expr);
2985 return true;
2986 }
2987 else if (!handled_component_p (op))
2988 return false;
2989 }
2990
2991 expr = op;
2992 }
2993
2994 if (TREE_CODE (expr) == MEM_REF)
2995 {
2996 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
2997 {
2998 error ("invalid address operand in MEM_REF");
2999 debug_generic_stmt (expr);
3000 return true;
3001 }
3002 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3003 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3004 {
3005 error ("invalid offset operand in MEM_REF");
3006 debug_generic_stmt (expr);
3007 return true;
3008 }
3009 }
3010 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3011 {
3012 if (!TMR_BASE (expr)
3013 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3014 {
3015 error ("invalid address operand in TARGET_MEM_REF");
3016 return true;
3017 }
3018 if (!TMR_OFFSET (expr)
3019 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3020 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3021 {
3022 error ("invalid offset operand in TARGET_MEM_REF");
3023 debug_generic_stmt (expr);
3024 return true;
3025 }
3026 }
3027
3028 return ((require_lvalue || !is_gimple_min_invariant (expr))
3029 && verify_types_in_gimple_min_lval (expr));
3030 }
3031
3032 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3033 list of pointer-to types that is trivially convertible to DEST. */
3034
3035 static bool
3036 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3037 {
3038 tree src;
3039
3040 if (!TYPE_POINTER_TO (src_obj))
3041 return true;
3042
3043 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3044 if (useless_type_conversion_p (dest, src))
3045 return true;
3046
3047 return false;
3048 }
3049
3050 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3051 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3052
3053 static bool
3054 valid_fixed_convert_types_p (tree type1, tree type2)
3055 {
3056 return (FIXED_POINT_TYPE_P (type1)
3057 && (INTEGRAL_TYPE_P (type2)
3058 || SCALAR_FLOAT_TYPE_P (type2)
3059 || FIXED_POINT_TYPE_P (type2)));
3060 }
3061
3062 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3063 is a problem, otherwise false. */
3064
3065 static bool
3066 verify_gimple_call (gimple stmt)
3067 {
3068 tree fn = gimple_call_fn (stmt);
3069 tree fntype, fndecl;
3070 unsigned i;
3071
3072 if (gimple_call_internal_p (stmt))
3073 {
3074 if (fn)
3075 {
3076 error ("gimple call has two targets");
3077 debug_generic_stmt (fn);
3078 return true;
3079 }
3080 }
3081 else
3082 {
3083 if (!fn)
3084 {
3085 error ("gimple call has no target");
3086 return true;
3087 }
3088 }
3089
3090 if (fn && !is_gimple_call_addr (fn))
3091 {
3092 error ("invalid function in gimple call");
3093 debug_generic_stmt (fn);
3094 return true;
3095 }
3096
3097 if (fn
3098 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3099 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3100 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3101 {
3102 error ("non-function in gimple call");
3103 return true;
3104 }
3105
3106 fndecl = gimple_call_fndecl (stmt);
3107 if (fndecl
3108 && TREE_CODE (fndecl) == FUNCTION_DECL
3109 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3110 && !DECL_PURE_P (fndecl)
3111 && !TREE_READONLY (fndecl))
3112 {
3113 error ("invalid pure const state for function");
3114 return true;
3115 }
3116
3117 if (gimple_call_lhs (stmt)
3118 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3119 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3120 {
3121 error ("invalid LHS in gimple call");
3122 return true;
3123 }
3124
3125 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3126 {
3127 error ("LHS in noreturn call");
3128 return true;
3129 }
3130
3131 fntype = gimple_call_fntype (stmt);
3132 if (fntype
3133 && gimple_call_lhs (stmt)
3134 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3135 TREE_TYPE (fntype))
3136 /* ??? At least C++ misses conversions at assignments from
3137 void * call results.
3138 ??? Java is completely off. Especially with functions
3139 returning java.lang.Object.
3140 For now simply allow arbitrary pointer type conversions. */
3141 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3142 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3143 {
3144 error ("invalid conversion in gimple call");
3145 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3146 debug_generic_stmt (TREE_TYPE (fntype));
3147 return true;
3148 }
3149
3150 if (gimple_call_chain (stmt)
3151 && !is_gimple_val (gimple_call_chain (stmt)))
3152 {
3153 error ("invalid static chain in gimple call");
3154 debug_generic_stmt (gimple_call_chain (stmt));
3155 return true;
3156 }
3157
3158 /* If there is a static chain argument, this should not be an indirect
3159 call, and the decl should have DECL_STATIC_CHAIN set. */
3160 if (gimple_call_chain (stmt))
3161 {
3162 if (!gimple_call_fndecl (stmt))
3163 {
3164 error ("static chain in indirect gimple call");
3165 return true;
3166 }
3167 fn = TREE_OPERAND (fn, 0);
3168
3169 if (!DECL_STATIC_CHAIN (fn))
3170 {
3171 error ("static chain with function that doesn%'t use one");
3172 return true;
3173 }
3174 }
3175
3176 /* ??? The C frontend passes unpromoted arguments in case it
3177 didn't see a function declaration before the call. So for now
3178 leave the call arguments mostly unverified. Once we gimplify
3179 unit-at-a-time we have a chance to fix this. */
3180
3181 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3182 {
3183 tree arg = gimple_call_arg (stmt, i);
3184 if ((is_gimple_reg_type (TREE_TYPE (arg))
3185 && !is_gimple_val (arg))
3186 || (!is_gimple_reg_type (TREE_TYPE (arg))
3187 && !is_gimple_lvalue (arg)))
3188 {
3189 error ("invalid argument to gimple call");
3190 debug_generic_expr (arg);
3191 return true;
3192 }
3193 }
3194
3195 return false;
3196 }
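/* An illustrative example (names made up): for a nested function

     void f (void) { int i; void g (void) { i++; } g (); }

   the gimplified call to g carries a static chain; the checks above
   require the chain operand to be a GIMPLE value, the call to be
   direct, and DECL_STATIC_CHAIN to be set on g.  */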
3197
3198 /* Verifies the gimple comparison with the result type TYPE and
3199 the operands OP0 and OP1. */
3200
3201 static bool
3202 verify_gimple_comparison (tree type, tree op0, tree op1)
3203 {
3204 tree op0_type = TREE_TYPE (op0);
3205 tree op1_type = TREE_TYPE (op1);
3206
3207 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3208 {
3209 error ("invalid operands in gimple comparison");
3210 return true;
3211 }
3212
3213 /* For comparisons we do not record the effective type the
3214 comparison is carried out in as an operation type. Instead
3215 we require that either the first operand is trivially
3216 convertible into the second, or the other way around.
3217 Because we special-case pointers to void we allow
3218 comparisons of pointers with the same mode as well. */
3219 if (!useless_type_conversion_p (op0_type, op1_type)
3220 && !useless_type_conversion_p (op1_type, op0_type)
3221 && (!POINTER_TYPE_P (op0_type)
3222 || !POINTER_TYPE_P (op1_type)
3223 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3224 {
3225 error ("mismatching comparison operand types");
3226 debug_generic_expr (op0_type);
3227 debug_generic_expr (op1_type);
3228 return true;
3229 }
3230
3231 /* The resulting type of a comparison may be an effective boolean type. */
3232 if (INTEGRAL_TYPE_P (type)
3233 && (TREE_CODE (type) == BOOLEAN_TYPE
3234 || TYPE_PRECISION (type) == 1))
3235 {
3236 if (TREE_CODE (op0_type) == VECTOR_TYPE
3237 || TREE_CODE (op1_type) == VECTOR_TYPE)
3238 {
3239 error ("vector comparison returning a boolean");
3240 debug_generic_expr (op0_type);
3241 debug_generic_expr (op1_type);
3242 return true;
3243 }
3244 }
3245 /* Or an integer vector type with the same size and element count
3246 as the comparison operand types. */
3247 else if (TREE_CODE (type) == VECTOR_TYPE
3248 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3249 {
3250 if (TREE_CODE (op0_type) != VECTOR_TYPE
3251 || TREE_CODE (op1_type) != VECTOR_TYPE)
3252 {
3253 error ("non-vector operands in vector comparison");
3254 debug_generic_expr (op0_type);
3255 debug_generic_expr (op1_type);
3256 return true;
3257 }
3258
3259 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3260 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3261 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3262 /* The result of a vector comparison is of signed
3263 integral type. */
3264 || TYPE_UNSIGNED (TREE_TYPE (type)))
3265 {
3266 error ("invalid vector comparison resulting type");
3267 debug_generic_expr (type);
3268 return true;
3269 }
3270 }
3271 else
3272 {
3273 error ("bogus comparison result type");
3274 debug_generic_expr (type);
3275 return true;
3276 }
3277
3278 return false;
3279 }
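/* An illustrative example (names made up): a comparison

     _3 = p_1 == q_2;

   where p_1 is void * and q_2 is int * is accepted because both
   operand types are pointers of the same mode, while e.g. an
   int-vs-double comparison without an explicit conversion is
   rejected as mismatching.  */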
3280
3281 /* Verify a gimple assignment statement STMT with an unary rhs.
3282 Returns true if anything is wrong. */
3283
3284 static bool
3285 verify_gimple_assign_unary (gimple stmt)
3286 {
3287 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3288 tree lhs = gimple_assign_lhs (stmt);
3289 tree lhs_type = TREE_TYPE (lhs);
3290 tree rhs1 = gimple_assign_rhs1 (stmt);
3291 tree rhs1_type = TREE_TYPE (rhs1);
3292
3293 if (!is_gimple_reg (lhs))
3294 {
3295 error ("non-register as LHS of unary operation");
3296 return true;
3297 }
3298
3299 if (!is_gimple_val (rhs1))
3300 {
3301 error ("invalid operand in unary operation");
3302 return true;
3303 }
3304
3305 /* First handle conversions. */
3306 switch (rhs_code)
3307 {
3308 CASE_CONVERT:
3309 {
3310 /* Allow conversions from pointer type to integral type only if
3311 there is no sign or zero extension involved.
3312 For targets where the precision of ptrofftype doesn't match that
3313 of pointers we need to allow arbitrary conversions to ptrofftype. */
3314 if ((POINTER_TYPE_P (lhs_type)
3315 && INTEGRAL_TYPE_P (rhs1_type))
3316 || (POINTER_TYPE_P (rhs1_type)
3317 && INTEGRAL_TYPE_P (lhs_type)
3318 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3319 || ptrofftype_p (sizetype))))
3320 return false;
3321
3322 /* Allow conversion from integral to offset type and vice versa. */
3323 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3324 && INTEGRAL_TYPE_P (rhs1_type))
3325 || (INTEGRAL_TYPE_P (lhs_type)
3326 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3327 return false;
3328
3329 /* Otherwise assert we are converting between types of the
3330 same kind. */
3331 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3332 {
3333 error ("invalid types in nop conversion");
3334 debug_generic_expr (lhs_type);
3335 debug_generic_expr (rhs1_type);
3336 return true;
3337 }
3338
3339 return false;
3340 }
3341
3342 case ADDR_SPACE_CONVERT_EXPR:
3343 {
3344 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3345 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3346 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3347 {
3348 error ("invalid types in address space conversion");
3349 debug_generic_expr (lhs_type);
3350 debug_generic_expr (rhs1_type);
3351 return true;
3352 }
3353
3354 return false;
3355 }
3356
3357 case FIXED_CONVERT_EXPR:
3358 {
3359 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3360 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3361 {
3362 error ("invalid types in fixed-point conversion");
3363 debug_generic_expr (lhs_type);
3364 debug_generic_expr (rhs1_type);
3365 return true;
3366 }
3367
3368 return false;
3369 }
3370
3371 case FLOAT_EXPR:
3372 {
3373 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3374 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3375 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3376 {
3377 error ("invalid types in conversion to floating point");
3378 debug_generic_expr (lhs_type);
3379 debug_generic_expr (rhs1_type);
3380 return true;
3381 }
3382
3383 return false;
3384 }
3385
3386 case FIX_TRUNC_EXPR:
3387 {
3388 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3389 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3390 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3391 {
3392 error ("invalid types in conversion to integer");
3393 debug_generic_expr (lhs_type);
3394 debug_generic_expr (rhs1_type);
3395 return true;
3396 }
3397
3398 return false;
3399 }
3400
3401 case VEC_UNPACK_HI_EXPR:
3402 case VEC_UNPACK_LO_EXPR:
3403 case REDUC_MAX_EXPR:
3404 case REDUC_MIN_EXPR:
3405 case REDUC_PLUS_EXPR:
3406 case VEC_UNPACK_FLOAT_HI_EXPR:
3407 case VEC_UNPACK_FLOAT_LO_EXPR:
3408 /* FIXME. */
3409 return false;
3410
3411 case NEGATE_EXPR:
3412 case ABS_EXPR:
3413 case BIT_NOT_EXPR:
3414 case PAREN_EXPR:
3415 case NON_LVALUE_EXPR:
3416 case CONJ_EXPR:
3417 break;
3418
3419 default:
3420 gcc_unreachable ();
3421 }
3422
3423 /* For the remaining codes assert there is no conversion involved. */
3424 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3425 {
3426 error ("non-trivial conversion in unary operation");
3427 debug_generic_expr (lhs_type);
3428 debug_generic_expr (rhs1_type);
3429 return true;
3430 }
3431
3432 return false;
3433 }
3434
3435 /* Verify a gimple assignment statement STMT with a binary rhs.
3436 Returns true if anything is wrong. */
3437
3438 static bool
3439 verify_gimple_assign_binary (gimple stmt)
3440 {
3441 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3442 tree lhs = gimple_assign_lhs (stmt);
3443 tree lhs_type = TREE_TYPE (lhs);
3444 tree rhs1 = gimple_assign_rhs1 (stmt);
3445 tree rhs1_type = TREE_TYPE (rhs1);
3446 tree rhs2 = gimple_assign_rhs2 (stmt);
3447 tree rhs2_type = TREE_TYPE (rhs2);
3448
3449 if (!is_gimple_reg (lhs))
3450 {
3451 error ("non-register as LHS of binary operation");
3452 return true;
3453 }
3454
3455 if (!is_gimple_val (rhs1)
3456 || !is_gimple_val (rhs2))
3457 {
3458 error ("invalid operands in binary operation");
3459 return true;
3460 }
3461
3462 /* First handle operations that involve different types. */
3463 switch (rhs_code)
3464 {
3465 case COMPLEX_EXPR:
3466 {
3467 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3468 || !(INTEGRAL_TYPE_P (rhs1_type)
3469 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3470 || !(INTEGRAL_TYPE_P (rhs2_type)
3471 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3472 {
3473 error ("type mismatch in complex expression");
3474 debug_generic_expr (lhs_type);
3475 debug_generic_expr (rhs1_type);
3476 debug_generic_expr (rhs2_type);
3477 return true;
3478 }
3479
3480 return false;
3481 }
3482
3483 case LSHIFT_EXPR:
3484 case RSHIFT_EXPR:
3485 case LROTATE_EXPR:
3486 case RROTATE_EXPR:
3487 {
3488 /* Shifts and rotates are ok on integral types, fixed point
3489 types and integer vector types. */
3490 if ((!INTEGRAL_TYPE_P (rhs1_type)
3491 && !FIXED_POINT_TYPE_P (rhs1_type)
3492 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3493 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3494 || (!INTEGRAL_TYPE_P (rhs2_type)
3495 /* Vector shifts of vectors are also ok. */
3496 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3497 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3498 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3499 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3500 || !useless_type_conversion_p (lhs_type, rhs1_type))
3501 {
3502 error ("type mismatch in shift expression");
3503 debug_generic_expr (lhs_type);
3504 debug_generic_expr (rhs1_type);
3505 debug_generic_expr (rhs2_type);
3506 return true;
3507 }
3508
3509 return false;
3510 }
3511
3512 case VEC_LSHIFT_EXPR:
3513 case VEC_RSHIFT_EXPR:
3514 {
3515 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3516 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3517 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3518 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3519 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3520 || (!INTEGRAL_TYPE_P (rhs2_type)
3521 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3522 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3523 || !useless_type_conversion_p (lhs_type, rhs1_type))
3524 {
3525 error ("type mismatch in vector shift expression");
3526 debug_generic_expr (lhs_type);
3527 debug_generic_expr (rhs1_type);
3528 debug_generic_expr (rhs2_type);
3529 return true;
3530 }
3531 /* For shifting a vector of non-integral components we
3532 only allow shifting by a constant multiple of the element size. */
3533 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3534 && (TREE_CODE (rhs2) != INTEGER_CST
3535 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3536 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3537 {
3538 error ("non-element sized vector shift of floating point vector");
3539 return true;
3540 }
3541
3542 return false;
3543 }
3544
3545 case WIDEN_LSHIFT_EXPR:
3546 {
3547 if (!INTEGRAL_TYPE_P (lhs_type)
3548 || !INTEGRAL_TYPE_P (rhs1_type)
3549 || TREE_CODE (rhs2) != INTEGER_CST
3550 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3551 {
3552 error ("type mismatch in widening vector shift expression");
3553 debug_generic_expr (lhs_type);
3554 debug_generic_expr (rhs1_type);
3555 debug_generic_expr (rhs2_type);
3556 return true;
3557 }
3558
3559 return false;
3560 }
3561
3562 case VEC_WIDEN_LSHIFT_HI_EXPR:
3563 case VEC_WIDEN_LSHIFT_LO_EXPR:
3564 {
3565 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3566 || TREE_CODE (lhs_type) != VECTOR_TYPE
3567 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3568 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3569 || TREE_CODE (rhs2) != INTEGER_CST
3570 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3571 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3572 {
3573 error ("type mismatch in widening vector shift expression");
3574 debug_generic_expr (lhs_type);
3575 debug_generic_expr (rhs1_type);
3576 debug_generic_expr (rhs2_type);
3577 return true;
3578 }
3579
3580 return false;
3581 }
3582
3583 case PLUS_EXPR:
3584 case MINUS_EXPR:
3585 {
3586 tree lhs_etype = lhs_type;
3587 tree rhs1_etype = rhs1_type;
3588 tree rhs2_etype = rhs2_type;
3589 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3590 {
3591 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3592 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3593 {
3594 error ("invalid non-vector operands to vector valued plus");
3595 return true;
3596 }
3597 lhs_etype = TREE_TYPE (lhs_type);
3598 rhs1_etype = TREE_TYPE (rhs1_type);
3599 rhs2_etype = TREE_TYPE (rhs2_type);
3600 }
3601 if (POINTER_TYPE_P (lhs_etype)
3602 || POINTER_TYPE_P (rhs1_etype)
3603 || POINTER_TYPE_P (rhs2_etype))
3604 {
3605 error ("invalid (pointer) operands to plus/minus");
3606 return true;
3607 }
3608
3609 /* Continue with generic binary expression handling. */
3610 break;
3611 }
3612
3613 case POINTER_PLUS_EXPR:
3614 {
3615 if (!POINTER_TYPE_P (rhs1_type)
3616 || !useless_type_conversion_p (lhs_type, rhs1_type)
3617 || !ptrofftype_p (rhs2_type))
3618 {
3619 error ("type mismatch in pointer plus expression");
3620 debug_generic_stmt (lhs_type);
3621 debug_generic_stmt (rhs1_type);
3622 debug_generic_stmt (rhs2_type);
3623 return true;
3624 }
3625
3626 return false;
3627 }
3628
3629 case TRUTH_ANDIF_EXPR:
3630 case TRUTH_ORIF_EXPR:
3631 case TRUTH_AND_EXPR:
3632 case TRUTH_OR_EXPR:
3633 case TRUTH_XOR_EXPR:
3634
3635 gcc_unreachable ();
3636
3637 case LT_EXPR:
3638 case LE_EXPR:
3639 case GT_EXPR:
3640 case GE_EXPR:
3641 case EQ_EXPR:
3642 case NE_EXPR:
3643 case UNORDERED_EXPR:
3644 case ORDERED_EXPR:
3645 case UNLT_EXPR:
3646 case UNLE_EXPR:
3647 case UNGT_EXPR:
3648 case UNGE_EXPR:
3649 case UNEQ_EXPR:
3650 case LTGT_EXPR:
3651 /* Comparisons are also binary, but the result type is not
3652 connected to the operand types. */
3653 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3654
3655 case WIDEN_MULT_EXPR:
3656 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3657 return true;
3658 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3659 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3660
3661 case WIDEN_SUM_EXPR:
3662 case VEC_WIDEN_MULT_HI_EXPR:
3663 case VEC_WIDEN_MULT_LO_EXPR:
3664 case VEC_WIDEN_MULT_EVEN_EXPR:
3665 case VEC_WIDEN_MULT_ODD_EXPR:
3666 case VEC_PACK_TRUNC_EXPR:
3667 case VEC_PACK_SAT_EXPR:
3668 case VEC_PACK_FIX_TRUNC_EXPR:
3669 /* FIXME. */
3670 return false;
3671
3672 case MULT_EXPR:
3673 case MULT_HIGHPART_EXPR:
3674 case TRUNC_DIV_EXPR:
3675 case CEIL_DIV_EXPR:
3676 case FLOOR_DIV_EXPR:
3677 case ROUND_DIV_EXPR:
3678 case TRUNC_MOD_EXPR:
3679 case CEIL_MOD_EXPR:
3680 case FLOOR_MOD_EXPR:
3681 case ROUND_MOD_EXPR:
3682 case RDIV_EXPR:
3683 case EXACT_DIV_EXPR:
3684 case MIN_EXPR:
3685 case MAX_EXPR:
3686 case BIT_IOR_EXPR:
3687 case BIT_XOR_EXPR:
3688 case BIT_AND_EXPR:
3689 /* Continue with generic binary expression handling. */
3690 break;
3691
3692 default:
3693 gcc_unreachable ();
3694 }
3695
3696 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3697 || !useless_type_conversion_p (lhs_type, rhs2_type))
3698 {
3699 error ("type mismatch in binary expression");
3700 debug_generic_stmt (lhs_type);
3701 debug_generic_stmt (rhs1_type);
3702 debug_generic_stmt (rhs2_type);
3703 return true;
3704 }
3705
3706 return false;
3707 }
3708
3709 /* Verify a gimple assignment statement STMT with a ternary rhs.
3710 Returns true if anything is wrong. */
3711
3712 static bool
3713 verify_gimple_assign_ternary (gimple stmt)
3714 {
3715 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3716 tree lhs = gimple_assign_lhs (stmt);
3717 tree lhs_type = TREE_TYPE (lhs);
3718 tree rhs1 = gimple_assign_rhs1 (stmt);
3719 tree rhs1_type = TREE_TYPE (rhs1);
3720 tree rhs2 = gimple_assign_rhs2 (stmt);
3721 tree rhs2_type = TREE_TYPE (rhs2);
3722 tree rhs3 = gimple_assign_rhs3 (stmt);
3723 tree rhs3_type = TREE_TYPE (rhs3);
3724
3725 if (!is_gimple_reg (lhs))
3726 {
3727 error ("non-register as LHS of ternary operation");
3728 return true;
3729 }
3730
3731 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3732 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3733 || !is_gimple_val (rhs2)
3734 || !is_gimple_val (rhs3))
3735 {
3736 error ("invalid operands in ternary operation");
3737 return true;
3738 }
3739
3740 /* First handle operations that involve different types. */
3741 switch (rhs_code)
3742 {
3743 case WIDEN_MULT_PLUS_EXPR:
3744 case WIDEN_MULT_MINUS_EXPR:
3745 if ((!INTEGRAL_TYPE_P (rhs1_type)
3746 && !FIXED_POINT_TYPE_P (rhs1_type))
3747 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3748 || !useless_type_conversion_p (lhs_type, rhs3_type)
3749 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3750 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3751 {
3752 error ("type mismatch in widening multiply-accumulate expression");
3753 debug_generic_expr (lhs_type);
3754 debug_generic_expr (rhs1_type);
3755 debug_generic_expr (rhs2_type);
3756 debug_generic_expr (rhs3_type);
3757 return true;
3758 }
3759 break;
3760
3761 case FMA_EXPR:
3762 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3763 || !useless_type_conversion_p (lhs_type, rhs2_type)
3764 || !useless_type_conversion_p (lhs_type, rhs3_type))
3765 {
3766 error ("type mismatch in fused multiply-add expression");
3767 debug_generic_expr (lhs_type);
3768 debug_generic_expr (rhs1_type);
3769 debug_generic_expr (rhs2_type);
3770 debug_generic_expr (rhs3_type);
3771 return true;
3772 }
3773 break;
3774
3775 case COND_EXPR:
3776 case VEC_COND_EXPR:
3777 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3778 || !useless_type_conversion_p (lhs_type, rhs3_type))
3779 {
3780 error ("type mismatch in conditional expression");
3781 debug_generic_expr (lhs_type);
3782 debug_generic_expr (rhs2_type);
3783 debug_generic_expr (rhs3_type);
3784 return true;
3785 }
3786 break;
3787
3788 case VEC_PERM_EXPR:
3789 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3790 || !useless_type_conversion_p (lhs_type, rhs2_type))
3791 {
3792 error ("type mismatch in vector permute expression");
3793 debug_generic_expr (lhs_type);
3794 debug_generic_expr (rhs1_type);
3795 debug_generic_expr (rhs2_type);
3796 debug_generic_expr (rhs3_type);
3797 return true;
3798 }
3799
3800 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3801 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3802 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3803 {
3804 error ("vector types expected in vector permute expression");
3805 debug_generic_expr (lhs_type);
3806 debug_generic_expr (rhs1_type);
3807 debug_generic_expr (rhs2_type);
3808 debug_generic_expr (rhs3_type);
3809 return true;
3810 }
3811
3812 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3813 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3814 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3815 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3816 != TYPE_VECTOR_SUBPARTS (lhs_type))
3817 {
3818 error ("vectors with different element number found "
3819 "in vector permute expression");
3820 debug_generic_expr (lhs_type);
3821 debug_generic_expr (rhs1_type);
3822 debug_generic_expr (rhs2_type);
3823 debug_generic_expr (rhs3_type);
3824 return true;
3825 }
3826
3827 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3828 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3829 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3830 {
3831 error ("invalid mask type in vector permute expression");
3832 debug_generic_expr (lhs_type);
3833 debug_generic_expr (rhs1_type);
3834 debug_generic_expr (rhs2_type);
3835 debug_generic_expr (rhs3_type);
3836 return true;
3837 }
3838
3839 return false;
3840
3841 case DOT_PROD_EXPR:
3842 case REALIGN_LOAD_EXPR:
3843 /* FIXME. */
3844 return false;
3845
3846 default:
3847 gcc_unreachable ();
3848 }
3849 return false;
3850 }
3851
3852 /* Verify a gimple assignment statement STMT with a single rhs.
3853 Returns true if anything is wrong. */
3854
3855 static bool
3856 verify_gimple_assign_single (gimple stmt)
3857 {
3858 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3859 tree lhs = gimple_assign_lhs (stmt);
3860 tree lhs_type = TREE_TYPE (lhs);
3861 tree rhs1 = gimple_assign_rhs1 (stmt);
3862 tree rhs1_type = TREE_TYPE (rhs1);
3863 bool res = false;
3864
3865 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3866 {
3867 error ("non-trivial conversion at assignment");
3868 debug_generic_expr (lhs_type);
3869 debug_generic_expr (rhs1_type);
3870 return true;
3871 }
3872
3873 if (gimple_clobber_p (stmt)
3874 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3875 {
3876 error ("non-decl/MEM_REF LHS in clobber statement");
3877 debug_generic_expr (lhs);
3878 return true;
3879 }
3880
3881 if (handled_component_p (lhs))
3882 res |= verify_types_in_gimple_reference (lhs, true);
3883
3884 /* Special codes we cannot handle via their class. */
3885 switch (rhs_code)
3886 {
3887 case ADDR_EXPR:
3888 {
3889 tree op = TREE_OPERAND (rhs1, 0);
3890 if (!is_gimple_addressable (op))
3891 {
3892 error ("invalid operand in unary expression");
3893 return true;
3894 }
3895
3896 /* Technically there is no longer a need for matching types, but
3897 gimple hygiene asks for this check. In LTO we can end up
3898 combining incompatible units and thus end up with addresses
3899 of globals that change their type to a common one. */
3900 if (!in_lto_p
3901 && !types_compatible_p (TREE_TYPE (op),
3902 TREE_TYPE (TREE_TYPE (rhs1)))
3903 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3904 TREE_TYPE (op)))
3905 {
3906 error ("type mismatch in address expression");
3907 debug_generic_stmt (TREE_TYPE (rhs1));
3908 debug_generic_stmt (TREE_TYPE (op));
3909 return true;
3910 }
3911
3912 return verify_types_in_gimple_reference (op, true);
3913 }
3914
3915 /* tcc_reference */
3916 case INDIRECT_REF:
3917 error ("INDIRECT_REF in gimple IL");
3918 return true;
3919
3920 case COMPONENT_REF:
3921 case BIT_FIELD_REF:
3922 case ARRAY_REF:
3923 case ARRAY_RANGE_REF:
3924 case VIEW_CONVERT_EXPR:
3925 case REALPART_EXPR:
3926 case IMAGPART_EXPR:
3927 case TARGET_MEM_REF:
3928 case MEM_REF:
3929 if (!is_gimple_reg (lhs)
3930 && is_gimple_reg_type (TREE_TYPE (lhs)))
3931 {
3932 error ("invalid rhs for gimple memory store");
3933 debug_generic_stmt (lhs);
3934 debug_generic_stmt (rhs1);
3935 return true;
3936 }
3937 return res || verify_types_in_gimple_reference (rhs1, false);
3938
3939 /* tcc_constant */
3940 case SSA_NAME:
3941 case INTEGER_CST:
3942 case REAL_CST:
3943 case FIXED_CST:
3944 case COMPLEX_CST:
3945 case VECTOR_CST:
3946 case STRING_CST:
3947 return res;
3948
3949 /* tcc_declaration */
3950 case CONST_DECL:
3951 return res;
3952 case VAR_DECL:
3953 case PARM_DECL:
3954 if (!is_gimple_reg (lhs)
3955 && !is_gimple_reg (rhs1)
3956 && is_gimple_reg_type (TREE_TYPE (lhs)))
3957 {
3958 error ("invalid rhs for gimple memory store");
3959 debug_generic_stmt (lhs);
3960 debug_generic_stmt (rhs1);
3961 return true;
3962 }
3963 return res;
3964
3965 case CONSTRUCTOR:
3966 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3967 {
3968 unsigned int i;
3969 tree elt_i, elt_v, elt_t = NULL_TREE;
3970
3971 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3972 return res;
3973 	  /* For vector CONSTRUCTORs we require that either it is an empty
3974 	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3975 	     (then the element count must be correct to cover the whole
3976 	     outer vector and the index must be NULL on all elements), or
3977 	     it is a CONSTRUCTOR of scalar elements, where as an exception
3978 	     we allow a smaller number of elements (assuming zero filling)
3979 	     and consecutive indexes as compared to NULL indexes (such
3980 	     CONSTRUCTORs can appear in the IL from FEs).  */
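	  /* A few illustrative forms (approximate GIMPLE), with a V4SI LHS:
	       _1 = {_2, _3};           two V2SI elements, NULL indexes: OK
	       _1 = {_4, _5, _6, _7};   four SI elements: OK
	       _1 = {_4, _5};           two SI elements, zero filled: OK
	     while an index that is non-NULL and not the consecutive
	     position is rejected by the checks below.  */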
3981 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3982 {
3983 if (elt_t == NULL_TREE)
3984 {
3985 elt_t = TREE_TYPE (elt_v);
3986 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3987 {
3989 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3990 TREE_TYPE (elt_t)))
3991 {
3992 error ("incorrect type of vector CONSTRUCTOR"
3993 " elements");
3994 debug_generic_stmt (rhs1);
3995 return true;
3996 }
3997 else if (CONSTRUCTOR_NELTS (rhs1)
3998 * TYPE_VECTOR_SUBPARTS (elt_t)
3999 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4000 {
4001 error ("incorrect number of vector CONSTRUCTOR"
4002 " elements");
4003 debug_generic_stmt (rhs1);
4004 return true;
4005 }
4006 }
4007 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4008 elt_t))
4009 {
4010 error ("incorrect type of vector CONSTRUCTOR elements");
4011 debug_generic_stmt (rhs1);
4012 return true;
4013 }
4014 else if (CONSTRUCTOR_NELTS (rhs1)
4015 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4016 {
4017 error ("incorrect number of vector CONSTRUCTOR elements");
4018 debug_generic_stmt (rhs1);
4019 return true;
4020 }
4021 }
4022 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4023 {
4024 error ("incorrect type of vector CONSTRUCTOR elements");
4025 debug_generic_stmt (rhs1);
4026 return true;
4027 }
4028 if (elt_i != NULL_TREE
4029 && (TREE_CODE (elt_t) == VECTOR_TYPE
4030 || TREE_CODE (elt_i) != INTEGER_CST
4031 || compare_tree_int (elt_i, i) != 0))
4032 {
4033 error ("vector CONSTRUCTOR with non-NULL element index");
4034 debug_generic_stmt (rhs1);
4035 return true;
4036 }
4037 }
4038 }
4039 return res;
4040 case OBJ_TYPE_REF:
4041 case ASSERT_EXPR:
4042 case WITH_SIZE_EXPR:
4043 /* FIXME. */
4044 return res;
4045
4046 default:;
4047 }
4048
4049 return res;
4050 }
4051
4052 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4053 is a problem, otherwise false. */
4054
4055 static bool
4056 verify_gimple_assign (gimple stmt)
4057 {
4058 switch (gimple_assign_rhs_class (stmt))
4059 {
4060 case GIMPLE_SINGLE_RHS:
4061 return verify_gimple_assign_single (stmt);
4062
4063 case GIMPLE_UNARY_RHS:
4064 return verify_gimple_assign_unary (stmt);
4065
4066 case GIMPLE_BINARY_RHS:
4067 return verify_gimple_assign_binary (stmt);
4068
4069 case GIMPLE_TERNARY_RHS:
4070 return verify_gimple_assign_ternary (stmt);
4071
4072 default:
4073 gcc_unreachable ();
4074 }
4075 }
4076
4077 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4078 is a problem, otherwise false. */
4079
4080 static bool
4081 verify_gimple_return (gimple stmt)
4082 {
4083 tree op = gimple_return_retval (stmt);
4084 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4085
4086 /* We cannot test for present return values as we do not fix up missing
4087 return values from the original source. */
4088 if (op == NULL)
4089 return false;
4090
4091 if (!is_gimple_val (op)
4092 && TREE_CODE (op) != RESULT_DECL)
4093 {
4094 error ("invalid operand in return statement");
4095 debug_generic_stmt (op);
4096 return true;
4097 }
4098
4099 if ((TREE_CODE (op) == RESULT_DECL
4100 && DECL_BY_REFERENCE (op))
4101 || (TREE_CODE (op) == SSA_NAME
4102 && SSA_NAME_VAR (op)
4103 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4104 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4105 op = TREE_TYPE (op);
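
  /* For instance (illustrative): for "struct S f (void)" returned via
     invisible reference, the RESULT_DECL has type "struct S *"; OP now
     holds that pointer type, so TREE_TYPE (op) below compares the
     pointed-to "struct S" against RESTYPE.  */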
4106
4107 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4108 {
4109 error ("invalid conversion in return statement");
4110 debug_generic_stmt (restype);
4111 debug_generic_stmt (TREE_TYPE (op));
4112 return true;
4113 }
4114
4115 return false;
4116 }
4117
4118
4119 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4120 is a problem, otherwise false. */
4121
4122 static bool
4123 verify_gimple_goto (gimple stmt)
4124 {
4125 tree dest = gimple_goto_dest (stmt);
4126
4127 /* ??? We have two canonical forms of direct goto destinations, a
4128 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
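  /* Illustrative (approximate dump forms): "goto <D.1234>;" is a
     direct goto to a bare LABEL_DECL, while "goto *p_1;" is a computed
     goto whose destination is the pointer P_1.  */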
4129 if (TREE_CODE (dest) != LABEL_DECL
4130 && (!is_gimple_val (dest)
4131 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4132 {
4133 error ("goto destination is neither a label nor a pointer");
4134 return true;
4135 }
4136
4137 return false;
4138 }
4139
4140 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4141 is a problem, otherwise false. */
4142
4143 static bool
4144 verify_gimple_switch (gimple stmt)
4145 {
4146 unsigned int i, n;
4147 tree elt, prev_upper_bound = NULL_TREE;
4148 tree index_type, elt_type = NULL_TREE;
4149
4150 if (!is_gimple_val (gimple_switch_index (stmt)))
4151 {
4152 error ("invalid operand to switch statement");
4153 debug_generic_stmt (gimple_switch_index (stmt));
4154 return true;
4155 }
4156
4157 index_type = TREE_TYPE (gimple_switch_index (stmt));
4158 if (! INTEGRAL_TYPE_P (index_type))
4159 {
4160 error ("non-integral type switch statement");
4161 debug_generic_expr (index_type);
4162 return true;
4163 }
4164
4165 elt = gimple_switch_label (stmt, 0);
4166 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4167 {
4168 error ("invalid default case label in switch statement");
4169 debug_generic_expr (elt);
4170 return true;
4171 }
4172
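  /* What the loop below accepts, illustrated (approximate dump form):

       switch (i_1) <default: <L0>, case 1: <L1>, case 3 ... 5: <L2>>

     the default label comes first with no CASE_LOW/CASE_HIGH, every
     other label has a CASE_LOW, ranges satisfy low < high, all labels
     share one type, and the labels are sorted in increasing order.  */
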
4173 n = gimple_switch_num_labels (stmt);
4174 for (i = 1; i < n; i++)
4175 {
4176 elt = gimple_switch_label (stmt, i);
4177
4178 if (! CASE_LOW (elt))
4179 {
4180 error ("invalid case label in switch statement");
4181 debug_generic_expr (elt);
4182 return true;
4183 }
4184 if (CASE_HIGH (elt)
4185 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4186 {
4187 error ("invalid case range in switch statement");
4188 debug_generic_expr (elt);
4189 return true;
4190 }
4191
4192 if (elt_type)
4193 {
4194 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4195 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4196 {
4197 error ("type mismatch for case label in switch statement");
4198 debug_generic_expr (elt);
4199 return true;
4200 }
4201 }
4202 else
4203 {
4204 elt_type = TREE_TYPE (CASE_LOW (elt));
4205 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4206 {
4207 error ("type precision mismatch in switch statement");
4208 return true;
4209 }
4210 }
4211
4212 if (prev_upper_bound)
4213 {
4214 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4215 {
4216 error ("case labels not sorted in switch statement");
4217 return true;
4218 }
4219 }
4220
4221 prev_upper_bound = CASE_HIGH (elt);
4222 if (! prev_upper_bound)
4223 prev_upper_bound = CASE_LOW (elt);
4224 }
4225
4226 return false;
4227 }
4228
4229 /* Verify a gimple debug statement STMT.
4230 Returns true if anything is wrong. */
4231
4232 static bool
4233 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4234 {
4235 	  /* There isn't much that could be wrong in a gimple debug stmt.  A
4236 	     gimple debug bind stmt, for example, maps a tree that is usually
4237 	     a VAR_DECL or a PARM_DECL, but may also be some scalarized
4238 	     component or member of an aggregate type, to another tree that
4239 	     can be an arbitrary expression.  These stmts expand into debug
4240 	     insns, and are converted to debug notes by var-tracking.c.  */
4241 return false;
4242 }
4243
4244 /* Verify a gimple label statement STMT.
4245 Returns true if anything is wrong. */
4246
4247 static bool
4248 verify_gimple_label (gimple stmt)
4249 {
4250 tree decl = gimple_label_label (stmt);
4251 int uid;
4252 bool err = false;
4253
4254 if (TREE_CODE (decl) != LABEL_DECL)
4255 return true;
4256 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4257 && DECL_CONTEXT (decl) != current_function_decl)
4258 {
4259 error ("label's context is not the current function decl");
4260 err |= true;
4261 }
4262
4263 uid = LABEL_DECL_UID (decl);
4264 if (cfun->cfg
4265 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4266 {
4267 error ("incorrect entry in label_to_block_map");
4268 err |= true;
4269 }
4270
4271 uid = EH_LANDING_PAD_NR (decl);
4272 if (uid)
4273 {
4274 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4275 if (decl != lp->post_landing_pad)
4276 {
4277 error ("incorrect setting of landing pad number");
4278 err |= true;
4279 }
4280 }
4281
4282 return err;
4283 }
4284
4285 /* Verify the GIMPLE statement STMT. Returns true if there is an
4286 error, otherwise false. */
4287
4288 static bool
4289 verify_gimple_stmt (gimple stmt)
4290 {
4291 switch (gimple_code (stmt))
4292 {
4293 case GIMPLE_ASSIGN:
4294 return verify_gimple_assign (stmt);
4295
4296 case GIMPLE_LABEL:
4297 return verify_gimple_label (stmt);
4298
4299 case GIMPLE_CALL:
4300 return verify_gimple_call (stmt);
4301
4302 case GIMPLE_COND:
4303 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4304 {
4305 error ("invalid comparison code in gimple cond");
4306 return true;
4307 }
4308 if (!(!gimple_cond_true_label (stmt)
4309 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4310 || !(!gimple_cond_false_label (stmt)
4311 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4312 {
4313 error ("invalid labels in gimple cond");
4314 return true;
4315 }
4316
4317 return verify_gimple_comparison (boolean_type_node,
4318 gimple_cond_lhs (stmt),
4319 gimple_cond_rhs (stmt));
4320
4321 case GIMPLE_GOTO:
4322 return verify_gimple_goto (stmt);
4323
4324 case GIMPLE_SWITCH:
4325 return verify_gimple_switch (stmt);
4326
4327 case GIMPLE_RETURN:
4328 return verify_gimple_return (stmt);
4329
4330 case GIMPLE_ASM:
4331 return false;
4332
4333 case GIMPLE_TRANSACTION:
4334 return verify_gimple_transaction (stmt);
4335
4336 /* Tuples that do not have tree operands. */
4337 case GIMPLE_NOP:
4338 case GIMPLE_PREDICT:
4339 case GIMPLE_RESX:
4340 case GIMPLE_EH_DISPATCH:
4341 case GIMPLE_EH_MUST_NOT_THROW:
4342 return false;
4343
4344 CASE_GIMPLE_OMP:
4345 	      /* OpenMP directives are validated by the FE and never operated
4346 	         on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
4347 	         non-gimple expressions when the main index variable has had
4348 	         its address taken.  This does not affect the loop itself
4349 	         because the header of a GIMPLE_OMP_FOR is merely used to
4350 	         determine how to set up the parallel iteration.  */
4351 return false;
4352
4353 case GIMPLE_DEBUG:
4354 return verify_gimple_debug (stmt);
4355
4356 default:
4357 gcc_unreachable ();
4358 }
4359 }
4360
4361 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4362 and false otherwise. */
4363
4364 static bool
4365 verify_gimple_phi (gimple phi)
4366 {
4367 bool err = false;
4368 unsigned i;
4369 tree phi_result = gimple_phi_result (phi);
4370 bool virtual_p;
4371
4372 if (!phi_result)
4373 {
4374 error ("invalid PHI result");
4375 return true;
4376 }
4377
4378 virtual_p = virtual_operand_p (phi_result);
4379 if (TREE_CODE (phi_result) != SSA_NAME
4380 || (virtual_p
4381 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4382 {
4383 error ("invalid PHI result");
4384 err = true;
4385 }
4386
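  /* Illustrative well-formed PHIs (approximate dump forms):

       x_3 = PHI <x_1(2), 5(4)>                  non-virtual
       .MEM_7 = PHI <.MEM_5(2), .MEM_6(4)>       virtual

     the loop below enforces exactly this agreement in virtual-ness
     between the result and every argument.  */
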
4387 for (i = 0; i < gimple_phi_num_args (phi); i++)
4388 {
4389 tree t = gimple_phi_arg_def (phi, i);
4390
4391 if (!t)
4392 {
4393 error ("missing PHI def");
4394 err |= true;
4395 continue;
4396 }
4397 /* Addressable variables do have SSA_NAMEs but they
4398 are not considered gimple values. */
4399 else if ((TREE_CODE (t) == SSA_NAME
4400 && virtual_p != virtual_operand_p (t))
4401 || (virtual_p
4402 && (TREE_CODE (t) != SSA_NAME
4403 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4404 || (!virtual_p
4405 && !is_gimple_val (t)))
4406 {
4407 error ("invalid PHI argument");
4408 debug_generic_expr (t);
4409 err |= true;
4410 }
4411 #ifdef ENABLE_TYPES_CHECKING
4412 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4413 {
4414 error ("incompatible types in PHI argument %u", i);
4415 debug_generic_stmt (TREE_TYPE (phi_result));
4416 debug_generic_stmt (TREE_TYPE (t));
4417 err |= true;
4418 }
4419 #endif
4420 }
4421
4422 return err;
4423 }
4424
4425 /* Verify the GIMPLE statements inside the sequence STMTS. */
4426
4427 static bool
4428 verify_gimple_in_seq_2 (gimple_seq stmts)
4429 {
4430 gimple_stmt_iterator ittr;
4431 bool err = false;
4432
4433 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4434 {
4435 gimple stmt = gsi_stmt (ittr);
4436
4437 switch (gimple_code (stmt))
4438 {
4439 case GIMPLE_BIND:
4440 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4441 break;
4442
4443 case GIMPLE_TRY:
4444 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4445 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4446 break;
4447
4448 case GIMPLE_EH_FILTER:
4449 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4450 break;
4451
4452 case GIMPLE_EH_ELSE:
4453 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4454 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4455 break;
4456
4457 case GIMPLE_CATCH:
4458 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4459 break;
4460
4461 case GIMPLE_TRANSACTION:
4462 err |= verify_gimple_transaction (stmt);
4463 break;
4464
4465 default:
4466 {
4467 bool err2 = verify_gimple_stmt (stmt);
4468 if (err2)
4469 debug_gimple_stmt (stmt);
4470 err |= err2;
4471 }
4472 }
4473 }
4474
4475 return err;
4476 }
4477
4478 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4479 is a problem, otherwise false. */
4480
4481 static bool
4482 verify_gimple_transaction (gimple stmt)
4483 {
4484 tree lab = gimple_transaction_label (stmt);
4485 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4486 return true;
4487 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4488 }
4489
4490
4491 /* Verify the GIMPLE statements inside the statement list STMTS. */
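/* Being a DEBUG_FUNCTION, this is also convenient to invoke by hand
   from the debugger, e.g. in a hypothetical gdb session (before the
   CFG is built, while the body is still a sequence):
     (gdb) call verify_gimple_in_seq (gimple_body (current_function_decl))
   which aborts via internal_error on the first broken sequence.  */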
4492
4493 DEBUG_FUNCTION void
4494 verify_gimple_in_seq (gimple_seq stmts)
4495 {
4496 timevar_push (TV_TREE_STMT_VERIFY);
4497 if (verify_gimple_in_seq_2 (stmts))
4498 internal_error ("verify_gimple failed");
4499 timevar_pop (TV_TREE_STMT_VERIFY);
4500 }
4501
4502 /* Return true when T can be shared.  */
4503
4504 static bool
4505 tree_node_can_be_shared (tree t)
4506 {
4507 if (IS_TYPE_OR_DECL_P (t)
4508 || is_gimple_min_invariant (t)
4509 || TREE_CODE (t) == SSA_NAME
4510 || t == error_mark_node
4511 || TREE_CODE (t) == IDENTIFIER_NODE)
4512 return true;
4513
4514 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4515 return true;
4516
4517 if (DECL_P (t))
4518 return true;
4519
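  /* Everything else, e.g. a COMPONENT_REF or MEM_REF operand tree,
     must not be shared: each use site needs its own node so it can be
     modified in place independently.  */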
4520 return false;
4521 }
4522
4523 /* Called via walk_tree. Verify tree sharing. */
4524
4525 static tree
4526 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4527 {
4528 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4529
4530 if (tree_node_can_be_shared (*tp))
4531 {
4532 *walk_subtrees = false;
4533 return NULL;
4534 }
4535
4536 if (pointer_set_insert (visited, *tp))
4537 return *tp;
4538
4539 return NULL;
4540 }
4541
4542 /* Called via walk_gimple_stmt. Verify tree sharing. */
4543
4544 static tree
4545 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4546 {
4547 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4548 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4549 }
4550
4551 static bool eh_error_found;
4552 static int
4553 verify_eh_throw_stmt_node (void **slot, void *data)
4554 {
4555 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4556 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4557
4558 if (!pointer_set_contains (visited, node->stmt))
4559 {
4560 error ("dead STMT in EH table");
4561 debug_gimple_stmt (node->stmt);
4562 eh_error_found = true;
4563 }
4564 return 1;
4565 }
4566
4567 /* Verify that the block of location LOC is in BLOCKS.  */
4568
4569 static bool
4570 verify_location (pointer_set_t *blocks, location_t loc)
4571 {
4572 tree block = LOCATION_BLOCK (loc);
4573 if (block != NULL_TREE
4574 && !pointer_set_contains (blocks, block))
4575 {
4576 error ("location references block not in block tree");
4577 return true;
4578 }
4579 if (block != NULL_TREE)
4580 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4581 return false;
4582 }
4583
4584 /* Called via walk_tree. Verify that expressions have no blocks. */
4585
4586 static tree
4587 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4588 {
4589 if (!EXPR_P (*tp))
4590 {
4591 *walk_subtrees = false;
4592 return NULL;
4593 }
4594
4595 location_t loc = EXPR_LOCATION (*tp);
4596 if (LOCATION_BLOCK (loc) != NULL)
4597 return *tp;
4598
4599 return NULL;
4600 }
4601
4602 /* Called via walk_tree. Verify locations of expressions. */
4603
4604 static tree
4605 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4606 {
4607 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4608
4609 if (TREE_CODE (*tp) == VAR_DECL
4610 && DECL_HAS_DEBUG_EXPR_P (*tp))
4611 {
4612 tree t = DECL_DEBUG_EXPR (*tp);
4613 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4614 if (addr)
4615 return addr;
4616 }
4617 if ((TREE_CODE (*tp) == VAR_DECL
4618 || TREE_CODE (*tp) == PARM_DECL
4619 || TREE_CODE (*tp) == RESULT_DECL)
4620 && DECL_HAS_VALUE_EXPR_P (*tp))
4621 {
4622 tree t = DECL_VALUE_EXPR (*tp);
4623 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4624 if (addr)
4625 return addr;
4626 }
4627
4628 if (!EXPR_P (*tp))
4629 {
4630 *walk_subtrees = false;
4631 return NULL;
4632 }
4633
4634 location_t loc = EXPR_LOCATION (*tp);
4635 if (verify_location (blocks, loc))
4636 return *tp;
4637
4638 return NULL;
4639 }
4640
4641 /* Called via walk_gimple_op. Verify locations of expressions. */
4642
4643 static tree
4644 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4645 {
4646 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4647 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4648 }
4649
4650 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4651
4652 static void
4653 collect_subblocks (pointer_set_t *blocks, tree block)
4654 {
4655 tree t;
4656 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4657 {
4658 pointer_set_insert (blocks, t);
4659 collect_subblocks (blocks, t);
4660 }
4661 }
4662
4663 /* Verify the GIMPLE statements in the CFG of FN. */
4664
4665 DEBUG_FUNCTION void
4666 verify_gimple_in_cfg (struct function *fn)
4667 {
4668 basic_block bb;
4669 bool err = false;
4670 struct pointer_set_t *visited, *visited_stmts, *blocks;
4671
4672 timevar_push (TV_TREE_STMT_VERIFY);
4673 visited = pointer_set_create ();
4674 visited_stmts = pointer_set_create ();
4675
4676 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4677 blocks = pointer_set_create ();
4678 if (DECL_INITIAL (fn->decl))
4679 {
4680 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4681 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4682 }
4683
4684 FOR_EACH_BB_FN (bb, fn)
4685 {
4686 gimple_stmt_iterator gsi;
4687
4688 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4689 {
4690 gimple phi = gsi_stmt (gsi);
4691 bool err2 = false;
4692 unsigned i;
4693
4694 pointer_set_insert (visited_stmts, phi);
4695
4696 if (gimple_bb (phi) != bb)
4697 {
4698 error ("gimple_bb (phi) is set to a wrong basic block");
4699 err2 = true;
4700 }
4701
4702 err2 |= verify_gimple_phi (phi);
4703
4704 /* Only PHI arguments have locations. */
4705 if (gimple_location (phi) != UNKNOWN_LOCATION)
4706 {
4707 error ("PHI node with location");
4708 err2 = true;
4709 }
4710
4711 for (i = 0; i < gimple_phi_num_args (phi); i++)
4712 {
4713 tree arg = gimple_phi_arg_def (phi, i);
4714 tree addr = walk_tree (&arg, verify_node_sharing_1,
4715 visited, NULL);
4716 if (addr)
4717 {
4718 error ("incorrect sharing of tree nodes");
4719 debug_generic_expr (addr);
4720 err2 |= true;
4721 }
4722 location_t loc = gimple_phi_arg_location (phi, i);
4723 if (virtual_operand_p (gimple_phi_result (phi))
4724 && loc != UNKNOWN_LOCATION)
4725 {
4726 error ("virtual PHI with argument locations");
4727 err2 = true;
4728 }
4729 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4730 if (addr)
4731 {
4732 debug_generic_expr (addr);
4733 err2 = true;
4734 }
4735 err2 |= verify_location (blocks, loc);
4736 }
4737
4738 if (err2)
4739 debug_gimple_stmt (phi);
4740 err |= err2;
4741 }
4742
4743 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4744 {
4745 gimple stmt = gsi_stmt (gsi);
4746 bool err2 = false;
4747 struct walk_stmt_info wi;
4748 tree addr;
4749 int lp_nr;
4750
4751 pointer_set_insert (visited_stmts, stmt);
4752
4753 if (gimple_bb (stmt) != bb)
4754 {
4755 error ("gimple_bb (stmt) is set to a wrong basic block");
4756 err2 = true;
4757 }
4758
4759 err2 |= verify_gimple_stmt (stmt);
4760 err2 |= verify_location (blocks, gimple_location (stmt));
4761
4762 memset (&wi, 0, sizeof (wi));
4763 wi.info = (void *) visited;
4764 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4765 if (addr)
4766 {
4767 error ("incorrect sharing of tree nodes");
4768 debug_generic_expr (addr);
4769 err2 |= true;
4770 }
4771
4772 memset (&wi, 0, sizeof (wi));
4773 wi.info = (void *) blocks;
4774 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4775 if (addr)
4776 {
4777 debug_generic_expr (addr);
4778 err2 |= true;
4779 }
4780
4781 /* ??? Instead of not checking these stmts at all the walker
4782 should know its context via wi. */
4783 if (!is_gimple_debug (stmt)
4784 && !is_gimple_omp (stmt))
4785 {
4786 memset (&wi, 0, sizeof (wi));
4787 addr = walk_gimple_op (stmt, verify_expr, &wi);
4788 if (addr)
4789 {
4790 debug_generic_expr (addr);
4791 inform (gimple_location (stmt), "in statement");
4792 err2 |= true;
4793 }
4794 }
4795
4796 	  /* If the statement is marked as part of an EH region, then it
4797 	     is expected that the statement could throw.  Verify that when
4798 	     optimizations simplify statements such that we can prove they
4799 	     cannot throw, we also update the other data structures to
4800 	     match.  */
4801 lp_nr = lookup_stmt_eh_lp (stmt);
4802 if (lp_nr != 0)
4803 {
4804 if (!stmt_could_throw_p (stmt))
4805 {
4806 error ("statement marked for throw, but doesn%'t");
4807 err2 |= true;
4808 }
4809 else if (lp_nr > 0
4810 && !gsi_one_before_end_p (gsi)
4811 && stmt_can_throw_internal (stmt))
4812 {
4813 error ("statement marked for throw in middle of block");
4814 err2 |= true;
4815 }
4816 }
4817
4818 if (err2)
4819 debug_gimple_stmt (stmt);
4820 err |= err2;
4821 }
4822 }
4823
4824 eh_error_found = false;
4825 if (get_eh_throw_stmt_table (cfun))
4826 htab_traverse (get_eh_throw_stmt_table (cfun),
4827 verify_eh_throw_stmt_node,
4828 visited_stmts);
4829
4830 if (err || eh_error_found)
4831 internal_error ("verify_gimple failed");
4832
4833 pointer_set_destroy (visited);
4834 pointer_set_destroy (visited_stmts);
4835 pointer_set_destroy (blocks);
4836 verify_histograms ();
4837 timevar_pop (TV_TREE_STMT_VERIFY);
4838 }
4839
4840
4841 /* Verifies that the flow information is OK. */
4842
4843 static int
4844 gimple_verify_flow_info (void)
4845 {
4846 int err = 0;
4847 basic_block bb;
4848 gimple_stmt_iterator gsi;
4849 gimple stmt;
4850 edge e;
4851 edge_iterator ei;
4852
4853 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4854 {
4855 error ("ENTRY_BLOCK has IL associated with it");
4856 err = 1;
4857 }
4858
4859 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4860 {
4861 error ("EXIT_BLOCK has IL associated with it");
4862 err = 1;
4863 }
4864
4865 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4866 if (e->flags & EDGE_FALLTHRU)
4867 {
4868 error ("fallthru to exit from bb %d", e->src->index);
4869 err = 1;
4870 }
4871
4872 FOR_EACH_BB (bb)
4873 {
4874 bool found_ctrl_stmt = false;
4875
4876 stmt = NULL;
4877
4878 	      /* Skip labels at the start of the basic block.  */
4879 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4880 {
4881 tree label;
4882 gimple prev_stmt = stmt;
4883
4884 stmt = gsi_stmt (gsi);
4885
4886 if (gimple_code (stmt) != GIMPLE_LABEL)
4887 break;
4888
4889 label = gimple_label_label (stmt);
4890 if (prev_stmt && DECL_NONLOCAL (label))
4891 {
4892 error ("nonlocal label ");
4893 print_generic_expr (stderr, label, 0);
4894 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4895 bb->index);
4896 err = 1;
4897 }
4898
4899 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4900 {
4901 error ("EH landing pad label ");
4902 print_generic_expr (stderr, label, 0);
4903 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4904 bb->index);
4905 err = 1;
4906 }
4907
4908 if (label_to_block (label) != bb)
4909 {
4910 error ("label ");
4911 print_generic_expr (stderr, label, 0);
4912 fprintf (stderr, " to block does not match in bb %d",
4913 bb->index);
4914 err = 1;
4915 }
4916
4917 if (decl_function_context (label) != current_function_decl)
4918 {
4919 error ("label ");
4920 print_generic_expr (stderr, label, 0);
4921 fprintf (stderr, " has incorrect context in bb %d",
4922 bb->index);
4923 err = 1;
4924 }
4925 }
4926
4927 	      /* Verify that the body of basic block BB is free of control flow.  */
4928 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4929 {
4930 gimple stmt = gsi_stmt (gsi);
4931
4932 if (found_ctrl_stmt)
4933 {
4934 error ("control flow in the middle of basic block %d",
4935 bb->index);
4936 err = 1;
4937 }
4938
4939 if (stmt_ends_bb_p (stmt))
4940 found_ctrl_stmt = true;
4941
4942 if (gimple_code (stmt) == GIMPLE_LABEL)
4943 {
4944 error ("label ");
4945 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4946 fprintf (stderr, " in the middle of basic block %d", bb->index);
4947 err = 1;
4948 }
4949 }
4950
4951 gsi = gsi_last_bb (bb);
4952 if (gsi_end_p (gsi))
4953 continue;
4954
4955 stmt = gsi_stmt (gsi);
4956
4957 if (gimple_code (stmt) == GIMPLE_LABEL)
4958 continue;
4959
4960 err |= verify_eh_edges (stmt);
4961
4962 if (is_ctrl_stmt (stmt))
4963 {
4964 FOR_EACH_EDGE (e, ei, bb->succs)
4965 if (e->flags & EDGE_FALLTHRU)
4966 {
4967 error ("fallthru edge after a control statement in bb %d",
4968 bb->index);
4969 err = 1;
4970 }
4971 }
4972
4973 if (gimple_code (stmt) != GIMPLE_COND)
4974 {
4975 	      /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4976 	         after anything else but an if statement.  */
4977 FOR_EACH_EDGE (e, ei, bb->succs)
4978 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4979 {
4980 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4981 bb->index);
4982 err = 1;
4983 }
4984 }
4985
4986 switch (gimple_code (stmt))
4987 {
4988 case GIMPLE_COND:
4989 {
4990 edge true_edge;
4991 edge false_edge;
4992
4993 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4994
4995 if (!true_edge
4996 || !false_edge
4997 || !(true_edge->flags & EDGE_TRUE_VALUE)
4998 || !(false_edge->flags & EDGE_FALSE_VALUE)
4999 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5000 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5001 || EDGE_COUNT (bb->succs) >= 3)
5002 {
5003 error ("wrong outgoing edge flags at end of bb %d",
5004 bb->index);
5005 err = 1;
5006 }
5007 }
5008 break;
5009
5010 case GIMPLE_GOTO:
5011 if (simple_goto_p (stmt))
5012 {
5013 error ("explicit goto at end of bb %d", bb->index);
5014 err = 1;
5015 }
5016 else
5017 {
5018 /* FIXME. We should double check that the labels in the
5019 destination blocks have their address taken. */
5020 FOR_EACH_EDGE (e, ei, bb->succs)
5021 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5022 | EDGE_FALSE_VALUE))
5023 || !(e->flags & EDGE_ABNORMAL))
5024 {
5025 error ("wrong outgoing edge flags at end of bb %d",
5026 bb->index);
5027 err = 1;
5028 }
5029 }
5030 break;
5031
5032 case GIMPLE_CALL:
5033 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5034 break;
5035 /* ... fallthru ... */
5036 case GIMPLE_RETURN:
5037 if (!single_succ_p (bb)
5038 || (single_succ_edge (bb)->flags
5039 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5040 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5041 {
5042 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5043 err = 1;
5044 }
5045 if (single_succ (bb) != EXIT_BLOCK_PTR)
5046 {
5047 error ("return edge does not point to exit in bb %d",
5048 bb->index);
5049 err = 1;
5050 }
5051 break;
5052
5053 case GIMPLE_SWITCH:
5054 {
5055 tree prev;
5056 edge e;
5057 size_t i, n;
5058
5059 n = gimple_switch_num_labels (stmt);
5060
5061 /* Mark all the destination basic blocks. */
5062 for (i = 0; i < n; ++i)
5063 {
5064 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5065 basic_block label_bb = label_to_block (lab);
5066 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5067 label_bb->aux = (void *)1;
5068 }
5069
5070 /* Verify that the case labels are sorted. */
5071 prev = gimple_switch_label (stmt, 0);
5072 for (i = 1; i < n; ++i)
5073 {
5074 tree c = gimple_switch_label (stmt, i);
5075 if (!CASE_LOW (c))
5076 {
5077 error ("found default case not at the start of "
5078 "case vector");
5079 err = 1;
5080 continue;
5081 }
5082 if (CASE_LOW (prev)
5083 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5084 {
5085 error ("case labels not sorted: ");
5086 print_generic_expr (stderr, prev, 0);
5087 	            fprintf (stderr, " is greater than ");
5088 	            print_generic_expr (stderr, c, 0);
5089 	            fprintf (stderr, " but comes before it.\n");
5090 err = 1;
5091 }
5092 prev = c;
5093 }
5094 /* VRP will remove the default case if it can prove it will
5095 never be executed. So do not verify there always exists
5096 a default case here. */
5097
5098 FOR_EACH_EDGE (e, ei, bb->succs)
5099 {
5100 if (!e->dest->aux)
5101 {
5102 error ("extra outgoing edge %d->%d",
5103 bb->index, e->dest->index);
5104 err = 1;
5105 }
5106
5107 e->dest->aux = (void *)2;
5108 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5109 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5110 {
5111 error ("wrong outgoing edge flags at end of bb %d",
5112 bb->index);
5113 err = 1;
5114 }
5115 }
5116
5117 /* Check that we have all of them. */
5118 for (i = 0; i < n; ++i)
5119 {
5120 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5121 basic_block label_bb = label_to_block (lab);
5122
5123 if (label_bb->aux != (void *)2)
5124 {
5125 error ("missing edge %i->%i", bb->index, label_bb->index);
5126 err = 1;
5127 }
5128 }
5129
5130 FOR_EACH_EDGE (e, ei, bb->succs)
5131 e->dest->aux = (void *)0;
5132 }
5133 break;
5134
5135 case GIMPLE_EH_DISPATCH:
5136 err |= verify_eh_dispatch_edge (stmt);
5137 break;
5138
5139 default:
5140 break;
5141 }
5142 }
5143
5144 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5145 verify_dominators (CDI_DOMINATORS);
5146
5147 return err;
5148 }
5149
5150
5151 /* Updates phi nodes after creating a forwarder block joined
5152 by edge FALLTHRU. */
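/* Illustration (approximate): a PHI that lived in the forwarder DUMMY

     DUMMY: x_1 = PHI <x_2(E1), x_3(E2)>

   has its result renamed in place and is re-created in BB as

     DUMMY: x_4 = PHI <x_2(E1), x_3(E2)>
     BB:    x_1 = PHI <x_4(FALLTHRU), ...pending args...>

   so that uses of x_1 dominated by BB remain valid.  */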
5153
5154 static void
5155 gimple_make_forwarder_block (edge fallthru)
5156 {
5157 edge e;
5158 edge_iterator ei;
5159 basic_block dummy, bb;
5160 tree var;
5161 gimple_stmt_iterator gsi;
5162
5163 dummy = fallthru->src;
5164 bb = fallthru->dest;
5165
5166 if (single_pred_p (bb))
5167 return;
5168
5169 /* If we redirected a branch we must create new PHI nodes at the
5170 start of BB. */
5171 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5172 {
5173 gimple phi, new_phi;
5174
5175 phi = gsi_stmt (gsi);
5176 var = gimple_phi_result (phi);
5177 new_phi = create_phi_node (var, bb);
5178 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5179 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5180 UNKNOWN_LOCATION);
5181 }
5182
5183 /* Add the arguments we have stored on edges. */
5184 FOR_EACH_EDGE (e, ei, bb->preds)
5185 {
5186 if (e == fallthru)
5187 continue;
5188
5189 flush_pending_stmts (e);
5190 }
5191 }
5192
5193
5194 /* Return a non-special label in the head of basic block BB.
5195 Create one if it doesn't exist. */
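/* A typical (hypothetical) use, mirroring the GIMPLE_SWITCH handling
   in gimple_redirect_edge_and_branch below:

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   i.e. a pass that needs a jump target for a block asks for its label
   and patches it into the relevant construct.  */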
5196
5197 tree
5198 gimple_block_label (basic_block bb)
5199 {
5200 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5201 bool first = true;
5202 tree label;
5203 gimple stmt;
5204
5205 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5206 {
5207 stmt = gsi_stmt (i);
5208 if (gimple_code (stmt) != GIMPLE_LABEL)
5209 break;
5210 label = gimple_label_label (stmt);
5211 if (!DECL_NONLOCAL (label))
5212 {
5213 if (!first)
5214 gsi_move_before (&i, &s);
5215 return label;
5216 }
5217 }
5218
5219 label = create_artificial_label (UNKNOWN_LOCATION);
5220 stmt = gimple_build_label (label);
5221 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5222 return label;
5223 }
5224
5225
5226 /* Attempt to perform edge redirection by replacing a possibly complex
5227 jump instruction by a goto or by removing the jump completely.
5228 This can apply only if all edges now point to the same block. The
5229 parameters and return values are equivalent to
5230 redirect_edge_and_branch. */
5231
5232 static edge
5233 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5234 {
5235 basic_block src = e->src;
5236 gimple_stmt_iterator i;
5237 gimple stmt;
5238
5239 /* We can replace or remove a complex jump only when we have exactly
5240 two edges. */
5241 if (EDGE_COUNT (src->succs) != 2
5242 /* Verify that all targets will be TARGET. Specifically, the
5243 edge that is not E must also go to TARGET. */
5244 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5245 return NULL;
5246
5247 i = gsi_last_bb (src);
5248 if (gsi_end_p (i))
5249 return NULL;
5250
5251 stmt = gsi_stmt (i);
5252
5253 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5254 {
5255 gsi_remove (&i, true);
5256 e = ssa_redirect_edge (e, target);
5257 e->flags = EDGE_FALLTHRU;
5258 return e;
5259 }
5260
5261 return NULL;
5262 }
5263
5264
5265 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5266 edge representing the redirected branch. */
5267
5268 static edge
5269 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5270 {
5271 basic_block bb = e->src;
5272 gimple_stmt_iterator gsi;
5273 edge ret;
5274 gimple stmt;
5275
5276 if (e->flags & EDGE_ABNORMAL)
5277 return NULL;
5278
5279 if (e->dest == dest)
5280 return NULL;
5281
5282 if (e->flags & EDGE_EH)
5283 return redirect_eh_edge (e, dest);
5284
5285 if (e->src != ENTRY_BLOCK_PTR)
5286 {
5287 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5288 if (ret)
5289 return ret;
5290 }
5291
5292 gsi = gsi_last_bb (bb);
5293 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5294
5295 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5296 {
5297 case GIMPLE_COND:
5298 /* For COND_EXPR, we only need to redirect the edge. */
5299 break;
5300
5301 case GIMPLE_GOTO:
5302 /* No non-abnormal edges should lead from a non-simple goto, and
5303 simple ones should be represented implicitly. */
5304 gcc_unreachable ();
5305
5306 case GIMPLE_SWITCH:
5307 {
5308 tree label = gimple_block_label (dest);
5309 tree cases = get_cases_for_edge (e, stmt);
5310
5311 /* If we have a list of cases associated with E, then use it
5312 as it's a lot faster than walking the entire case vector. */
5313 if (cases)
5314 {
5315 edge e2 = find_edge (e->src, dest);
5316 tree last, first;
5317
5318 first = cases;
5319 while (cases)
5320 {
5321 last = cases;
5322 CASE_LABEL (cases) = label;
5323 cases = CASE_CHAIN (cases);
5324 }
5325
5326 /* If there was already an edge in the CFG, then we need
5327 to move all the cases associated with E to E2. */
5328 if (e2)
5329 {
5330 tree cases2 = get_cases_for_edge (e2, stmt);
5331
5332 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5333 CASE_CHAIN (cases2) = first;
5334 }
5335 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5336 }
5337 else
5338 {
5339 size_t i, n = gimple_switch_num_labels (stmt);
5340
5341 for (i = 0; i < n; i++)
5342 {
5343 tree elt = gimple_switch_label (stmt, i);
5344 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5345 CASE_LABEL (elt) = label;
5346 }
5347 }
5348 }
5349 break;
5350
5351 case GIMPLE_ASM:
5352 {
5353 int i, n = gimple_asm_nlabels (stmt);
5354 tree label = NULL;
5355
5356 for (i = 0; i < n; ++i)
5357 {
5358 tree cons = gimple_asm_label_op (stmt, i);
5359 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5360 {
5361 if (!label)
5362 label = gimple_block_label (dest);
5363 TREE_VALUE (cons) = label;
5364 }
5365 }
5366
5367 /* If we didn't find any label matching the former edge in the
5368 asm labels, we must be redirecting the fallthrough
5369 edge. */
5370 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5371 }
5372 break;
5373
5374 case GIMPLE_RETURN:
5375 gsi_remove (&gsi, true);
5376 e->flags |= EDGE_FALLTHRU;
5377 break;
5378
5379 case GIMPLE_OMP_RETURN:
5380 case GIMPLE_OMP_CONTINUE:
5381 case GIMPLE_OMP_SECTIONS_SWITCH:
5382 case GIMPLE_OMP_FOR:
5383 /* The edges from OMP constructs can be simply redirected. */
5384 break;
5385
5386 case GIMPLE_EH_DISPATCH:
5387 if (!(e->flags & EDGE_FALLTHRU))
5388 redirect_eh_dispatch_edge (stmt, e, dest);
5389 break;
5390
5391 case GIMPLE_TRANSACTION:
5392 /* The ABORT edge has a stored label associated with it, otherwise
5393 the edges are simply redirectable. */
5394 if (e->flags == 0)
5395 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5396 break;
5397
5398 default:
5399 /* Otherwise it must be a fallthru edge, and we don't need to
5400 do anything besides redirecting it. */
5401 gcc_assert (e->flags & EDGE_FALLTHRU);
5402 break;
5403 }
5404
5405 /* Update/insert PHI nodes as necessary. */
5406
5407 /* Now update the edges in the CFG. */
5408 e = ssa_redirect_edge (e, dest);
5409
5410 return e;
5411 }
5412
5413 /* Returns true if it is possible to remove edge E by redirecting
5414 it to the destination of the other edge from E->src. */
5415
5416 static bool
5417 gimple_can_remove_branch_p (const_edge e)
5418 {
5419 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5420 return false;
5421
5422 return true;
5423 }
5424
5425 /* Simple wrapper, as we can always redirect fallthru edges. */
5426
5427 static basic_block
5428 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5429 {
5430 e = gimple_redirect_edge_and_branch (e, dest);
5431 gcc_assert (e);
5432
5433 return NULL;
5434 }
5435
5436
5437 /* Splits basic block BB after statement STMT (but at least after the
5438 labels). If STMT is NULL, BB is split just after the labels. */
5439
5440 static basic_block
5441 gimple_split_block (basic_block bb, void *stmt)
5442 {
5443 gimple_stmt_iterator gsi;
5444 gimple_stmt_iterator gsi_tgt;
5445 gimple act;
5446 gimple_seq list;
5447 basic_block new_bb;
5448 edge e;
5449 edge_iterator ei;
5450
5451 new_bb = create_empty_bb (bb);
5452
5453 /* Redirect the outgoing edges. */
5454 new_bb->succs = bb->succs;
5455 bb->succs = NULL;
5456 FOR_EACH_EDGE (e, ei, new_bb->succs)
5457 e->src = new_bb;
5458
5459 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5460 stmt = NULL;
5461
5462 /* Move everything from GSI to the new basic block. */
5463 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5464 {
5465 act = gsi_stmt (gsi);
5466 if (gimple_code (act) == GIMPLE_LABEL)
5467 continue;
5468
5469 if (!stmt)
5470 break;
5471
5472 if (stmt == act)
5473 {
5474 gsi_next (&gsi);
5475 break;
5476 }
5477 }
5478
5479 if (gsi_end_p (gsi))
5480 return new_bb;
5481
5482 /* Split the statement list - avoid re-creating new containers as this
5483 brings ugly quadratic memory consumption in the inliner.
5484 (We are still quadratic since we need to update stmt BB pointers,
5485 sadly.) */
5486 gsi_split_seq_before (&gsi, &list);
5487 set_bb_seq (new_bb, list);
5488 for (gsi_tgt = gsi_start (list);
5489 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5490 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5491
5492 return new_bb;
5493 }
5494
5495
5496 /* Moves basic block BB after block AFTER. */
5497
5498 static bool
5499 gimple_move_block_after (basic_block bb, basic_block after)
5500 {
5501 if (bb->prev_bb == after)
5502 return true;
5503
5504 unlink_block (bb);
5505 link_block (bb, after);
5506
5507 return true;
5508 }
5509
5510
5511 /* Return TRUE if block BB has no executable statements, otherwise return
5512 FALSE. */
5513
5514 static bool
5515 gimple_empty_block_p (basic_block bb)
5516 {
5517 /* BB must have no executable statements. */
5518 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5519 if (phi_nodes (bb))
5520 return false;
5521 if (gsi_end_p (gsi))
5522 return true;
5523 if (is_gimple_debug (gsi_stmt (gsi)))
5524 gsi_next_nondebug (&gsi);
5525 return gsi_end_p (gsi);
5526 }
5527
5528
5529 /* Split a basic block if it ends with a conditional branch and if the
5530 other part of the block is not empty. */
5531
5532 static basic_block
5533 gimple_split_block_before_cond_jump (basic_block bb)
5534 {
5535 gimple last, split_point;
5536 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5537 if (gsi_end_p (gsi))
5538 return NULL;
5539 last = gsi_stmt (gsi);
5540 if (gimple_code (last) != GIMPLE_COND
5541 && gimple_code (last) != GIMPLE_SWITCH)
5542 return NULL;
5543 gsi_prev_nondebug (&gsi);
5544 split_point = gsi_stmt (gsi);
5545 return split_block (bb, split_point)->dest;
5546 }
5547
5548
5549 /* Return true if basic_block can be duplicated. */
5550
5551 static bool
5552 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5553 {
5554 return true;
5555 }
5556
5557 /* Create a duplicate of the basic block BB. NOTE: This does not
5558 preserve SSA form. */
5559
5560 static basic_block
5561 gimple_duplicate_bb (basic_block bb)
5562 {
5563 basic_block new_bb;
5564 gimple_stmt_iterator gsi, gsi_tgt;
5565 gimple_seq phis = phi_nodes (bb);
5566 gimple phi, stmt, copy;
5567
5568 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5569
5570 	  /* Copy the PHI nodes.  We ignore PHI node arguments here because
5571 	     the incoming edges have not been set up yet.  */
5572 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5573 {
5574 phi = gsi_stmt (gsi);
5575 copy = create_phi_node (NULL_TREE, new_bb);
5576 create_new_def_for (gimple_phi_result (phi), copy,
5577 gimple_phi_result_ptr (copy));
5578 gimple_set_uid (copy, gimple_uid (phi));
5579 }
5580
5581 gsi_tgt = gsi_start_bb (new_bb);
5582 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5583 {
5584 def_operand_p def_p;
5585 ssa_op_iter op_iter;
5586 tree lhs;
5587
5588 stmt = gsi_stmt (gsi);
5589 if (gimple_code (stmt) == GIMPLE_LABEL)
5590 continue;
5591
5592 /* Don't duplicate label debug stmts. */
5593 if (gimple_debug_bind_p (stmt)
5594 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5595 == LABEL_DECL)
5596 continue;
5597
5598 /* Create a new copy of STMT and duplicate STMT's virtual
5599 operands. */
5600 copy = gimple_copy (stmt);
5601 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5602
5603 maybe_duplicate_eh_stmt (copy, stmt);
5604 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5605
5606 	      /* When copying around a stmt writing into a local non-user
5607 	         aggregate, make sure it won't share a stack slot with other
5608 	         vars.  */
5609 lhs = gimple_get_lhs (stmt);
5610 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5611 {
5612 tree base = get_base_address (lhs);
5613 if (base
5614 && (TREE_CODE (base) == VAR_DECL
5615 || TREE_CODE (base) == RESULT_DECL)
5616 && DECL_IGNORED_P (base)
5617 && !TREE_STATIC (base)
5618 && !DECL_EXTERNAL (base)
5619 && (TREE_CODE (base) != VAR_DECL
5620 || !DECL_HAS_VALUE_EXPR_P (base)))
5621 DECL_NONSHAREABLE (base) = 1;
5622 }
5623
5624 /* Create new names for all the definitions created by COPY and
5625 add replacement mappings for each new name. */
5626 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5627 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5628 }
5629
5630 return new_bb;
5631 }
5632
5633 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5634
5635 static void
5636 add_phi_args_after_copy_edge (edge e_copy)
5637 {
5638 basic_block bb, bb_copy = e_copy->src, dest;
5639 edge e;
5640 edge_iterator ei;
5641 gimple phi, phi_copy;
5642 tree def;
5643 gimple_stmt_iterator psi, psi_copy;
5644
5645 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5646 return;
5647
5648 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5649
5650 if (e_copy->dest->flags & BB_DUPLICATED)
5651 dest = get_bb_original (e_copy->dest);
5652 else
5653 dest = e_copy->dest;
5654
5655 e = find_edge (bb, dest);
5656 if (!e)
5657 {
5658 	      /* During loop unrolling the target of the latch edge is copied.
5659 	         In this case we are not looking for the edge to DEST, but for
5660 	         the edge to the duplicated block whose original was DEST.  */
5661 FOR_EACH_EDGE (e, ei, bb->succs)
5662 {
5663 if ((e->dest->flags & BB_DUPLICATED)
5664 && get_bb_original (e->dest) == dest)
5665 break;
5666 }
5667
5668 gcc_assert (e != NULL);
5669 }
5670
5671 for (psi = gsi_start_phis (e->dest),
5672 psi_copy = gsi_start_phis (e_copy->dest);
5673 !gsi_end_p (psi);
5674 gsi_next (&psi), gsi_next (&psi_copy))
5675 {
5676 phi = gsi_stmt (psi);
5677 phi_copy = gsi_stmt (psi_copy);
5678 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5679 add_phi_arg (phi_copy, def, e_copy,
5680 gimple_phi_arg_location_from_edge (phi, e));
5681 }
5682 }
5683
5684
5685 /* Basic block BB_COPY was created by code duplication. Add phi node
5686 arguments for edges going out of BB_COPY. The blocks that were
5687 duplicated have BB_DUPLICATED set. */
5688
5689 void
5690 add_phi_args_after_copy_bb (basic_block bb_copy)
5691 {
5692 edge e_copy;
5693 edge_iterator ei;
5694
5695 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5696 {
5697 add_phi_args_after_copy_edge (e_copy);
5698 }
5699 }
5700
5701 /* Blocks in REGION_COPY array of length N_REGION were created by
5702 duplication of basic blocks. Add phi node arguments for edges
5703 going from these blocks. If E_COPY is not NULL, also add
5704    phi node arguments for its destination.  */
5705
5706 void
5707 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5708 edge e_copy)
5709 {
5710 unsigned i;
5711
5712 for (i = 0; i < n_region; i++)
5713 region_copy[i]->flags |= BB_DUPLICATED;
5714
5715 for (i = 0; i < n_region; i++)
5716 add_phi_args_after_copy_bb (region_copy[i]);
5717 if (e_copy)
5718 add_phi_args_after_copy_edge (e_copy);
5719
5720 for (i = 0; i < n_region; i++)
5721 region_copy[i]->flags &= ~BB_DUPLICATED;
5722 }
5723
5724 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5725 important exit edge EXIT. By important we mean that no SSA name defined
5726 inside region is live over the other exit edges of the region. All entry
5727 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5728 to the duplicate of the region. Dominance and loop information is
5729 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5730 UPDATE_DOMINANCE is false then we assume that the caller will update the
5731 dominance information after calling this function. The new basic
5732 blocks are stored to REGION_COPY in the same order as they had in REGION,
5733 provided that REGION_COPY is not NULL.
5734 The function returns false if it is unable to copy the region,
5735 true otherwise. */
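/* For the primary use, loop header copying, ENTRY is the preheader
   edge into the header and EXIT the edge from the copied test into
   the loop body; after duplication the copy runs once on entry and
   the original test becomes the latch-side test, giving the loop a
   do-while shape (an approximate sketch of the intent; see the
   copying_header handling below).  */
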
5736
5737 bool
5738 gimple_duplicate_sese_region (edge entry, edge exit,
5739 basic_block *region, unsigned n_region,
5740 basic_block *region_copy,
5741 bool update_dominance)
5742 {
5743 unsigned i;
5744 bool free_region_copy = false, copying_header = false;
5745 struct loop *loop = entry->dest->loop_father;
5746 edge exit_copy;
5747 vec<basic_block> doms;
5748 edge redirected;
5749 int total_freq = 0, entry_freq = 0;
5750 gcov_type total_count = 0, entry_count = 0;
5751
5752 if (!can_copy_bbs_p (region, n_region))
5753 return false;
5754
5755 /* Some sanity checking. Note that we do not check for all possible
5756 	     misuses of the functions.  I.e. if you ask to copy something weird,
5757 it will work, but the state of structures probably will not be
5758 correct. */
5759 for (i = 0; i < n_region; i++)
5760 {
5761 /* We do not handle subloops, i.e. all the blocks must belong to the
5762 same loop. */
5763 if (region[i]->loop_father != loop)
5764 return false;
5765
5766 if (region[i] != entry->dest
5767 && region[i] == loop->header)
5768 return false;
5769 }
5770
5771 set_loop_copy (loop, loop);
5772
5773 	  /* In case the function is used for loop header copying (which is the
5774 	     primary use), ensure EXIT and its copy become the new latch and entry edges.  */
5775 if (loop->header == entry->dest)
5776 {
5777 copying_header = true;
5778 set_loop_copy (loop, loop_outer (loop));
5779
5780 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5781 return false;
5782
5783 for (i = 0; i < n_region; i++)
5784 if (region[i] != exit->src
5785 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5786 return false;
5787 }
5788
5789 if (!region_copy)
5790 {
5791 region_copy = XNEWVEC (basic_block, n_region);
5792 free_region_copy = true;
5793 }
5794
5795 initialize_original_copy_tables ();
5796
5797 /* Record blocks outside the region that are dominated by something
5798 inside. */
5799 if (update_dominance)
5800 {
5801 doms.create (0);
5802 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5803 }
5804
5805 if (entry->dest->count)
5806 {
5807 total_count = entry->dest->count;
5808 entry_count = entry->count;
5809 /* Fix up corner cases, to avoid division by zero or creation of negative
5810 frequencies. */
5811 if (entry_count > total_count)
5812 entry_count = total_count;
5813 }
5814 else
5815 {
5816 total_freq = entry->dest->frequency;
5817 entry_freq = EDGE_FREQUENCY (entry);
5818 /* Fix up corner cases, to avoid division by zero or creation of negative
5819 frequencies. */
5820 if (total_freq == 0)
5821 total_freq = 1;
5822 else if (entry_freq > total_freq)
5823 entry_freq = total_freq;
5824 }
5825
5826 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5827 split_edge_bb_loc (entry), update_dominance);
5828 if (total_count)
5829 {
5830 scale_bbs_frequencies_gcov_type (region, n_region,
5831 total_count - entry_count,
5832 total_count);
5833 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5834 total_count);
5835 }
5836 else
5837 {
5838 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5839 total_freq);
5840 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5841 }
5842
5843 if (copying_header)
5844 {
5845 loop->header = exit->dest;
5846 loop->latch = exit->src;
5847 }
5848
5849 /* Redirect the entry and add the phi node arguments. */
5850 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5851 gcc_assert (redirected != NULL);
5852 flush_pending_stmts (entry);
5853
5854 /* Concerning updating of dominators: We must recount dominators
5855 for entry block and its copy. Anything that is outside of the
5856 region, but was dominated by something inside needs recounting as
5857 well. */
5858 if (update_dominance)
5859 {
5860 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5861 doms.safe_push (get_bb_original (entry->dest));
5862 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5863 doms.release ();
5864 }
5865
5866 /* Add the other PHI node arguments. */
5867 add_phi_args_after_copy (region_copy, n_region, NULL);
5868
5869 if (free_region_copy)
5870 free (region_copy);
5871
5872 free_original_copy_tables ();
5873 return true;
5874 }
5875
5876 /* Checks if BB is part of the region defined by N_REGION BBS. */
5877 static bool
5878 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5879 {
5880 unsigned int n;
5881
5882 for (n = 0; n < n_region; n++)
5883 {
5884 if (bb == bbs[n])
5885 return true;
5886 }
5887 return false;
5888 }
5889
5890 /* Duplicates REGION consisting of N_REGION blocks.  The new blocks
5891    are stored to REGION_COPY in the same order in which they appear
5892    in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
5893 the region, EXIT an exit from it. The condition guarding EXIT
5894 is moved to ENTRY. Returns true if duplication succeeds, false
5895 otherwise.
5896
5897 For example,
5898
5899 some_code;
5900 if (cond)
5901 A;
5902 else
5903 B;
5904
5905 is transformed to
5906
5907 if (cond)
5908 {
5909 some_code;
5910 A;
5911 }
5912 else
5913 {
5914 some_code;
5915 B;
5916 }
5917 */
5918
5919 bool
5920 gimple_duplicate_sese_tail (edge entry, edge exit,
5921 			    basic_block *region, unsigned n_region,
5922 			    basic_block *region_copy)
5923 {
5924 unsigned i;
5925 bool free_region_copy = false;
5926 struct loop *loop = exit->dest->loop_father;
5927 struct loop *orig_loop = entry->dest->loop_father;
5928 basic_block switch_bb, entry_bb, nentry_bb;
5929 vec<basic_block> doms;
5930 int total_freq = 0, exit_freq = 0;
5931 gcov_type total_count = 0, exit_count = 0;
5932 edge exits[2], nexits[2], e;
5933 gimple_stmt_iterator gsi;
5934 gimple cond_stmt;
5935 edge sorig, snew;
5936 basic_block exit_bb;
5937 gimple_stmt_iterator psi;
5938 gimple phi;
5939 tree def;
5940 struct loop *target, *aloop, *cloop;
5941
5942 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5943 exits[0] = exit;
5944 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5945
5946 if (!can_copy_bbs_p (region, n_region))
5947 return false;
5948
5949 initialize_original_copy_tables ();
5950 set_loop_copy (orig_loop, loop);
5951
5952   target = loop;
5953 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5954 {
5955 if (bb_part_of_region_p (aloop->header, region, n_region))
5956 {
5957 cloop = duplicate_loop (aloop, target);
5958 duplicate_subloops (aloop, cloop);
5959 }
5960 }
5961
5962 if (!region_copy)
5963 {
5964 region_copy = XNEWVEC (basic_block, n_region);
5965 free_region_copy = true;
5966 }
5967
5968 gcc_assert (!need_ssa_update_p (cfun));
5969
5970 /* Record blocks outside the region that are dominated by something
5971 inside. */
5972 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5973
5974 if (exit->src->count)
5975 {
5976 total_count = exit->src->count;
5977 exit_count = exit->count;
5978 /* Fix up corner cases, to avoid division by zero or creation of negative
5979 frequencies. */
5980 if (exit_count > total_count)
5981 exit_count = total_count;
5982 }
5983 else
5984 {
5985 total_freq = exit->src->frequency;
5986 exit_freq = EDGE_FREQUENCY (exit);
5987 /* Fix up corner cases, to avoid division by zero or creation of negative
5988 frequencies. */
5989 if (total_freq == 0)
5990 total_freq = 1;
5991 if (exit_freq > total_freq)
5992 exit_freq = total_freq;
5993 }
5994
5995 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5996 split_edge_bb_loc (exit), true);
5997 if (total_count)
5998 {
5999 scale_bbs_frequencies_gcov_type (region, n_region,
6000 total_count - exit_count,
6001 total_count);
6002 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6003 total_count);
6004 }
6005 else
6006 {
6007 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6008 total_freq);
6009 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6010 }
6011
6012   /* Create the switch block, and put the exit condition into it.  */
6013 entry_bb = entry->dest;
6014 nentry_bb = get_bb_copy (entry_bb);
6015 if (!last_stmt (entry->src)
6016 || !stmt_ends_bb_p (last_stmt (entry->src)))
6017 switch_bb = entry->src;
6018 else
6019 switch_bb = split_edge (entry);
6020 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6021
6022 gsi = gsi_last_bb (switch_bb);
6023 cond_stmt = last_stmt (exit->src);
6024 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6025 cond_stmt = gimple_copy (cond_stmt);
6026
6027 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6028
6029 sorig = single_succ_edge (switch_bb);
6030 sorig->flags = exits[1]->flags;
6031 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6032
6033 /* Register the new edge from SWITCH_BB in loop exit lists. */
6034 rescan_loop_exit (snew, true, false);
6035
6036 /* Add the PHI node arguments. */
6037 add_phi_args_after_copy (region_copy, n_region, snew);
6038
6039 /* Get rid of now superfluous conditions and associated edges (and phi node
6040 arguments). */
6041 exit_bb = exit->dest;
6042
6043 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6044 PENDING_STMT (e) = NULL;
6045
6046 /* The latch of ORIG_LOOP was copied, and so was the backedge
6047 to the original header. We redirect this backedge to EXIT_BB. */
6048 for (i = 0; i < n_region; i++)
6049 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6050 {
6051 gcc_assert (single_succ_edge (region_copy[i]));
6052 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6053 PENDING_STMT (e) = NULL;
6054 for (psi = gsi_start_phis (exit_bb);
6055 !gsi_end_p (psi);
6056 gsi_next (&psi))
6057 {
6058 phi = gsi_stmt (psi);
6059 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6060 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6061 }
6062 }
6063 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6064 PENDING_STMT (e) = NULL;
6065
6066 /* Anything that is outside of the region, but was dominated by something
6067 inside, needs its dominance info updated. */
6068 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6069 doms.release ();
6070 /* Update the SSA web. */
6071 update_ssa (TODO_update_ssa);
6072
6073 if (free_region_copy)
6074 free (region_copy);
6075
6076 free_original_copy_tables ();
6077 return true;
6078 }
6079
6080 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6081 adding blocks when the dominator traversal reaches EXIT. This
6082 function silently assumes that ENTRY strictly dominates EXIT. */
6083
6084 void
6085 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6086 vec<basic_block> *bbs_p)
6087 {
6088 basic_block son;
6089
6090 for (son = first_dom_son (CDI_DOMINATORS, entry);
6091 son;
6092 son = next_dom_son (CDI_DOMINATORS, son))
6093 {
6094 bbs_p->safe_push (son);
6095 if (son != exit)
6096 gather_blocks_in_sese_region (son, exit, bbs_p);
6097 }
6098 }
6099
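/* Illustrative usage sketch (not in the original sources): a caller
   seeds the vector with ENTRY itself, since the traversal above only
   pushes dominator-tree sons, and releases the vector when done:

     vec<basic_block> bbs = vNULL;
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ... use bbs ...
     bbs.release ();

   This is the pattern move_sese_region_to_fn follows below.  */
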
6100 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6101 The duplicates are recorded in VARS_MAP. */
6102
6103 static void
6104 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6105 tree to_context)
6106 {
6107 tree t = *tp, new_t;
6108 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6109 void **loc;
6110
6111 if (DECL_CONTEXT (t) == to_context)
6112 return;
6113
6114 loc = pointer_map_contains (vars_map, t);
6115
6116 if (!loc)
6117 {
6118 loc = pointer_map_insert (vars_map, t);
6119
6120 if (SSA_VAR_P (t))
6121 {
6122 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6123 add_local_decl (f, new_t);
6124 }
6125 else
6126 {
6127 gcc_assert (TREE_CODE (t) == CONST_DECL);
6128 new_t = copy_node (t);
6129 }
6130 DECL_CONTEXT (new_t) = to_context;
6131
6132 *loc = new_t;
6133 }
6134 else
6135 new_t = (tree) *loc;
6136
6137 *tp = new_t;
6138 }
6139
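/* Illustrative sketch (hypothetical names): the helper rewrites its
   argument in place and memoizes the copy in VARS_MAP, so repeated
   calls for the same decl return the same duplicate:

     tree d = local_var;
     replace_by_duplicate_decl (&d, vars_map, child_fn_decl);

   Afterwards D is the copy whose DECL_CONTEXT is CHILD_FN_DECL; a
   second call starting from LOCAL_VAR yields the same copy.  */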
6140
6141 /* Creates an SSA name in TO_CONTEXT equivalent to NAME.
6142 VARS_MAP maps old SSA names and var_decls to the new ones. */
6143
6144 static tree
6145 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6146 tree to_context)
6147 {
6148 void **loc;
6149 tree new_name;
6150
6151 gcc_assert (!virtual_operand_p (name));
6152
6153 loc = pointer_map_contains (vars_map, name);
6154
6155 if (!loc)
6156 {
6157 tree decl = SSA_NAME_VAR (name);
6158 if (decl)
6159 {
6160 replace_by_duplicate_decl (&decl, vars_map, to_context);
6161 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6162 decl, SSA_NAME_DEF_STMT (name));
6163 if (SSA_NAME_IS_DEFAULT_DEF (name))
6164 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6165 decl, new_name);
6166 }
6167 else
6168 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6169 name, SSA_NAME_DEF_STMT (name));
6170
6171 loc = pointer_map_insert (vars_map, name);
6172 *loc = new_name;
6173 }
6174 else
6175 new_name = (tree) *loc;
6176
6177 return new_name;
6178 }
6179
6180 struct move_stmt_d
6181 {
6182 tree orig_block;
6183 tree new_block;
6184 tree from_context;
6185 tree to_context;
6186 struct pointer_map_t *vars_map;
6187 htab_t new_label_map;
6188 struct pointer_map_t *eh_map;
6189 bool remap_decls_p;
6190 };
6191
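/* Illustrative sketch: a move_stmt_d is zero-initialized and then
   filled in field by field, as move_sese_region_to_fn does below:

     struct move_stmt_d d;
     memset (&d, 0, sizeof (d));
     d.orig_block = orig_block;
     d.new_block = DECL_INITIAL (dest_cfun->decl);
     d.vars_map = vars_map;
     d.remap_decls_p = true;

   It is then threaded through the statement walk via wi->info.  */
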
6192 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6193 contained in *TP to NEW_BLOCK if it was ORIG_BLOCK before, and change
6194 the DECL_CONTEXT of every local variable referenced in *TP. */
6195
6196 static tree
6197 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6198 {
6199 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6200 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6201 tree t = *tp;
6202
6203 if (EXPR_P (t))
6204 {
6205 tree block = TREE_BLOCK (t);
6206 if (block == p->orig_block
6207 || (p->orig_block == NULL_TREE
6208 && block != NULL_TREE))
6209 TREE_SET_BLOCK (t, p->new_block);
6210 #ifdef ENABLE_CHECKING
6211 else if (block != NULL_TREE)
6212 {
6213 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6214 block = BLOCK_SUPERCONTEXT (block);
6215 gcc_assert (block == p->orig_block);
6216 }
6217 #endif
6218 }
6219 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6220 {
6221 if (TREE_CODE (t) == SSA_NAME)
6222 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6223 else if (TREE_CODE (t) == LABEL_DECL)
6224 {
6225 if (p->new_label_map)
6226 {
6227 struct tree_map in, *out;
6228 in.base.from = t;
6229 out = (struct tree_map *)
6230 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6231 if (out)
6232 *tp = t = out->to;
6233 }
6234
6235 DECL_CONTEXT (t) = p->to_context;
6236 }
6237 else if (p->remap_decls_p)
6238 {
6239 /* Replace T with its duplicate. T should no longer appear in the
6240 parent function, so this looks wasteful; however, it may appear
6241 in referenced_vars, and more importantly, as virtual operands of
6242 statements, and in alias lists of other variables. It would be
6243 quite difficult to expunge it from all those places. ??? It might
6244 suffice to do this for addressable variables. */
6245 if ((TREE_CODE (t) == VAR_DECL
6246 && !is_global_var (t))
6247 || TREE_CODE (t) == CONST_DECL)
6248 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6249 }
6250 *walk_subtrees = 0;
6251 }
6252 else if (TYPE_P (t))
6253 *walk_subtrees = 0;
6254
6255 return NULL_TREE;
6256 }
6257
6258 /* Helper for move_stmt_r. Given an EH region number for the source
6259 function, map that to the duplicate EH region number in the dest. */
6260
6261 static int
6262 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6263 {
6264 eh_region old_r, new_r;
6265 void **slot;
6266
6267 old_r = get_eh_region_from_number (old_nr);
6268 slot = pointer_map_contains (p->eh_map, old_r);
6269 new_r = (eh_region) *slot;
6270
6271 return new_r->index;
6272 }
6273
6274 /* Similar, but operate on INTEGER_CSTs. */
6275
6276 static tree
6277 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6278 {
6279 int old_nr, new_nr;
6280
6281 old_nr = tree_to_shwi (old_t_nr);
6282 new_nr = move_stmt_eh_region_nr (old_nr, p);
6283
6284 return build_int_cst (integer_type_node, new_nr);
6285 }
6286
6287 /* Like move_stmt_op, but for gimple statements.
6288
6289 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6290 contained in the current statement in *GSI_P and change the
6291 DECL_CONTEXT of every local variable referenced in the current
6292 statement. */
6293
6294 static tree
6295 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6296 struct walk_stmt_info *wi)
6297 {
6298 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6299 gimple stmt = gsi_stmt (*gsi_p);
6300 tree block = gimple_block (stmt);
6301
6302 if (block == p->orig_block
6303 || (p->orig_block == NULL_TREE
6304 && block != NULL_TREE))
6305 gimple_set_block (stmt, p->new_block);
6306
6307 switch (gimple_code (stmt))
6308 {
6309 case GIMPLE_CALL:
6310 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6311 {
6312 tree r, fndecl = gimple_call_fndecl (stmt);
6313 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6314 switch (DECL_FUNCTION_CODE (fndecl))
6315 {
6316 case BUILT_IN_EH_COPY_VALUES:
6317 r = gimple_call_arg (stmt, 1);
6318 r = move_stmt_eh_region_tree_nr (r, p);
6319 gimple_call_set_arg (stmt, 1, r);
6320 /* FALLTHRU */
6321
6322 case BUILT_IN_EH_POINTER:
6323 case BUILT_IN_EH_FILTER:
6324 r = gimple_call_arg (stmt, 0);
6325 r = move_stmt_eh_region_tree_nr (r, p);
6326 gimple_call_set_arg (stmt, 0, r);
6327 break;
6328
6329 default:
6330 break;
6331 }
6332 }
6333 break;
6334
6335 case GIMPLE_RESX:
6336 {
6337 int r = gimple_resx_region (stmt);
6338 r = move_stmt_eh_region_nr (r, p);
6339 gimple_resx_set_region (stmt, r);
6340 }
6341 break;
6342
6343 case GIMPLE_EH_DISPATCH:
6344 {
6345 int r = gimple_eh_dispatch_region (stmt);
6346 r = move_stmt_eh_region_nr (r, p);
6347 gimple_eh_dispatch_set_region (stmt, r);
6348 }
6349 break;
6350
6351 case GIMPLE_OMP_RETURN:
6352 case GIMPLE_OMP_CONTINUE:
6353 break;
6354 default:
6355 if (is_gimple_omp (stmt))
6356 {
6357 /* Do not remap variables inside OMP directives. Variables
6358 referenced in clauses and directive header belong to the
6359 parent function and should not be moved into the child
6360 function. */
6361 bool save_remap_decls_p = p->remap_decls_p;
6362 p->remap_decls_p = false;
6363 *handled_ops_p = true;
6364
6365 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6366 move_stmt_op, wi);
6367
6368 p->remap_decls_p = save_remap_decls_p;
6369 }
6370 break;
6371 }
6372
6373 return NULL_TREE;
6374 }
6375
6376 /* Move basic block BB from function CFUN to function DEST_FN. The
6377 block is moved out of the original linked list and placed after
6378 block AFTER in the new list. Also, the block is removed from the
6379 original array of blocks and placed in DEST_FN's array of blocks.
6380 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6381 updated to reflect the moved edges.
6382
6383 The local variables are remapped to new instances, VARS_MAP is used
6384 to record the mapping. */
6385
6386 static void
6387 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6388 basic_block after, bool update_edge_count_p,
6389 struct move_stmt_d *d)
6390 {
6391 struct control_flow_graph *cfg;
6392 edge_iterator ei;
6393 edge e;
6394 gimple_stmt_iterator si;
6395 unsigned old_len, new_len;
6396
6397 /* Remove BB from dominance structures. */
6398 delete_from_dominance_info (CDI_DOMINATORS, bb);
6399
6400 /* Move BB from its current loop to the copy in the new function. */
6401 if (current_loops)
6402 {
6403 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6404 if (new_loop)
6405 bb->loop_father = new_loop;
6406 }
6407
6408 /* Link BB to the new linked list. */
6409 move_block_after (bb, after);
6410
6411 /* Update the edge count in the corresponding flowgraphs. */
6412 if (update_edge_count_p)
6413 FOR_EACH_EDGE (e, ei, bb->succs)
6414 {
6415 cfun->cfg->x_n_edges--;
6416 dest_cfun->cfg->x_n_edges++;
6417 }
6418
6419 /* Remove BB from the original basic block array. */
6420 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6421 cfun->cfg->x_n_basic_blocks--;
6422
6423 /* Grow DEST_CFUN's basic block array if needed. */
6424 cfg = dest_cfun->cfg;
6425 cfg->x_n_basic_blocks++;
6426 if (bb->index >= cfg->x_last_basic_block)
6427 cfg->x_last_basic_block = bb->index + 1;
6428
6429 old_len = vec_safe_length (cfg->x_basic_block_info);
6430 if ((unsigned) cfg->x_last_basic_block >= old_len)
6431 {
6432 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6433 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6434 }
6435
6436 (*cfg->x_basic_block_info)[bb->index] = bb;
6437
6438 /* Remap the variables in phi nodes. */
6439 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6440 {
6441 gimple phi = gsi_stmt (si);
6442 use_operand_p use;
6443 tree op = PHI_RESULT (phi);
6444 ssa_op_iter oi;
6445 unsigned i;
6446
6447 if (virtual_operand_p (op))
6448 {
6449 /* Remove the phi nodes for virtual operands (alias analysis will be
6450 run for the new function, anyway). */
6451 remove_phi_node (&si, true);
6452 continue;
6453 }
6454
6455 SET_PHI_RESULT (phi,
6456 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6457 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6458 {
6459 op = USE_FROM_PTR (use);
6460 if (TREE_CODE (op) == SSA_NAME)
6461 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6462 }
6463
6464 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6465 {
6466 location_t locus = gimple_phi_arg_location (phi, i);
6467 tree block = LOCATION_BLOCK (locus);
6468
6469 if (locus == UNKNOWN_LOCATION)
6470 continue;
6471 if (d->orig_block == NULL_TREE || block == d->orig_block)
6472 {
6473 if (d->new_block == NULL_TREE)
6474 locus = LOCATION_LOCUS (locus);
6475 else
6476 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6477 gimple_phi_arg_set_location (phi, i, locus);
6478 }
6479 }
6480
6481 gsi_next (&si);
6482 }
6483
6484 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6485 {
6486 gimple stmt = gsi_stmt (si);
6487 struct walk_stmt_info wi;
6488
6489 memset (&wi, 0, sizeof (wi));
6490 wi.info = d;
6491 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6492
6493 if (gimple_code (stmt) == GIMPLE_LABEL)
6494 {
6495 tree label = gimple_label_label (stmt);
6496 int uid = LABEL_DECL_UID (label);
6497
6498 gcc_assert (uid > -1);
6499
6500 old_len = vec_safe_length (cfg->x_label_to_block_map);
6501 if (old_len <= (unsigned) uid)
6502 {
6503 new_len = 3 * uid / 2 + 1;
6504 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6505 }
6506
6507 (*cfg->x_label_to_block_map)[uid] = bb;
6508 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6509
6510 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6511
6512 if (uid >= dest_cfun->cfg->last_label_uid)
6513 dest_cfun->cfg->last_label_uid = uid + 1;
6514 }
6515
6516 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6517 remove_stmt_from_eh_lp_fn (cfun, stmt);
6518
6519 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6520 gimple_remove_stmt_histograms (cfun, stmt);
6521
6522 /* We cannot leave any operands allocated from the operand caches of
6523 the current function. */
6524 free_stmt_operands (stmt);
6525 push_cfun (dest_cfun);
6526 update_stmt (stmt);
6527 pop_cfun ();
6528 }
6529
6530 FOR_EACH_EDGE (e, ei, bb->succs)
6531 if (e->goto_locus != UNKNOWN_LOCATION)
6532 {
6533 tree block = LOCATION_BLOCK (e->goto_locus);
6534 if (d->orig_block == NULL_TREE
6535 || block == d->orig_block)
6536 e->goto_locus = d->new_block ?
6537 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6538 LOCATION_LOCUS (e->goto_locus);
6539 }
6540 }
6541
6542 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6543 the outermost EH region. Use REGION as the incoming base EH region. */
6544
6545 static eh_region
6546 find_outermost_region_in_block (struct function *src_cfun,
6547 basic_block bb, eh_region region)
6548 {
6549 gimple_stmt_iterator si;
6550
6551 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6552 {
6553 gimple stmt = gsi_stmt (si);
6554 eh_region stmt_region;
6555 int lp_nr;
6556
6557 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6558 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6559 if (stmt_region)
6560 {
6561 if (region == NULL)
6562 region = stmt_region;
6563 else if (stmt_region != region)
6564 {
6565 region = eh_region_outermost (src_cfun, stmt_region, region);
6566 gcc_assert (region != NULL);
6567 }
6568 }
6569 }
6570
6571 return region;
6572 }
6573
6574 static tree
6575 new_label_mapper (tree decl, void *data)
6576 {
6577 htab_t hash = (htab_t) data;
6578 struct tree_map *m;
6579 void **slot;
6580
6581 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6582
6583 m = XNEW (struct tree_map);
6584 m->hash = DECL_UID (decl);
6585 m->base.from = decl;
6586 m->to = create_artificial_label (UNKNOWN_LOCATION);
6587 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6588 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6589 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6590
6591 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6592 gcc_assert (*slot == NULL);
6593
6594 *slot = m;
6595
6596 return m->to;
6597 }
6598
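/* Illustrative sketch: new_label_mapper is the label-duplication
   callback passed when copying EH regions into the new function, as
   move_sese_region_to_fn does below:

     new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
     eh_map = duplicate_eh_regions (saved_cfun, region, 0,
                                    new_label_mapper, new_label_map);

   Every label met during the duplication is mapped to a fresh
   artificial label recorded in NEW_LABEL_MAP.  */
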
6599 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6600 subblocks. */
6601
6602 static void
6603 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6604 tree to_context)
6605 {
6606 tree *tp, t;
6607
6608 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6609 {
6610 t = *tp;
6611 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6612 continue;
6613 replace_by_duplicate_decl (&t, vars_map, to_context);
6614 if (t != *tp)
6615 {
6616 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6617 {
6618 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6619 DECL_HAS_VALUE_EXPR_P (t) = 1;
6620 }
6621 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6622 *tp = t;
6623 }
6624 }
6625
6626 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6627 replace_block_vars_by_duplicates (block, vars_map, to_context);
6628 }
6629
6630 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6631 from FN1 to FN2. */
6632
6633 static void
6634 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6635 struct loop *loop)
6636 {
6637 /* Discard it from the old loop array. */
6638 (*get_loops (fn1))[loop->num] = NULL;
6639
6640 /* Place it in the new loop array, assigning it a new number. */
6641 loop->num = number_of_loops (fn2);
6642 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6643
6644 /* Recurse to children. */
6645 for (loop = loop->inner; loop; loop = loop->next)
6646 fixup_loop_arrays_after_move (fn1, fn2, loop);
6647 }
6648
6649 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6650 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6651 single basic block in the original CFG and the new basic block is
6652 returned. DEST_CFUN must not have a CFG yet.
6653
6654 Note that the region need not be a pure SESE region. Blocks inside
6655 the region may contain calls to abort/exit. The only restriction
6656 is that ENTRY_BB should be the only entry point and it must
6657 dominate EXIT_BB.
6658
6659 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6660 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6661 to the new function.
6662
6663 All local variables referenced in the region are assumed to be in
6664 the corresponding BLOCK_VARS and unexpanded variable lists
6665 associated with DEST_CFUN. */
6666
6667 basic_block
6668 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6669 basic_block exit_bb, tree orig_block)
6670 {
6671 vec<basic_block> bbs, dom_bbs;
6672 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6673 basic_block after, bb, *entry_pred, *exit_succ, abb;
6674 struct function *saved_cfun = cfun;
6675 int *entry_flag, *exit_flag;
6676 unsigned *entry_prob, *exit_prob;
6677 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6678 edge e;
6679 edge_iterator ei;
6680 htab_t new_label_map;
6681 struct pointer_map_t *vars_map, *eh_map;
6682 struct loop *loop = entry_bb->loop_father;
6683 struct loop *loop0 = get_loop (saved_cfun, 0);
6684 struct move_stmt_d d;
6685
6686 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6687 region. */
6688 gcc_assert (entry_bb != exit_bb
6689 && (!exit_bb
6690 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6691
6692 /* Collect all the blocks in the region. Manually add ENTRY_BB
6693 because gather_blocks_in_sese_region won't add it. */
6694 bbs.create (0);
6695 bbs.safe_push (entry_bb);
6696 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6697
6698 /* The blocks that used to be dominated by something in BBS will now be
6699 dominated by the new block. */
6700 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6701 bbs.address (),
6702 bbs.length ());
6703
6704 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6705 the predecessor edges to ENTRY_BB and the successor edges to
6706 EXIT_BB so that we can re-attach them to the new basic block that
6707 will replace the region. */
6708 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6709 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6710 entry_flag = XNEWVEC (int, num_entry_edges);
6711 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6712 i = 0;
6713 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6714 {
6715 entry_prob[i] = e->probability;
6716 entry_flag[i] = e->flags;
6717 entry_pred[i++] = e->src;
6718 remove_edge (e);
6719 }
6720
6721 if (exit_bb)
6722 {
6723 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6724 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6725 exit_flag = XNEWVEC (int, num_exit_edges);
6726 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6727 i = 0;
6728 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6729 {
6730 exit_prob[i] = e->probability;
6731 exit_flag[i] = e->flags;
6732 exit_succ[i++] = e->dest;
6733 remove_edge (e);
6734 }
6735 }
6736 else
6737 {
6738 num_exit_edges = 0;
6739 exit_succ = NULL;
6740 exit_flag = NULL;
6741 exit_prob = NULL;
6742 }
6743
6744 /* Switch context to the child function to initialize DEST_FN's CFG. */
6745 gcc_assert (dest_cfun->cfg == NULL);
6746 push_cfun (dest_cfun);
6747
6748 init_empty_tree_cfg ();
6749
6750 /* Initialize EH information for the new function. */
6751 eh_map = NULL;
6752 new_label_map = NULL;
6753 if (saved_cfun->eh)
6754 {
6755 eh_region region = NULL;
6756
6757 FOR_EACH_VEC_ELT (bbs, i, bb)
6758 region = find_outermost_region_in_block (saved_cfun, bb, region);
6759
6760 init_eh_for_function ();
6761 if (region != NULL)
6762 {
6763 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6764 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6765 new_label_mapper, new_label_map);
6766 }
6767 }
6768
6769 /* Initialize an empty loop tree. */
6770 struct loops *loops = ggc_alloc_cleared_loops ();
6771 init_loops_structure (dest_cfun, loops, 1);
6772 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6773 set_loops_for_fn (dest_cfun, loops);
6774
6775 /* Move the outlined loop tree part. */
6776 num_nodes = bbs.length ();
6777 FOR_EACH_VEC_ELT (bbs, i, bb)
6778 {
6779 if (bb->loop_father->header == bb)
6780 {
6781 struct loop *this_loop = bb->loop_father;
6782 struct loop *outer = loop_outer (this_loop);
6783 if (outer == loop
6784 /* If the SESE region contains some bbs ending with
6785 a noreturn call, those are considered to belong
6786 to the outermost loop in saved_cfun, rather than
6787 the entry_bb's loop_father. */
6788 || outer == loop0)
6789 {
6790 if (outer != loop)
6791 num_nodes -= this_loop->num_nodes;
6792 flow_loop_tree_node_remove (bb->loop_father);
6793 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6794 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6795 }
6796 }
6797 else if (bb->loop_father == loop0 && loop0 != loop)
6798 num_nodes--;
6799
6800 /* Remove loop exits from the outlined region. */
6801 if (loops_for_fn (saved_cfun)->exits)
6802 FOR_EACH_EDGE (e, ei, bb->succs)
6803 {
6804 void **slot = htab_find_slot_with_hash
6805 (loops_for_fn (saved_cfun)->exits, e,
6806 htab_hash_pointer (e), NO_INSERT);
6807 if (slot)
6808 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6809 }
6810 }
6811
6812
6813 /* Adjust the number of blocks in the tree root of the outlined part. */
6814 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6815
6816 /* Set up a mapping to be used by move_block_to_fn. */
6817 loop->aux = current_loops->tree_root;
6818 loop0->aux = current_loops->tree_root;
6819
6820 pop_cfun ();
6821
6822 /* Move blocks from BBS into DEST_CFUN. */
6823 gcc_assert (bbs.length () >= 2);
6824 after = dest_cfun->cfg->x_entry_block_ptr;
6825 vars_map = pointer_map_create ();
6826
6827 memset (&d, 0, sizeof (d));
6828 d.orig_block = orig_block;
6829 d.new_block = DECL_INITIAL (dest_cfun->decl);
6830 d.from_context = cfun->decl;
6831 d.to_context = dest_cfun->decl;
6832 d.vars_map = vars_map;
6833 d.new_label_map = new_label_map;
6834 d.eh_map = eh_map;
6835 d.remap_decls_p = true;
6836
6837 FOR_EACH_VEC_ELT (bbs, i, bb)
6838 {
6839 /* No need to update edge counts on the last block. They have
6840 already been updated earlier when we detached the region from
6841 the original CFG. */
6842 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6843 after = bb;
6844 }
6845
6846 loop->aux = NULL;
6847 loop0->aux = NULL;
6848 /* Loop sizes are no longer correct, fix them up. */
6849 loop->num_nodes -= num_nodes;
6850 for (struct loop *outer = loop_outer (loop);
6851 outer; outer = loop_outer (outer))
6852 outer->num_nodes -= num_nodes;
6853 loop0->num_nodes -= bbs.length () - num_nodes;
6854
6855 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6856 {
6857 struct loop *aloop;
6858 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6859 if (aloop != NULL)
6860 {
6861 if (aloop->simduid)
6862 {
6863 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6864 d.to_context);
6865 dest_cfun->has_simduid_loops = true;
6866 }
6867 if (aloop->force_vect)
6868 dest_cfun->has_force_vect_loops = true;
6869 }
6870 }
6871
6872 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6873 if (orig_block)
6874 {
6875 tree block;
6876 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6877 == NULL_TREE);
6878 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6879 = BLOCK_SUBBLOCKS (orig_block);
6880 for (block = BLOCK_SUBBLOCKS (orig_block);
6881 block; block = BLOCK_CHAIN (block))
6882 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6883 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6884 }
6885
6886 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6887 vars_map, dest_cfun->decl);
6888
6889 if (new_label_map)
6890 htab_delete (new_label_map);
6891 if (eh_map)
6892 pointer_map_destroy (eh_map);
6893 pointer_map_destroy (vars_map);
6894
6895 /* Rewire the entry and exit blocks. The successor to the entry
6896 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6897 the child function. Similarly, the predecessor of DEST_FN's
6898 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6899 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6900 various CFG manipulation functions get to the right CFG.
6901
6902 FIXME, this is silly. The CFG ought to become a parameter to
6903 these helpers. */
6904 push_cfun (dest_cfun);
6905 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6906 if (exit_bb)
6907 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6908 pop_cfun ();
6909
6910 /* Back in the original function, the SESE region has disappeared,
6911 so create a new basic block in its place. */
6912 bb = create_empty_bb (entry_pred[0]);
6913 if (current_loops)
6914 add_bb_to_loop (bb, loop);
6915 for (i = 0; i < num_entry_edges; i++)
6916 {
6917 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6918 e->probability = entry_prob[i];
6919 }
6920
6921 for (i = 0; i < num_exit_edges; i++)
6922 {
6923 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6924 e->probability = exit_prob[i];
6925 }
6926
6927 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6928 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6929 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6930 dom_bbs.release ();
6931
6932 if (exit_bb)
6933 {
6934 free (exit_prob);
6935 free (exit_flag);
6936 free (exit_succ);
6937 }
6938 free (entry_prob);
6939 free (entry_flag);
6940 free (entry_pred);
6941 bbs.release ();
6942
6943 return bb;
6944 }
6945
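/* Illustrative caller sketch (names hypothetical): an outlining pass
   such as the OMP expander uses this roughly as

     child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb,
                                      block);

   after which NEW_BB stands in for the whole region in the parent's
   CFG and CHILD_CFUN owns the moved blocks.  */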
6946
6947 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6948 dumpfile.h). */
6949
6950 void
6951 dump_function_to_file (tree fndecl, FILE *file, int flags)
6952 {
6953 tree arg, var, old_current_fndecl = current_function_decl;
6954 struct function *dsf;
6955 bool ignore_topmost_bind = false, any_var = false;
6956 basic_block bb;
6957 tree chain;
6958 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6959 && decl_is_tm_clone (fndecl));
6960 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6961
6962 current_function_decl = fndecl;
6963 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6964
6965 arg = DECL_ARGUMENTS (fndecl);
6966 while (arg)
6967 {
6968 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6969 fprintf (file, " ");
6970 print_generic_expr (file, arg, dump_flags);
6971 if (flags & TDF_VERBOSE)
6972 print_node (file, "", arg, 4);
6973 if (DECL_CHAIN (arg))
6974 fprintf (file, ", ");
6975 arg = DECL_CHAIN (arg);
6976 }
6977 fprintf (file, ")\n");
6978
6979 if (flags & TDF_VERBOSE)
6980 print_node (file, "", fndecl, 2);
6981
6982 dsf = DECL_STRUCT_FUNCTION (fndecl);
6983 if (dsf && (flags & TDF_EH))
6984 dump_eh_tree (file, dsf);
6985
6986 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
6987 {
6988 dump_node (fndecl, TDF_SLIM | flags, file);
6989 current_function_decl = old_current_fndecl;
6990 return;
6991 }
6992
6993 /* When GIMPLE is lowered, the variables are no longer available in
6994 BIND_EXPRs, so display them separately. */
6995 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
6996 {
6997 unsigned ix;
6998 ignore_topmost_bind = true;
6999
7000 fprintf (file, "{\n");
7001 if (!vec_safe_is_empty (fun->local_decls))
7002 FOR_EACH_LOCAL_DECL (fun, ix, var)
7003 {
7004 print_generic_decl (file, var, flags);
7005 if (flags & TDF_VERBOSE)
7006 print_node (file, "", var, 4);
7007 fprintf (file, "\n");
7008
7009 any_var = true;
7010 }
7011 if (gimple_in_ssa_p (cfun))
7012 for (ix = 1; ix < num_ssa_names; ++ix)
7013 {
7014 tree name = ssa_name (ix);
7015 if (name && !SSA_NAME_VAR (name))
7016 {
7017 fprintf (file, " ");
7018 print_generic_expr (file, TREE_TYPE (name), flags);
7019 fprintf (file, " ");
7020 print_generic_expr (file, name, flags);
7021 fprintf (file, ";\n");
7022
7023 any_var = true;
7024 }
7025 }
7026 }
7027
7028 if (fun && fun->decl == fndecl
7029 && fun->cfg
7030 && basic_block_info_for_function (fun))
7031 {
7032 /* If the CFG has been built, emit a CFG-based dump. */
7033 if (!ignore_topmost_bind)
7034 fprintf (file, "{\n");
7035
7036 if (any_var && n_basic_blocks_for_fn (fun))
7037 fprintf (file, "\n");
7038
7039 FOR_EACH_BB_FN (bb, fun)
7040 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7041
7042 fprintf (file, "}\n");
7043 }
7044 else if (DECL_SAVED_TREE (fndecl) == NULL)
7045 {
7046 /* The function is now in GIMPLE form but the CFG has not been
7047 built yet. Emit the single sequence of GIMPLE statements
7048 that make up its body. */
7049 gimple_seq body = gimple_body (fndecl);
7050
7051 if (gimple_seq_first_stmt (body)
7052 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7053 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7054 print_gimple_seq (file, body, 0, flags);
7055 else
7056 {
7057 if (!ignore_topmost_bind)
7058 fprintf (file, "{\n");
7059
7060 if (any_var)
7061 fprintf (file, "\n");
7062
7063 print_gimple_seq (file, body, 2, flags);
7064 fprintf (file, "}\n");
7065 }
7066 }
7067 else
7068 {
7069 int indent;
7070
7071 /* Make a tree based dump. */
7072 chain = DECL_SAVED_TREE (fndecl);
7073 if (chain && TREE_CODE (chain) == BIND_EXPR)
7074 {
7075 if (ignore_topmost_bind)
7076 {
7077 chain = BIND_EXPR_BODY (chain);
7078 indent = 2;
7079 }
7080 else
7081 indent = 0;
7082 }
7083 else
7084 {
7085 if (!ignore_topmost_bind)
7086 fprintf (file, "{\n");
7087 indent = 2;
7088 }
7089
7090 if (any_var)
7091 fprintf (file, "\n");
7092
7093 print_generic_stmt_indented (file, chain, flags, indent);
7094 if (ignore_topmost_bind)
7095 fprintf (file, "}\n");
7096 }
7097
7098 if (flags & TDF_ENUMERATE_LOCALS)
7099 dump_enumerated_decls (file, flags);
7100 fprintf (file, "\n\n");
7101
7102 current_function_decl = old_current_fndecl;
7103 }
7104
7105 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7106
7107 DEBUG_FUNCTION void
7108 debug_function (tree fn, int flags)
7109 {
7110 dump_function_to_file (fn, stderr, flags);
7111 }
7112
7113
7114 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7115
7116 static void
7117 print_pred_bbs (FILE *file, basic_block bb)
7118 {
7119 edge e;
7120 edge_iterator ei;
7121
7122 FOR_EACH_EDGE (e, ei, bb->preds)
7123 fprintf (file, "bb_%d ", e->src->index);
7124 }
7125
7126
7127 /* Print on FILE the indexes for the successors of basic_block BB. */
7128
7129 static void
7130 print_succ_bbs (FILE *file, basic_block bb)
7131 {
7132 edge e;
7133 edge_iterator ei;
7134
7135 FOR_EACH_EDGE (e, ei, bb->succs)
7136 fprintf (file, "bb_%d ", e->dest->index);
7137 }
7138
7139 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
7140
7141 void
7142 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7143 {
7144 char *s_indent = (char *) alloca ((size_t) indent + 1);
7145 memset ((void *) s_indent, ' ', (size_t) indent);
7146 s_indent[indent] = '\0';
7147
7148 /* Print basic_block's header. */
7149 if (verbosity >= 2)
7150 {
7151 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7152 print_pred_bbs (file, bb);
7153 fprintf (file, "}, succs = {");
7154 print_succ_bbs (file, bb);
7155 fprintf (file, "})\n");
7156 }
7157
7158 /* Print basic_block's body. */
7159 if (verbosity >= 3)
7160 {
7161 fprintf (file, "%s {\n", s_indent);
7162 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7163 fprintf (file, "%s }\n", s_indent);
7164 }
7165 }
7166
7167 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7168
7169 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7170 VERBOSITY level, this outputs the contents of the loop, or just its
7171 structure. */
7172
7173 static void
7174 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7175 {
7176 char *s_indent;
7177 basic_block bb;
7178
7179 if (loop == NULL)
7180 return;
7181
7182 s_indent = (char *) alloca ((size_t) indent + 1);
7183 memset ((void *) s_indent, ' ', (size_t) indent);
7184 s_indent[indent] = '\0';
7185
7186 /* Print loop's header. */
7187 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7188 if (loop->header)
7189 fprintf (file, "header = %d", loop->header->index);
7190 else
7191 {
7192 fprintf (file, "deleted)\n");
7193 return;
7194 }
7195 if (loop->latch)
7196 fprintf (file, ", latch = %d", loop->latch->index);
7197 else
7198 fprintf (file, ", multiple latches");
7199 fprintf (file, ", niter = ");
7200 print_generic_expr (file, loop->nb_iterations, 0);
7201
7202 if (loop->any_upper_bound)
7203 {
7204 fprintf (file, ", upper_bound = ");
7205 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7206 }
7207
7208 if (loop->any_estimate)
7209 {
7210 fprintf (file, ", estimate = ");
7211 dump_double_int (file, loop->nb_iterations_estimate, true);
7212 }
7213 fprintf (file, ")\n");
7214
7215 /* Print loop's body. */
7216 if (verbosity >= 1)
7217 {
7218 fprintf (file, "%s{\n", s_indent);
7219 FOR_EACH_BB (bb)
7220 if (bb->loop_father == loop)
7221 print_loops_bb (file, bb, indent, verbosity);
7222
7223 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7224 fprintf (file, "%s}\n", s_indent);
7225 }
7226 }
7227
7228 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7229 spaces. Depending on the VERBOSITY level, this outputs the contents of the
7230 loop, or just its structure. */
7231
7232 static void
7233 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7234 int verbosity)
7235 {
7236 if (loop == NULL)
7237 return;
7238
7239 print_loop (file, loop, indent, verbosity);
7240 print_loop_and_siblings (file, loop->next, indent, verbosity);
7241 }
7242
7243 /* Follow a CFG edge from the entry point of the program, and on entry
7244 of a loop, pretty print the loop structure on FILE. */
7245
7246 void
7247 print_loops (FILE *file, int verbosity)
7248 {
7249 basic_block bb;
7250
7251 bb = ENTRY_BLOCK_PTR;
7252 if (bb && bb->loop_father)
7253 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7254 }
7255
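/* Illustrative sample of the shape of the output produced by the
   printers above (block and loop numbers hypothetical); at
   verbosity 0:

     loop_0 (header = 0, latch = 1, niter = )
       loop_1 (header = 3, latch = 4, niter = , upper_bound = 16)

   Verbosity 2 and above additionally print each loop's basic blocks
   with their predecessor and successor lists; verbosity 3 and above
   dump the block bodies as well.  */
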
7256 /* Dump a loop. */
7257
7258 DEBUG_FUNCTION void
7259 debug (struct loop &ref)
7260 {
7261 print_loop (stderr, &ref, 0, /*verbosity*/0);
7262 }
7263
7264 DEBUG_FUNCTION void
7265 debug (struct loop *ptr)
7266 {
7267 if (ptr)
7268 debug (*ptr);
7269 else
7270 fprintf (stderr, "<nil>\n");
7271 }
7272
7273 /* Dump a loop verbosely. */
7274
7275 DEBUG_FUNCTION void
7276 debug_verbose (struct loop &ref)
7277 {
7278 print_loop (stderr, &ref, 0, /*verbosity*/3);
7279 }
7280
7281 DEBUG_FUNCTION void
7282 debug_verbose (struct loop *ptr)
7283 {
7284 if (ptr)
7285 debug_verbose (*ptr);
7286 else
7287 fprintf (stderr, "<nil>\n");
7288 }
7289
7290
7291 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7292
7293 DEBUG_FUNCTION void
7294 debug_loops (int verbosity)
7295 {
7296 print_loops (stderr, verbosity);
7297 }
7298
7299 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7300
7301 DEBUG_FUNCTION void
7302 debug_loop (struct loop *loop, int verbosity)
7303 {
7304 print_loop (stderr, loop, 0, verbosity);
7305 }
7306
7307 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7308 level. */
7309
7310 DEBUG_FUNCTION void
7311 debug_loop_num (unsigned num, int verbosity)
7312 {
7313 debug_loop (get_loop (cfun, num), verbosity);
7314 }
7315
7316 /* Return true if BB ends with a call, possibly followed by some
7317 instructions that must stay with the call. Return false
7318 otherwise. */
7319
7320 static bool
7321 gimple_block_ends_with_call_p (basic_block bb)
7322 {
7323 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7324 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7325 }
7326
7327
7328 /* Return true if BB ends with a conditional branch. Return false
7329 otherwise. */
7330
7331 static bool
7332 gimple_block_ends_with_condjump_p (const_basic_block bb)
7333 {
7334 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7335 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7336 }
7337
7338
7339 /* Return true if we need to add a fake edge to the exit at statement T.
7340 Helper function for gimple_flow_call_edges_add. */
7341
7342 static bool
7343 need_fake_edge_p (gimple t)
7344 {
7345 tree fndecl = NULL_TREE;
7346 int call_flags = 0;
7347
7348 /* NORETURN and LONGJMP calls already have an edge to exit.
7349 CONST and PURE calls do not need one.
7350 We don't currently check for CONST and PURE here, although
7351 it would be a good idea, because those attributes are
7352 figured out from the RTL in mark_constant_function, and
7353 the counter incrementation code from -fprofile-arcs
7354 leads to different results from -fbranch-probabilities. */
7355 if (is_gimple_call (t))
7356 {
7357 fndecl = gimple_call_fndecl (t);
7358 call_flags = gimple_call_flags (t);
7359 }
7360
7361 if (is_gimple_call (t)
7362 && fndecl
7363 && DECL_BUILT_IN (fndecl)
7364 && (call_flags & ECF_NOTHROW)
7365 && !(call_flags & ECF_RETURNS_TWICE)
7366 /* fork() doesn't really return twice, but wrapping it in
7367 __gcov_fork(), which calls __gcov_flush() and clears the
7368 counters before forking, has the same effect as returning
7369 twice. Force a fake edge. */
7370 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7371 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7372 return false;
7373
7374 if (is_gimple_call (t))
7375 {
7376 edge_iterator ei;
7377 edge e;
7378 basic_block bb;
7379
7380 if (!(call_flags & ECF_NORETURN))
7381 return true;
7382
7383 bb = gimple_bb (t);
7384 FOR_EACH_EDGE (e, ei, bb->succs)
7385 if ((e->flags & EDGE_FAKE) == 0)
7386 return true;
7387 }
7388
7389 if (gimple_code (t) == GIMPLE_ASM
7390 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7391 return true;
7392
7393 return false;
7394 }
7395
7396
7397 /* Add fake edges to the function exit for any non-constant and
7398 non-noreturn calls (or noreturn calls with EH/abnormal edges) and
7399 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7400 or in the whole CFG if BLOCKS is zero. Return the number of blocks
7401 that were split.
7402
7403 The goal is to expose cases in which entering a basic block does
7404 not imply that all subsequent instructions must be executed. */
7405
7406 static int
7407 gimple_flow_call_edges_add (sbitmap blocks)
7408 {
7409 int i;
7410 int blocks_split = 0;
7411 int last_bb = last_basic_block;
7412 bool check_last_block = false;
7413
7414 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7415 return 0;
7416
7417 if (! blocks)
7418 check_last_block = true;
7419 else
7420 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7421
7422 /* In the last basic block, before epilogue generation, there will be
7423 a fallthru edge to EXIT. Special care is required if the last insn
7424 of the last basic block is a call because make_edge folds duplicate
7425 edges, which would result in the fallthru edge also being marked
7426 fake, which would result in the fallthru edge being removed by
7427 remove_fake_edges, which would result in an invalid CFG.
7428
7429 Moreover, we can't elide the outgoing fake edge, since the block
7430 profiler needs to take this into account in order to solve the minimal
7431 spanning tree in the case that the call doesn't return.
7432
7433 Handle this by adding a dummy instruction in a new last basic block. */
7434 if (check_last_block)
7435 {
7436 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7437 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7438 gimple t = NULL;
7439
7440 if (!gsi_end_p (gsi))
7441 t = gsi_stmt (gsi);
7442
7443 if (t && need_fake_edge_p (t))
7444 {
7445 edge e;
7446
7447 e = find_edge (bb, EXIT_BLOCK_PTR);
7448 if (e)
7449 {
7450 gsi_insert_on_edge (e, gimple_build_nop ());
7451 gsi_commit_edge_inserts ();
7452 }
7453 }
7454 }
7455
7456 /* Now add fake edges to the function exit for any non-constant
7457 calls, since there is no way that we can determine whether they will
7458 return or not... */
7459 for (i = 0; i < last_bb; i++)
7460 {
7461 basic_block bb = BASIC_BLOCK (i);
7462 gimple_stmt_iterator gsi;
7463 gimple stmt, last_stmt;
7464
7465 if (!bb)
7466 continue;
7467
7468 if (blocks && !bitmap_bit_p (blocks, i))
7469 continue;
7470
7471 gsi = gsi_last_nondebug_bb (bb);
7472 if (!gsi_end_p (gsi))
7473 {
7474 last_stmt = gsi_stmt (gsi);
7475 do
7476 {
7477 stmt = gsi_stmt (gsi);
7478 if (need_fake_edge_p (stmt))
7479 {
7480 edge e;
7481
7482 /* The handling above of the final block before the
7483 epilogue should be enough to verify that there is
7484 no edge to the exit block in CFG already.
7485 Calling make_edge in such case would cause us to
7486 mark that edge as fake and remove it later. */
7487 #ifdef ENABLE_CHECKING
7488 if (stmt == last_stmt)
7489 {
7490 e = find_edge (bb, EXIT_BLOCK_PTR);
7491 gcc_assert (e == NULL);
7492 }
7493 #endif
7494
7495 /* Note that the following may create a new basic block
7496 and renumber the existing basic blocks. */
7497 if (stmt != last_stmt)
7498 {
7499 e = split_block (bb, stmt);
7500 if (e)
7501 blocks_split++;
7502 }
7503 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7504 }
7505 gsi_prev (&gsi);
7506 }
7507 while (!gsi_end_p (gsi));
7508 }
7509 }
7510
7511 if (blocks_split)
7512 verify_flow_info ();
7513
7514 return blocks_split;
7515 }
7516
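/* Illustrative effect (sketch): given a block

     a_1 = foo ();
     b_1 = a_1 + 1;

   where foo is not known to return, the block is split after the call
   and the first half receives an EDGE_FAKE edge to the exit block, so
   entering the block no longer implies that the assignment to b_1
   executes.  */
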
7517 /* Removes edge E and all the blocks dominated by it, and updates dominance
7518 information. The IL in E->src needs to be updated separately.
7519 If dominance info is not available, only the edge E is removed. */
7520
7521 void
7522 remove_edge_and_dominated_blocks (edge e)
7523 {
7524 vec<basic_block> bbs_to_remove = vNULL;
7525 vec<basic_block> bbs_to_fix_dom = vNULL;
7526 bitmap df, df_idom;
7527 edge f;
7528 edge_iterator ei;
7529 bool none_removed = false;
7530 unsigned i;
7531 basic_block bb, dbb;
7532 bitmap_iterator bi;
7533
7534 if (!dom_info_available_p (CDI_DOMINATORS))
7535 {
7536 remove_edge (e);
7537 return;
7538 }
7539
7540 /* No updating is needed for edges to exit. */
7541 if (e->dest == EXIT_BLOCK_PTR)
7542 {
7543 if (cfgcleanup_altered_bbs)
7544 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7545 remove_edge (e);
7546 return;
7547 }
7548
7549 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7550 that is not dominated by E->dest, then this set is empty. Otherwise,
7551 all the basic blocks dominated by E->dest are removed.
7552
7553 Also, to DF_IDOM we store the immediate dominators of the blocks in
7554 the dominance frontier of E (i.e., of the successors of the
7555 removed blocks, if there are any, and of E->dest otherwise). */
7556 FOR_EACH_EDGE (f, ei, e->dest->preds)
7557 {
7558 if (f == e)
7559 continue;
7560
7561 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7562 {
7563 none_removed = true;
7564 break;
7565 }
7566 }
7567
7568 df = BITMAP_ALLOC (NULL);
7569 df_idom = BITMAP_ALLOC (NULL);
7570
7571 if (none_removed)
7572 bitmap_set_bit (df_idom,
7573 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7574 else
7575 {
7576 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7577 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7578 {
7579 FOR_EACH_EDGE (f, ei, bb->succs)
7580 {
7581 if (f->dest != EXIT_BLOCK_PTR)
7582 bitmap_set_bit (df, f->dest->index);
7583 }
7584 }
7585 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7586 bitmap_clear_bit (df, bb->index);
7587
7588 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7589 {
7590 bb = BASIC_BLOCK (i);
7591 bitmap_set_bit (df_idom,
7592 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7593 }
7594 }
7595
7596 if (cfgcleanup_altered_bbs)
7597 {
7598 /* Record the set of the altered basic blocks. */
7599 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7600 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7601 }
7602
7603 /* Remove E and the cancelled blocks. */
7604 if (none_removed)
7605 remove_edge (e);
7606 else
7607 {
7608 /* Walk backwards so as to get a chance to substitute all
7609 released DEFs into debug stmts. See
7610 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7611 details. */
7612 for (i = bbs_to_remove.length (); i-- > 0; )
7613 delete_basic_block (bbs_to_remove[i]);
7614 }
7615
7616 /* Update the dominance information. The immediate dominator may change only
7617 for blocks whose immediate dominator belongs to DF_IDOM:
7618
7619 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7620 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7621 Z dominates X after the removal. Before removal, there exists a path P
7622 from Y to X that avoids Z. Let F be the last edge on P that is
7623 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7624 dominates W, and because of P, Z does not dominate W), and W belongs to
7625 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7626 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7627 {
7628 bb = BASIC_BLOCK (i);
7629 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7630 dbb;
7631 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7632 bbs_to_fix_dom.safe_push (dbb);
7633 }
7634
7635 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7636
7637 BITMAP_FREE (df);
7638 BITMAP_FREE (df_idom);
7639 bbs_to_remove.release ();
7640 bbs_to_fix_dom.release ();
7641 }
7642
7643 /* Purge dead EH edges from basic block BB. */
7644
7645 bool
7646 gimple_purge_dead_eh_edges (basic_block bb)
7647 {
7648 bool changed = false;
7649 edge e;
7650 edge_iterator ei;
7651 gimple stmt = last_stmt (bb);
7652
7653 if (stmt && stmt_can_throw_internal (stmt))
7654 return false;
7655
7656 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7657 {
7658 if (e->flags & EDGE_EH)
7659 {
7660 remove_edge_and_dominated_blocks (e);
7661 changed = true;
7662 }
7663 else
7664 ei_next (&ei);
7665 }
7666
7667 return changed;
7668 }
7669
7670 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7671
7672 bool
7673 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7674 {
7675 bool changed = false;
7676 unsigned i;
7677 bitmap_iterator bi;
7678
7679 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7680 {
7681 basic_block bb = BASIC_BLOCK (i);
7682
7683 /* Earlier gimple_purge_dead_eh_edges could have removed
7684 this basic block already. */
7685 gcc_assert (bb || changed);
7686 if (bb != NULL)
7687 changed |= gimple_purge_dead_eh_edges (bb);
7688 }
7689
7690 return changed;
7691 }
7692
7693 /* Purge dead abnormal call edges from basic block BB. */
7694
7695 bool
7696 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7697 {
7698 bool changed = false;
7699 edge e;
7700 edge_iterator ei;
7701 gimple stmt = last_stmt (bb);
7702
7703 if (!cfun->has_nonlocal_label
7704 && !cfun->calls_setjmp)
7705 return false;
7706
7707 if (stmt && stmt_can_make_abnormal_goto (stmt))
7708 return false;
7709
7710 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7711 {
7712 if (e->flags & EDGE_ABNORMAL)
7713 {
7714 if (e->flags & EDGE_FALLTHRU)
7715 e->flags &= ~EDGE_ABNORMAL;
7716 else
7717 remove_edge_and_dominated_blocks (e);
7718 changed = true;
7719 }
7720 else
7721 ei_next (&ei);
7722 }
7723
7724 return changed;
7725 }
7726
7727 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7728
7729 bool
7730 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7731 {
7732 bool changed = false;
7733 unsigned i;
7734 bitmap_iterator bi;
7735
7736 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7737 {
7738 basic_block bb = BASIC_BLOCK (i);
7739
7740 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7741 this basic block already. */
7742 gcc_assert (bb || changed);
7743 if (bb != NULL)
7744 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7745 }
7746
7747 return changed;
7748 }
7749
7750 /* This function is called whenever a new edge is created or
7751 redirected. */
7752
7753 static void
7754 gimple_execute_on_growing_pred (edge e)
7755 {
7756 basic_block bb = e->dest;
7757
7758 if (!gimple_seq_empty_p (phi_nodes (bb)))
7759 reserve_phi_args_for_new_edge (bb);
7760 }
7761
7762 /* This function is called immediately before edge E is removed from
7763 the edge vector E->dest->preds. */
7764
7765 static void
7766 gimple_execute_on_shrinking_pred (edge e)
7767 {
7768 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7769 remove_phi_args (e);
7770 }
7771
7772 /*---------------------------------------------------------------------------
7773 Helper functions for Loop versioning
7774 ---------------------------------------------------------------------------*/
7775
7776 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7777 of 'first'. Both of them are dominated by 'new_head' basic block. When
7778 'new_head' was created by splitting 'second's incoming edge, it received
7779 phi arguments on that edge from split_edge(). Later, an additional edge
7780 'e' was created to connect 'new_head' and 'first'. This routine now adds
7781 to this additional edge 'e' the phi args that the new_head-to-second edge
7782 received as part of the edge splitting. */
7783
7784 static void
7785 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7786 basic_block new_head, edge e)
7787 {
7788 gimple phi1, phi2;
7789 gimple_stmt_iterator psi1, psi2;
7790 tree def;
7791 edge e2 = find_edge (new_head, second);
7792
7793 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7794 edge, we should always have an edge from NEW_HEAD to SECOND. */
7795 gcc_assert (e2 != NULL);
7796
7797 /* Walk all of basic block 'second's phi nodes and add phi args to
7798 edge 'e' for the 'first' head. PHI args are always in correct order. */
7799
7800 for (psi2 = gsi_start_phis (second),
7801 psi1 = gsi_start_phis (first);
7802 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7803 gsi_next (&psi2), gsi_next (&psi1))
7804 {
7805 phi1 = gsi_stmt (psi1);
7806 phi2 = gsi_stmt (psi2);
7807 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7808 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7809 }
7810 }
7811
7812
7813 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7814 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7815 the destination of the ELSE part. */
7816
7817 static void
7818 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7819 basic_block second_head ATTRIBUTE_UNUSED,
7820 basic_block cond_bb, void *cond_e)
7821 {
7822 gimple_stmt_iterator gsi;
7823 gimple new_cond_expr;
7824 tree cond_expr = (tree) cond_e;
7825 edge e0;
7826
7827 /* Build the new conditional expression. */
7828 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7829 NULL_TREE, NULL_TREE);
7830
7831 /* Add new cond in cond_bb. */
7832 gsi = gsi_last_bb (cond_bb);
7833 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7834
7835 /* Adjust edges appropriately to connect new head with first head
7836 as well as second head. */
7837 e0 = single_succ_edge (cond_bb);
7838 e0->flags &= ~EDGE_FALLTHRU;
7839 e0->flags |= EDGE_FALSE_VALUE;
7840 }
7841
7842
7843 /* Do book-keeping of basic block BB for the profile consistency checker.
7844 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
7845 do post-pass accounting. Store the counts in RECORD. */
7846 static void
7847 gimple_account_profile_record (basic_block bb, int after_pass,
7848 struct profile_record *record)
7849 {
7850 gimple_stmt_iterator i;
7851 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7852 {
7853 record->size[after_pass]
7854 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7855 if (profile_status == PROFILE_READ)
7856 record->time[after_pass]
7857 += estimate_num_insns (gsi_stmt (i),
7858 &eni_time_weights) * bb->count;
7859 else if (profile_status == PROFILE_GUESSED)
7860 record->time[after_pass]
7861 += estimate_num_insns (gsi_stmt (i),
7862 &eni_time_weights) * bb->frequency;
7863 }
7864 }
7865
7866 struct cfg_hooks gimple_cfg_hooks = {
7867 "gimple",
7868 gimple_verify_flow_info,
7869 gimple_dump_bb, /* dump_bb */
7870 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7871 create_bb, /* create_basic_block */
7872 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7873 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7874 gimple_can_remove_branch_p, /* can_remove_branch_p */
7875 remove_bb, /* delete_basic_block */
7876 gimple_split_block, /* split_block */
7877 gimple_move_block_after, /* move_block_after */
7878 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7879 gimple_merge_blocks, /* merge_blocks */
7880 gimple_predict_edge, /* predict_edge */
7881 gimple_predicted_by_p, /* predicted_by_p */
7882 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7883 gimple_duplicate_bb, /* duplicate_block */
7884 gimple_split_edge, /* split_edge */
7885 gimple_make_forwarder_block, /* make_forwarder_block */
7886 NULL, /* tidy_fallthru_edge */
7887 NULL, /* force_nonfallthru */
7888 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7889 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7890 gimple_flow_call_edges_add, /* flow_call_edges_add */
7891 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7892 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7893 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7894 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7895 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
7896 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7897 flush_pending_stmts, /* flush_pending_stmts */
7898 gimple_empty_block_p, /* block_empty_p */
7899 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7900 gimple_account_profile_record,
7901 };
7902
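/* Illustrative note: this table is installed via

     gimple_register_cfg_hooks ();

   (see cfghooks.c), after which generic CFG routines such as
   split_edge and redirect_edge_and_branch dispatch to the gimple
   implementations above.  */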
7903
7904 /* Split all critical edges. */
7905
7906 static unsigned int
7907 split_critical_edges (void)
7908 {
7909 basic_block bb;
7910 edge e;
7911 edge_iterator ei;
7912
7913 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7914 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7915 mappings around the calls to split_edge. */
7916 start_recording_case_labels ();
7917 FOR_ALL_BB (bb)
7918 {
7919 FOR_EACH_EDGE (e, ei, bb->succs)
7920 {
7921 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7922 split_edge (e);
7923 /* PRE inserts statements on edges and expects that
7924 since split_critical_edges was done beforehand, committing edge
7925 insertions will not split more edges. In addition to critical
7926 edges we must split edges that have multiple successors and
7927 end by control flow statements, such as RESX.
7928 Go ahead and split them too. This matches the logic in
7929 gimple_find_edge_insert_loc. */
          else if ((!single_pred_p (e->dest)
                    || !gimple_seq_empty_p (phi_nodes (e->dest))
                    || e->dest == EXIT_BLOCK_PTR)
                   && e->src != ENTRY_BLOCK_PTR
                   && !(e->flags & EDGE_ABNORMAL))
            {
              gimple_stmt_iterator gsi;

              gsi = gsi_last_bb (e->src);
              if (!gsi_end_p (gsi)
                  && stmt_ends_bb_p (gsi_stmt (gsi))
                  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
                      && !gimple_call_builtin_p (gsi_stmt (gsi),
                                                 BUILT_IN_RETURN)))
                split_edge (e);
            }
        }
    }
  end_recording_case_labels ();
  return 0;
}
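
/* For reference: an edge is critical when its source block has more
   than one successor and its destination block has more than one
   predecessor, e.g. the edge A->C in

        A   B
       / \ /
      D   C

   No existing block can then hold code meant for that edge alone,
   which is why split_edge interposes a fresh block on it.  */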

namespace {

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_flow, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return split_critical_edges (); }

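  /* Edge splitting is requested at more than one point in the pass
     pipeline, so this pass must be clonable.  */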
  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}


/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
                 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}
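
/* Hypothetical usage sketch: a pass needing the gimple value of
   (A + B) * C ahead of the statement at *GSI could write

     tree t0 = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree t1 = gimplify_build2 (gsi, MULT_EXPR, type, t0, c);

   Each call folds the expression, strips conversion nops, and forces
   the result into a gimple_val, inserting any statements it creates
   before *GSI.  */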


\f
/* Emit return warnings.  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          last = last_stmt (e->src);
          if ((gimple_code (last) == GIMPLE_RETURN
               || gimple_call_builtin_p (last, BUILT_IN_RETURN))
              && (location = gimple_location (last)) != UNKNOWN_LOCATION)
            break;
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
           && !TREE_NO_WARNING (cfun->decl)
           && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          gimple last = last_stmt (e->src);
          if (gimple_code (last) == GIMPLE_RETURN
              && gimple_return_retval (last) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (location == UNKNOWN_LOCATION)
                location = cfun->function_end_locus;
              warning_at (location, OPT_Wreturn_type,
                          "control reaches end of non-void function");
              TREE_NO_WARNING (cfun->decl) = 1;
              break;
            }
        }
    }
  return 0;
}


/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
                                     edge *true_edge,
                                     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
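
/* Note: the above relies on the flow-info invariant, checked by
   gimple_verify_flow_info, that a block ending in a GIMPLE_COND has
   exactly one EDGE_TRUE_VALUE and one EDGE_FALSE_VALUE successor, so
   if edge 0 is not the true edge, edge 1 must be.  */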

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_warn_function_return (); }

}; // class pass_warn_function_return

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}

/* Walk a gimplified function and warn for functions whose return value is
   ignored and __attribute__ ((warn_unused_result)) is set.  This is done
   before inlining, so we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (g));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (g));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;
          if (gimple_call_internal_p (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = gimple_call_fntype (g);

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD, "
                            "declared with attribute warn_unused_result",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute warn_unused_result");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}
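
/* For example (illustrative only), given

     int f (void) __attribute__ ((warn_unused_result));
     void g (void) { f (); }

   the call to f inside g is gimplified to a GIMPLE_CALL with no LHS,
   so the GIMPLE_CALL case above finds the attribute on f's function
   type and issues -Wunused-result.  */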

static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}

static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_unused_result (); }
  unsigned int execute () { return run_warn_unused_result (); }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because between inlining and this fixup verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

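  /* count_scale is the ratio of the profile count recorded for this
     function in the cgraph node to the count currently on the entry
     block, in REG_BR_PROB_BASE fixed-point units; apply_scale
     multiplies a count by that ratio.  All block and edge counts are
     rescaled below so the body agrees with the cgraph count.  */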
  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
                            ENTRY_BLOCK_PTR->count);

  ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
                                       count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl = is_gimple_call (stmt)
                      ? gimple_call_fndecl (stmt)
                      : NULL;
          if (decl)
            {
              int flags = gimple_call_flags (stmt);
              if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
                {
                  if (gimple_purge_dead_abnormal_call_edges (bb))
                    todo |= TODO_cleanup_cfg;

                  if (gimple_in_ssa_p (cfun))
                    {
                      todo |= TODO_update_ssa | TODO_cleanup_cfg;
                      update_stmt (stmt);
                    }
                }

              if (flags & ECF_NORETURN
                  && fixup_noreturn_call (stmt))
                todo |= TODO_cleanup_cfg;
            }

          if (maybe_clean_eh_stmt (stmt)
              && gimple_purge_dead_eh_edges (bb))
            todo |= TODO_cleanup_cfg;
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call, end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
            {
              stmt = gimple_build_call
                       (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
            }
        }
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute () { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

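/* Note: the marker routines declared above are generated by gengtype.
   An edge's goto_locus may have a BLOCK encoded in its location; that
   tree is extracted with LOCATION_BLOCK and marked explicitly below,
   since the location itself is only an index and is not walked by the
   garbage collector.  */
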
void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}