/* Control flow functions for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table <locus_discrim_hasher> discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}


/* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
   it and set loop->safelen to INT_MAX.  We assume that the annotation
   comes immediately before the condition.  */

static void
replace_loop_annotate ()
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      gsi = gsi_last_bb (loop->header);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  gsi_prev_nondebug (&gsi);
	  if (gsi_end_p (gsi))
	    continue;
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;
	  if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
	      != annot_expr_ivdep_kind)
	    continue;
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	  loop->safelen = INT_MAX;
	}
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB (bb)
    {
      gsi = gsi_last_bb (bb);
      stmt = gsi_stmt (gsi);
      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	gsi_prev_nondebug (&gsi);
      if (gsi_end_p (gsi))
	continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	continue;
      if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
	  != annot_expr_ivdep_kind)
	continue;
      warning_at (gimple_location (stmt), 0, "ignoring %<GCC ivdep%> "
		  "annotation");
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
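
/* Illustrative sketch, not part of this file: the IFN_ANNOTATE calls
   handled above originate from source-level loop annotations such as
   the GCC ivdep pragma.  The function below (a made-up example) is the
   kind of input that ends up setting loop->safelen to INT_MAX.  */
#if 0
void
example_ivdep (int *a, int *b, int n)
{
#pragma GCC ivdep	/* Asserts there are no loop-carried dependences.  */
  for (int i = 0; i < n; i++)
    a[i] = b[i] + 1;
}
#endif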


static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
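
/* Illustrative sketch, not part of this file: at the source level a
   computed goto uses the GNU "labels as values" extension, so its
   destination is an expression rather than a LABEL_DECL, which is
   exactly what computed_goto_p tests for.  */
#if 0
int
example_computed_goto (int i)
{
  static void *targets[] = { &&even, &&odd };	/* GNU label-as-value.  */
  goto *targets[i & 1];				/* A computed goto.  */
 even:
  return 0;
 odd:
  return 1;
}
#endif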

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  if (is_gimple_debug (stmt))
	    {
	      gsi_next_nondebug (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
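
/* Illustrative sketch, not part of this file: C source of the shape the
   predicate above looks for.  The arm calling __builtin_unreachable ()
   plays the role of <bb N> in the comment; the predicate returns true
   for the edge leading to the other arm, <bb M>.  */
#if 0
int
example_unreachable_arm (int something, int m)
{
  if (something)
    __builtin_unreachable ();	/* <bb N>.  */
  return m;			/* <bb M>.  */
}
#endif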


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
	continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  gimple assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for its goto destination.  */
	  if (!factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
	      factored_computed_goto_label
		= gimple_build_label (factored_label_decl);
	      gsi_insert_after (&new_gsi, factored_computed_goto_label,
				GSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = gimple_build_goto (var);
	      gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = gimple_build_assign (var, gimple_goto_dest (last));
	  gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  gimple_goto_set_dest (last, factored_label_decl);
	}
    }
}
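
/* Illustrative sketch, not part of this file: in C-level terms the
   factoring above rewrites every "goto *EXPR" into an assignment to a
   shared variable followed by a jump to one common computed goto.  The
   names below (gotovar, factored) are hypothetical.  */
#if 0
static void *gotovar;		/* The shared destination variable.  */

void
example_factored (void *e1, void *e2, int pick)
{
  if (pick)
    {
      gotovar = e1;		/* Was: goto *e1;  */
      goto factored;
    }
  gotovar = e2;			/* Was: goto *e2;  */
 factored:
  goto *gotovar;		/* The single factored computed goto.  */
}
#endif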


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
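
/* Illustrative sketch, not part of this file: the temporary introduced
   above corresponds to rewriting the result of a returns-twice call
   such as setjmp, roughly

     x = setjmp (buf);   ==>   tmp = setjmp (buf);  x = tmp;

   so that the old value of x stays available on the abnormal edge.  A
   call site that triggers this looks like:  */
#if 0
#include <setjmp.h>

static jmp_buf buf;

int
example_returns_twice (void)
{
  int x = setjmp (buf);		/* ECF_RETURNS_TWICE call with an LHS.  */
  return x;
}
#endif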


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block, bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (stmt),
				  boolean_type_node,
				  gimple_cond_lhs (stmt),
				  gimple_cond_rhs (stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (stmt);
	  else if (onep)
	    gimple_cond_make_true (stmt);
	}
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	      fallthru = false;
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru = make_eh_dispatch_edges (last);
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		{
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  fallthru = false;
		}
	      /* Some calls are known not to return.  */
	      else
		fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    CASE_GIMPLE_OMP:
	      fallthru = make_gimple_omp_edges (bb, &cur_region);
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label = gimple_transaction_label (last);
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
	   &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
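
/* Illustrative sketch, not part of this file: discriminators matter when
   several basic blocks share one source line.  In the made-up function
   below, the condition, the then-side and the else-side all map to the
   same line, so they receive distinct discriminators and sample-based
   profiles can still tell them apart.  */
#if 0
void
example_discriminators (int p, int *x, int *y)
{
  if (p) (*x)++; else (*y)++;	/* Three basic blocks on one line.  */
}
#endif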

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple first = first_non_label_stmt (e->dest);
	  gimple last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
		       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (stmt);
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}

/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
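
/* Illustrative sketch, not part of this file: label_to_block_fn is
   normally reached through the label_to_block (DEST) wrapper, which
   supplies cfun, as in the goto handling elsewhere in this file.  The
   goto_stmt and bb names below are hypothetical.  */
#if 0
  basic_block label_bb = label_to_block (gimple_goto_dest (goto_stmt));
  if (label_bb)
    make_edge (bb, label_bb, EDGE_FALLTHRU);
#endif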

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple label_stmt = gsi_stmt (gsi);
	  tree target;

	  if (gimple_code (label_stmt) != GIMPLE_LABEL)
	    break;

	  target = gimple_label_label (label_stmt);

	  /* Make an edge to every label block that has been marked as a
	     potential target for a computed goto or a non-local goto.  */
	  if ((FORCED_LABEL (target) && !for_call)
	      || (DECL_NONLOCAL (target) && for_call))
	    {
	      make_edge (bb, target_bb, EDGE_ABNORMAL);
	      break;
	    }
	}
      if (!gsi_end_p (gsi)
	  && is_gimple_debug (gsi_stmt (gsi)))
	gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
	{
	  /* Make an edge to every setjmp-like call.  */
	  gimple call_stmt = gsi_stmt (gsi);
	  if (is_gimple_call (call_stmt)
	      && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
	    make_edge (bb, target_bb, EDGE_ABNORMAL);
	}
    }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it again, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  label = gimple_cond_true_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_true_label (stmt, new_label);
	    }

	  label = gimple_cond_false_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_false_label (stmt, new_label);
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    int i, n = gimple_asm_nlabels (stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      label = gimple_goto_dest (stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    tree label = gimple_transaction_label (stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
		  ? CASE_HIGH (base_case)
		  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
			  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
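
/* Illustrative sketch, not part of this file: at the source level the
   merge performed above corresponds to collapsing consecutive cases
   with one destination into a single case range (a GNU extension).
   The names v and f are hypothetical.  */
#if 0
  /* Before grouping:  */
  switch (v) { case 1: case 2: case 3: f (); break; default: break; }
  /* After grouping:  */
  switch (v) { case 1 ... 3: f (); break; default: break; }
#endif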

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* Mark the block if we changed the last stmt in it.  */
	  if (cfgcleanup_altered_bbs
	      && stmt_ends_bb_p (stmt))
	    bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels are kept around in the form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a CALL_EXPR.  Set current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}
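
/* Illustrative sketch, not part of this file: calls that set the two
   flags above.  __builtin_alloca carries ECF_MAY_BE_ALLOCA and setjmp
   carries ECF_RETURNS_TWICE.  */
#if 0
#include <setjmp.h>

static jmp_buf env;

void
example_special_calls (void)
{
  void *p = __builtin_alloca (16);	/* Sets cfun->calls_alloca.  */
  (void) p;
  if (setjmp (env))			/* Sets cfun->calls_setjmp.  */
    return;
}
#endif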


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && (FORCED_LABEL (gimple_label_label (stmt))
		  || DECL_NONLOCAL (gimple_label_label (stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (stmt)) = 1;
		}

	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
1931
1932
1933 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1934 predicate VAL, return the edge that will be taken out of the block.
1935 If VAL does not match a unique edge, NULL is returned. */
1936
1937 edge
1938 find_taken_edge (basic_block bb, tree val)
1939 {
1940 gimple stmt;
1941
1942 stmt = last_stmt (bb);
1943
1944 gcc_assert (stmt);
1945 gcc_assert (is_ctrl_stmt (stmt));
1946
1947 if (val == NULL)
1948 return NULL;
1949
1950 if (!is_gimple_min_invariant (val))
1951 return NULL;
1952
1953 if (gimple_code (stmt) == GIMPLE_COND)
1954 return find_taken_edge_cond_expr (bb, val);
1955
1956 if (gimple_code (stmt) == GIMPLE_SWITCH)
1957 return find_taken_edge_switch_expr (bb, val);
1958
1959 if (computed_goto_p (stmt))
1960 {
1961 /* Only optimize if the argument is a label, if the argument is
1962 not a label then we can not construct a proper CFG.
1963
1964 It may be the case that we only need to allow the LABEL_REF to
1965 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1966 appear inside a LABEL_EXPR just to be safe. */
1967 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1968 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1969 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1970 return NULL;
1971 }
1972
1973 gcc_unreachable ();
1974 }
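/* An illustrative sketch, not part of GCC: a constant-propagation pass
   that has proven the controlling value of BB's last statement to be
   the invariant VAL could use the result to prune the CFG.  Only the
   edge-iterator primitives from basic-block.h and remove_edge are
   assumed here:

     edge taken = find_taken_edge (bb, val);
     if (taken)
       {
         edge e;
         edge_iterator ei;
         for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
           if (e == taken)
             ei_next (&ei);
           else
             remove_edge (e);

   Removing an edge compacts the successor vector, so the iterator is
   only advanced past the edge that is kept.  When VAL is not a known
   constant, find_taken_edge returns NULL and nothing may be pruned.  */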
1975
1976 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1977 statement, determine which of the outgoing edges will be taken out of the
1978 block. Return NULL if any edge may be taken. */
1979
1980 static edge
1981 find_taken_edge_computed_goto (basic_block bb, tree val)
1982 {
1983 basic_block dest;
1984 edge e = NULL;
1985
1986 dest = label_to_block (val);
1987 if (dest)
1988 {
1989 e = find_edge (bb, dest);
1990 gcc_assert (e != NULL);
1991 }
1992
1993 return e;
1994 }
1995
1996 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1997 statement, determine which of the two edges will be taken out of the
1998 block. Return NULL if either edge may be taken. */
1999
2000 static edge
2001 find_taken_edge_cond_expr (basic_block bb, tree val)
2002 {
2003 edge true_edge, false_edge;
2004
2005 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2006
2007 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2008 return (integer_zerop (val) ? false_edge : true_edge);
2009 }
2010
2011 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2012 statement, determine which edge will be taken out of the block. Return
2013 NULL if any edge may be taken. */
2014
2015 static edge
2016 find_taken_edge_switch_expr (basic_block bb, tree val)
2017 {
2018 basic_block dest_bb;
2019 edge e;
2020 gimple switch_stmt;
2021 tree taken_case;
2022
2023 switch_stmt = last_stmt (bb);
2024 taken_case = find_case_label_for_value (switch_stmt, val);
2025 dest_bb = label_to_block (CASE_LABEL (taken_case));
2026
2027 e = find_edge (bb, dest_bb);
2028 gcc_assert (e);
2029 return e;
2030 }
2031
2032
2033 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2034 We can make optimal use here of the fact that the case labels are
2035 sorted: we can do a binary search for a case matching VAL. */
2036
2037 static tree
2038 find_case_label_for_value (gimple switch_stmt, tree val)
2039 {
2040 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2041 tree default_case = gimple_switch_default_label (switch_stmt);
2042
2043 for (low = 0, high = n; high - low > 1; )
2044 {
2045 size_t i = (high + low) / 2;
2046 tree t = gimple_switch_label (switch_stmt, i);
2047 int cmp;
2048
2049 /* Cache the result of comparing CASE_LOW and val. */
2050 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2051
2052 if (cmp > 0)
2053 high = i;
2054 else
2055 low = i;
2056
2057 if (CASE_HIGH (t) == NULL)
2058 {
2059 /* A single-valued case label. */
2060 if (cmp == 0)
2061 return t;
2062 }
2063 else
2064 {
2065 /* A case range. We can only handle integer ranges. */
2066 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2067 return t;
2068 }
2069 }
2070
2071 return default_case;
2072 }
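/* A worked example (illustrative): for a switch whose label vector is

     [0] default:   [1] case 1:   [2] case 3 ... 7:   [3] case 9:

   and VAL == 5, the loop starts with low = 0, high = 4.  The first
   probe i = 2 compares CASE_LOW == 3 against 5, cmp < 0, so low = 2;
   since CASE_HIGH == 7 >= 5 the range label is returned.  For
   VAL == 8, i = 2 narrows low to 2, i = 3 gives cmp > 0 so high = 3,
   the loop exits with high - low == 1, and the default label is
   returned.  */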
2073
2074
2075 /* Dump a basic block on stderr. */
2076
2077 void
2078 gimple_debug_bb (basic_block bb)
2079 {
2080 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2081 }
2082
2083
2084 /* Dump basic block with index N on stderr. */
2085
2086 basic_block
2087 gimple_debug_bb_n (int n)
2088 {
2089 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2090 return BASIC_BLOCK_FOR_FN (cfun, n);
2091 }
2092
2093
2094 /* Dump the CFG on stderr.
2095
2096 FLAGS are the same as those used by the tree dumping functions
2097 (see TDF_* in dumpfile.h). */
2098
2099 void
2100 gimple_debug_cfg (int flags)
2101 {
2102 gimple_dump_cfg (stderr, flags);
2103 }
2104
2105
2106 /* Dump the program showing basic block boundaries on the given FILE.
2107
2108 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2109 dumpfile.h). */
2110
2111 void
2112 gimple_dump_cfg (FILE *file, int flags)
2113 {
2114 if (flags & TDF_DETAILS)
2115 {
2116 dump_function_header (file, current_function_decl, flags);
2117 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2118 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2119 last_basic_block);
2120
2121 brief_dump_cfg (file, flags | TDF_COMMENT);
2122 fprintf (file, "\n");
2123 }
2124
2125 if (flags & TDF_STATS)
2126 dump_cfg_stats (file);
2127
2128 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2129 }
2130
2131
2132 /* Dump CFG statistics on FILE. */
2133
2134 void
2135 dump_cfg_stats (FILE *file)
2136 {
2137 static long max_num_merged_labels = 0;
2138 unsigned long size, total = 0;
2139 long num_edges;
2140 basic_block bb;
2141 const char * const fmt_str = "%-30s%-13s%12s\n";
2142 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2143 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2144 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2145 const char *funcname = current_function_name ();
2146
2147 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2148
2149 fprintf (file, "---------------------------------------------------------\n");
2150 fprintf (file, fmt_str, "", " Number of ", "Memory");
2151 fprintf (file, fmt_str, "", " instances ", "used ");
2152 fprintf (file, "---------------------------------------------------------\n");
2153
2154 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2155 total += size;
2156 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2157 SCALE (size), LABEL (size));
2158
2159 num_edges = 0;
2160 FOR_EACH_BB (bb)
2161 num_edges += EDGE_COUNT (bb->succs);
2162 size = num_edges * sizeof (struct edge_def);
2163 total += size;
2164 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2165
2166 fprintf (file, "---------------------------------------------------------\n");
2167 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2168 LABEL (total));
2169 fprintf (file, "---------------------------------------------------------\n");
2170 fprintf (file, "\n");
2171
2172 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2173 max_num_merged_labels = cfg_stats.num_merged_labels;
2174
2175 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2176 cfg_stats.num_merged_labels, max_num_merged_labels);
2177
2178 fprintf (file, "\n");
2179 }
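/* The format strings above produce a table along these lines (counts
   and sizes invented purely for illustration; the SCALE and LABEL
   macros shrink byte counts to k/M units):

   CFG Statistics for foo

   ---------------------------------------------------------
                                  Number of     Memory
                                  instances     used
   ---------------------------------------------------------
   Basic blocks                           12          1k
   Edges                                  17          1k
   ---------------------------------------------------------
   Total memory used by CFG data                      2k
   ---------------------------------------------------------

   Coalesced label blocks: 3 (Max so far: 5)  */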
2180
2181
2182 /* Dump CFG statistics on stderr. Keep extern so that it's always
2183 linked in the final executable. */
2184
2185 DEBUG_FUNCTION void
2186 debug_cfg_stats (void)
2187 {
2188 dump_cfg_stats (stderr);
2189 }
2190
2191 /*---------------------------------------------------------------------------
2192 Miscellaneous helpers
2193 ---------------------------------------------------------------------------*/
2194
2195 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2196 flow. Transfers of control flow associated with EH are excluded. */
2197
2198 static bool
2199 call_can_make_abnormal_goto (gimple t)
2200 {
2201 /* If the function has no non-local labels, then a call cannot make an
2202 abnormal transfer of control. */
2203 if (!cfun->has_nonlocal_label
2204 && !cfun->calls_setjmp)
2205 return false;
2206
2207 /* Likewise if the call has no side effects. */
2208 if (!gimple_has_side_effects (t))
2209 return false;
2210
2211 /* Likewise if the called function is leaf. */
2212 if (gimple_call_flags (t) & ECF_LEAF)
2213 return false;
2214
2215 return true;
2216 }
2217
2218
2219 /* Return true if T can make an abnormal transfer of control flow.
2220 Transfers of control flow associated with EH are excluded. */
2221
2222 bool
2223 stmt_can_make_abnormal_goto (gimple t)
2224 {
2225 if (computed_goto_p (t))
2226 return true;
2227 if (is_gimple_call (t))
2228 return call_can_make_abnormal_goto (t);
2229 return false;
2230 }
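/* To illustrate (example source, not from the GCC tree): with the GNU
   nested-function extension

     void f (void (*h) (void (*) (void)))
     {
       __label__ out;
       void g (void) { goto out; }
       h (g);
      out:;
     }

   cfun->has_nonlocal_label is set for f, and the call h (g) both has
   side effects and is not ECF_LEAF, so call_can_make_abnormal_goto
   returns true: the call becomes the source of an abnormal edge to
   the block holding "out".  */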
2231
2232
2233 /* Return true if T represents a stmt that always transfers control. */
2234
2235 bool
2236 is_ctrl_stmt (gimple t)
2237 {
2238 switch (gimple_code (t))
2239 {
2240 case GIMPLE_COND:
2241 case GIMPLE_SWITCH:
2242 case GIMPLE_GOTO:
2243 case GIMPLE_RETURN:
2244 case GIMPLE_RESX:
2245 return true;
2246 default:
2247 return false;
2248 }
2249 }
2250
2251
2252 /* Return true if T is a statement that may alter the flow of control
2253 (e.g., a call to a non-returning function). */
2254
2255 bool
2256 is_ctrl_altering_stmt (gimple t)
2257 {
2258 gcc_assert (t);
2259
2260 switch (gimple_code (t))
2261 {
2262 case GIMPLE_CALL:
2263 {
2264 int flags = gimple_call_flags (t);
2265
2266 /* A call alters control flow if it can make an abnormal goto. */
2267 if (call_can_make_abnormal_goto (t))
2268 return true;
2269
2270 /* A call also alters control flow if it does not return. */
2271 if (flags & ECF_NORETURN)
2272 return true;
2273
2274 /* TM ending statements have backedges out of the transaction.
2275 Return true so we split the basic block containing them.
2276 Note that the TM_BUILTIN test is merely an optimization. */
2277 if ((flags & ECF_TM_BUILTIN)
2278 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2279 return true;
2280
2281 /* A BUILT_IN_RETURN call is the same as a return statement. */
2282 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2283 return true;
2284 }
2285 break;
2286
2287 case GIMPLE_EH_DISPATCH:
2288 /* EH_DISPATCH branches to the individual catch handlers at
2289 this level of a try or allowed-exceptions region. It can
2290 fallthru to the next statement as well. */
2291 return true;
2292
2293 case GIMPLE_ASM:
2294 if (gimple_asm_nlabels (t) > 0)
2295 return true;
2296 break;
2297
2298 CASE_GIMPLE_OMP:
2299 /* OpenMP directives alter control flow. */
2300 return true;
2301
2302 case GIMPLE_TRANSACTION:
2303 /* A transaction start alters control flow. */
2304 return true;
2305
2306 default:
2307 break;
2308 }
2309
2310 /* If a statement can throw, it alters control flow. */
2311 return stmt_can_throw_internal (t);
2312 }
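/* For example (illustrative GIMPLE):

     foo ();            <- plain call that returns normally: not
                           control-altering unless it can throw
     abort ();          <- ECF_NORETURN: must end its basic block
     x_1 = y_2 + 1;     <- would be unreachable; starts a new block

   Ending the block after the noreturn call keeps the CFG from
   pretending that control can fall through to the statements that
   follow it.  */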
2313
2314
2315 /* Return true if T is a simple local goto. */
2316
2317 bool
2318 simple_goto_p (gimple t)
2319 {
2320 return (gimple_code (t) == GIMPLE_GOTO
2321 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2322 }
2323
2324
2325 /* Return true if STMT should start a new basic block. PREV_STMT is
2326 the statement preceding STMT. It is used when STMT is a label or a
2327 case label. Labels should only start a new basic block if their
2328 previous statement wasn't a label. Otherwise, a sequence of labels
2329 would generate unnecessary basic blocks that contain only a single
2330 label. */
2331
2332 static inline bool
2333 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2334 {
2335 if (stmt == NULL)
2336 return false;
2337
2338 /* Labels start a new basic block only if the preceding statement
2339 wasn't a label of the same type. This prevents the creation of
2340 consecutive blocks that have nothing but a single label. */
2341 if (gimple_code (stmt) == GIMPLE_LABEL)
2342 {
2343 /* Nonlocal and computed GOTO targets always start a new block. */
2344 if (DECL_NONLOCAL (gimple_label_label (stmt))
2345 || FORCED_LABEL (gimple_label_label (stmt)))
2346 return true;
2347
2348 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2349 {
2350 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2351 return true;
2352
2353 cfg_stats.num_merged_labels++;
2354 return false;
2355 }
2356 else
2357 return true;
2358 }
2359 else if (gimple_code (stmt) == GIMPLE_CALL
2360 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2361 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2362 start a new block. */
2363 return true;
2364
2365 return false;
2366 }
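/* For instance, in the GIMPLE sequence

     L1:
     L2:
     x_1 = a_2 + 1;

   only L1 starts a basic block; L2 is coalesced into the same block
   (and counted in cfg_stats.num_merged_labels).  Had L2 been a
   nonlocal label or a FORCED_LABEL (a computed-goto target), it would
   have started a block of its own.  */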
2367
2368
2369 /* Return true if T should end a basic block. */
2370
2371 bool
2372 stmt_ends_bb_p (gimple t)
2373 {
2374 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2375 }
2376
2377 /* Remove block annotations and other data structures. */
2378
2379 void
2380 delete_tree_cfg_annotations (void)
2381 {
2382 vec_free (label_to_block_map_for_fn (cfun));
2383 }
2384
2385
2386 /* Return the first statement in basic block BB. */
2387
2388 gimple
2389 first_stmt (basic_block bb)
2390 {
2391 gimple_stmt_iterator i = gsi_start_bb (bb);
2392 gimple stmt = NULL;
2393
2394 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2395 {
2396 gsi_next (&i);
2397 stmt = NULL;
2398 }
2399 return stmt;
2400 }
2401
2402 /* Return the first non-label statement in basic block BB. */
2403
2404 static gimple
2405 first_non_label_stmt (basic_block bb)
2406 {
2407 gimple_stmt_iterator i = gsi_start_bb (bb);
2408 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2409 gsi_next (&i);
2410 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2411 }
2412
2413 /* Return the last statement in basic block BB. */
2414
2415 gimple
2416 last_stmt (basic_block bb)
2417 {
2418 gimple_stmt_iterator i = gsi_last_bb (bb);
2419 gimple stmt = NULL;
2420
2421 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2422 {
2423 gsi_prev (&i);
2424 stmt = NULL;
2425 }
2426 return stmt;
2427 }
2428
2429 /* Return the last statement of an otherwise empty block. Return NULL
2430 if the block is totally empty, or if it contains more than one
2431 statement. */
2432
2433 gimple
2434 last_and_only_stmt (basic_block bb)
2435 {
2436 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2437 gimple last, prev;
2438
2439 if (gsi_end_p (i))
2440 return NULL;
2441
2442 last = gsi_stmt (i);
2443 gsi_prev_nondebug (&i);
2444 if (gsi_end_p (i))
2445 return last;
2446
2447 /* Empty statements should no longer appear in the instruction stream.
2448 Everything that might have appeared before should be deleted by
2449 remove_useless_stmts, and the optimizers should just gsi_remove
2450 instead of smashing with build_empty_stmt.
2451
2452 Thus the only thing that should appear here in a block containing
2453 one executable statement is a label. */
2454 prev = gsi_stmt (i);
2455 if (gimple_code (prev) == GIMPLE_LABEL)
2456 return last;
2457 else
2458 return NULL;
2459 }
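/* E.g. for a block containing only

     L1:
     return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN; the leading label and
   any debug statements are disregarded.  For an empty block, or one
   holding two executable statements, it returns NULL.  */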
2460
2461 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2462
2463 static void
2464 reinstall_phi_args (edge new_edge, edge old_edge)
2465 {
2466 edge_var_map_vector *v;
2467 edge_var_map *vm;
2468 int i;
2469 gimple_stmt_iterator phis;
2470
2471 v = redirect_edge_var_map_vector (old_edge);
2472 if (!v)
2473 return;
2474
2475 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2476 v->iterate (i, &vm) && !gsi_end_p (phis);
2477 i++, gsi_next (&phis))
2478 {
2479 gimple phi = gsi_stmt (phis);
2480 tree result = redirect_edge_var_map_result (vm);
2481 tree arg = redirect_edge_var_map_def (vm);
2482
2483 gcc_assert (result == gimple_phi_result (phi));
2484
2485 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2486 }
2487
2488 redirect_edge_var_map_clear (old_edge);
2489 }
2490
2491 /* Returns the basic block after which the new basic block created
2492 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2493 near its "logical" location. This is of most help to humans looking
2494 at debugging dumps. */
2495
2496 static basic_block
2497 split_edge_bb_loc (edge edge_in)
2498 {
2499 basic_block dest = edge_in->dest;
2500 basic_block dest_prev = dest->prev_bb;
2501
2502 if (dest_prev)
2503 {
2504 edge e = find_edge (dest_prev, dest);
2505 if (e && !(e->flags & EDGE_COMPLEX))
2506 return edge_in->src;
2507 }
2508 return dest_prev;
2509 }
2510
2511 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2512 Abort on abnormal edges. */
2513
2514 static basic_block
2515 gimple_split_edge (edge edge_in)
2516 {
2517 basic_block new_bb, after_bb, dest;
2518 edge new_edge, e;
2519
2520 /* Abnormal edges cannot be split. */
2521 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2522
2523 dest = edge_in->dest;
2524
2525 after_bb = split_edge_bb_loc (edge_in);
2526
2527 new_bb = create_empty_bb (after_bb);
2528 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2529 new_bb->count = edge_in->count;
2530 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2531 new_edge->probability = REG_BR_PROB_BASE;
2532 new_edge->count = edge_in->count;
2533
2534 e = redirect_edge_and_branch (edge_in, new_bb);
2535 gcc_assert (e == edge_in);
2536 reinstall_phi_args (new_edge, e);
2537
2538 return new_bb;
2539 }
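/* Schematically, splitting the edge A->B

     A ----> B     becomes     A ----> N ----> B

   where N is an empty block that inherits the frequency and count of
   the old edge, N->B is an unconditional fallthru edge, and the PHI
   arguments that flowed along A->B are re-installed on N->B by
   reinstall_phi_args.  */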
2540
2541
2542 /* Verify properties of the address expression T with base object BASE. */
2543
2544 static tree
2545 verify_address (tree t, tree base)
2546 {
2547 bool old_constant;
2548 bool old_side_effects;
2549 bool new_constant;
2550 bool new_side_effects;
2551
2552 old_constant = TREE_CONSTANT (t);
2553 old_side_effects = TREE_SIDE_EFFECTS (t);
2554
2555 recompute_tree_invariant_for_addr_expr (t);
2556 new_side_effects = TREE_SIDE_EFFECTS (t);
2557 new_constant = TREE_CONSTANT (t);
2558
2559 if (old_constant != new_constant)
2560 {
2561 error ("constant not recomputed when ADDR_EXPR changed");
2562 return t;
2563 }
2564 if (old_side_effects != new_side_effects)
2565 {
2566 error ("side effects not recomputed when ADDR_EXPR changed");
2567 return t;
2568 }
2569
2570 if (!(TREE_CODE (base) == VAR_DECL
2571 || TREE_CODE (base) == PARM_DECL
2572 || TREE_CODE (base) == RESULT_DECL))
2573 return NULL_TREE;
2574
2575 if (DECL_GIMPLE_REG_P (base))
2576 {
2577 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2578 return base;
2579 }
2580
2581 return NULL_TREE;
2582 }
2583
2584 /* Callback for walk_tree; check that all elements with address taken are
2585 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2586 inside a PHI node. */
2587
2588 static tree
2589 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2590 {
2591 tree t = *tp, x;
2592
2593 if (TYPE_P (t))
2594 *walk_subtrees = 0;
2595
2596 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2597 #define CHECK_OP(N, MSG) \
2598 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2599 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2600
2601 switch (TREE_CODE (t))
2602 {
2603 case SSA_NAME:
2604 if (SSA_NAME_IN_FREE_LIST (t))
2605 {
2606 error ("SSA name in freelist but still referenced");
2607 return *tp;
2608 }
2609 break;
2610
2611 case INDIRECT_REF:
2612 error ("INDIRECT_REF in gimple IL");
2613 return t;
2614
2615 case MEM_REF:
2616 x = TREE_OPERAND (t, 0);
2617 if (!POINTER_TYPE_P (TREE_TYPE (x))
2618 || !is_gimple_mem_ref_addr (x))
2619 {
2620 error ("invalid first operand of MEM_REF");
2621 return x;
2622 }
2623 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2624 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2625 {
2626 error ("invalid offset operand of MEM_REF");
2627 return TREE_OPERAND (t, 1);
2628 }
2629 if (TREE_CODE (x) == ADDR_EXPR
2630 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2631 return x;
2632 *walk_subtrees = 0;
2633 break;
2634
2635 case ASSERT_EXPR:
2636 x = fold (ASSERT_EXPR_COND (t));
2637 if (x == boolean_false_node)
2638 {
2639 error ("ASSERT_EXPR with an always-false condition");
2640 return *tp;
2641 }
2642 break;
2643
2644 case MODIFY_EXPR:
2645 error ("MODIFY_EXPR not expected while having tuples");
2646 return *tp;
2647
2648 case ADDR_EXPR:
2649 {
2650 tree tem;
2651
2652 gcc_assert (is_gimple_address (t));
2653
2654 /* Skip any references (they will be checked when we recurse down the
2655 tree) and ensure that any variable used as a prefix is marked
2656 addressable. */
2657 for (x = TREE_OPERAND (t, 0);
2658 handled_component_p (x);
2659 x = TREE_OPERAND (x, 0))
2660 ;
2661
2662 if ((tem = verify_address (t, x)))
2663 return tem;
2664
2665 if (!(TREE_CODE (x) == VAR_DECL
2666 || TREE_CODE (x) == PARM_DECL
2667 || TREE_CODE (x) == RESULT_DECL))
2668 return NULL;
2669
2670 if (!TREE_ADDRESSABLE (x))
2671 {
2672 error ("address taken, but ADDRESSABLE bit not set");
2673 return x;
2674 }
2675
2676 break;
2677 }
2678
2679 case COND_EXPR:
2680 x = COND_EXPR_COND (t);
2681 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2682 {
2683 error ("non-integral used in condition");
2684 return x;
2685 }
2686 if (!is_gimple_condexpr (x))
2687 {
2688 error ("invalid conditional operand");
2689 return x;
2690 }
2691 break;
2692
2693 case NON_LVALUE_EXPR:
2694 case TRUTH_NOT_EXPR:
2695 gcc_unreachable ();
2696
2697 CASE_CONVERT:
2698 case FIX_TRUNC_EXPR:
2699 case FLOAT_EXPR:
2700 case NEGATE_EXPR:
2701 case ABS_EXPR:
2702 case BIT_NOT_EXPR:
2703 CHECK_OP (0, "invalid operand to unary operator");
2704 break;
2705
2706 case REALPART_EXPR:
2707 case IMAGPART_EXPR:
2708 case BIT_FIELD_REF:
2709 if (!is_gimple_reg_type (TREE_TYPE (t)))
2710 {
2711 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2712 return t;
2713 }
2714
2715 if (TREE_CODE (t) == BIT_FIELD_REF)
2716 {
2717 tree t0 = TREE_OPERAND (t, 0);
2718 tree t1 = TREE_OPERAND (t, 1);
2719 tree t2 = TREE_OPERAND (t, 2);
2720 if (!tree_fits_uhwi_p (t1)
2721 || !tree_fits_uhwi_p (t2))
2722 {
2723 error ("invalid position or size operand to BIT_FIELD_REF");
2724 return t;
2725 }
2726 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2727 && (TYPE_PRECISION (TREE_TYPE (t))
2728 != tree_to_uhwi (t1)))
2729 {
2730 error ("integral result type precision does not match "
2731 "field size of BIT_FIELD_REF");
2732 return t;
2733 }
2734 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2735 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2736 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2737 != tree_to_uhwi (t1)))
2738 {
2739 error ("mode precision of non-integral result does not "
2740 "match field size of BIT_FIELD_REF");
2741 return t;
2742 }
2743 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2744 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2745 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2746 {
2747 error ("position plus size exceeds size of referenced object in "
2748 "BIT_FIELD_REF");
2749 return t;
2750 }
2751 }
2752 t = TREE_OPERAND (t, 0);
2753
2754 /* Fall-through. */
2755 case COMPONENT_REF:
2756 case ARRAY_REF:
2757 case ARRAY_RANGE_REF:
2758 case VIEW_CONVERT_EXPR:
2759 /* We have a nest of references. Verify that each of the operands
2760 that determine where to reference is either a constant or a variable,
2761 verify that the base is valid, and then show we've already checked
2762 the subtrees. */
2763 while (handled_component_p (t))
2764 {
2765 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2766 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2767 else if (TREE_CODE (t) == ARRAY_REF
2768 || TREE_CODE (t) == ARRAY_RANGE_REF)
2769 {
2770 CHECK_OP (1, "invalid array index");
2771 if (TREE_OPERAND (t, 2))
2772 CHECK_OP (2, "invalid array lower bound");
2773 if (TREE_OPERAND (t, 3))
2774 CHECK_OP (3, "invalid array stride");
2775 }
2776 else if (TREE_CODE (t) == BIT_FIELD_REF
2777 || TREE_CODE (t) == REALPART_EXPR
2778 || TREE_CODE (t) == IMAGPART_EXPR)
2779 {
2780 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2781 "REALPART_EXPR");
2782 return t;
2783 }
2784
2785 t = TREE_OPERAND (t, 0);
2786 }
2787
2788 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2789 {
2790 error ("invalid reference prefix");
2791 return t;
2792 }
2793 *walk_subtrees = 0;
2794 break;
2795 case PLUS_EXPR:
2796 case MINUS_EXPR:
2797 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2798 should be done using POINTER_PLUS_EXPR. */
2799 if (POINTER_TYPE_P (TREE_TYPE (t)))
2800 {
2801 error ("invalid operand to plus/minus, type is a pointer");
2802 return t;
2803 }
2804 CHECK_OP (0, "invalid operand to binary operator");
2805 CHECK_OP (1, "invalid operand to binary operator");
2806 break;
2807
2808 case POINTER_PLUS_EXPR:
2809 /* Check to make sure the first operand is a pointer or reference type. */
2810 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2811 {
2812 error ("invalid operand to pointer plus, first operand is not a pointer");
2813 return t;
2814 }
2815 /* Check to make sure the second operand is a ptrofftype. */
2816 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2817 {
2818 error ("invalid operand to pointer plus, second operand is not an "
2819 "integer type of appropriate width");
2820 return t;
2821 }
2822 /* FALLTHROUGH */
2823 case LT_EXPR:
2824 case LE_EXPR:
2825 case GT_EXPR:
2826 case GE_EXPR:
2827 case EQ_EXPR:
2828 case NE_EXPR:
2829 case UNORDERED_EXPR:
2830 case ORDERED_EXPR:
2831 case UNLT_EXPR:
2832 case UNLE_EXPR:
2833 case UNGT_EXPR:
2834 case UNGE_EXPR:
2835 case UNEQ_EXPR:
2836 case LTGT_EXPR:
2837 case MULT_EXPR:
2838 case TRUNC_DIV_EXPR:
2839 case CEIL_DIV_EXPR:
2840 case FLOOR_DIV_EXPR:
2841 case ROUND_DIV_EXPR:
2842 case TRUNC_MOD_EXPR:
2843 case CEIL_MOD_EXPR:
2844 case FLOOR_MOD_EXPR:
2845 case ROUND_MOD_EXPR:
2846 case RDIV_EXPR:
2847 case EXACT_DIV_EXPR:
2848 case MIN_EXPR:
2849 case MAX_EXPR:
2850 case LSHIFT_EXPR:
2851 case RSHIFT_EXPR:
2852 case LROTATE_EXPR:
2853 case RROTATE_EXPR:
2854 case BIT_IOR_EXPR:
2855 case BIT_XOR_EXPR:
2856 case BIT_AND_EXPR:
2857 CHECK_OP (0, "invalid operand to binary operator");
2858 CHECK_OP (1, "invalid operand to binary operator");
2859 break;
2860
2861 case CONSTRUCTOR:
2862 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2863 *walk_subtrees = 0;
2864 break;
2865
2866 case CASE_LABEL_EXPR:
2867 if (CASE_CHAIN (t))
2868 {
2869 error ("invalid CASE_CHAIN");
2870 return t;
2871 }
2872 break;
2873
2874 default:
2875 break;
2876 }
2877 return NULL;
2878
2879 #undef CHECK_OP
2880 }
2881
2882
2883 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2884 Returns true if there is an error, otherwise false. */
2885
2886 static bool
2887 verify_types_in_gimple_min_lval (tree expr)
2888 {
2889 tree op;
2890
2891 if (is_gimple_id (expr))
2892 return false;
2893
2894 if (TREE_CODE (expr) != TARGET_MEM_REF
2895 && TREE_CODE (expr) != MEM_REF)
2896 {
2897 error ("invalid expression for min lvalue");
2898 return true;
2899 }
2900
2901 /* TARGET_MEM_REFs are strange beasts. */
2902 if (TREE_CODE (expr) == TARGET_MEM_REF)
2903 return false;
2904
2905 op = TREE_OPERAND (expr, 0);
2906 if (!is_gimple_val (op))
2907 {
2908 error ("invalid operand in indirect reference");
2909 debug_generic_stmt (op);
2910 return true;
2911 }
2912 /* Memory references now generally can involve a value conversion. */
2913
2914 return false;
2915 }
2916
2917 /* Verify if EXPR is a valid GIMPLE reference expression. If
2918 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2919 if there is an error, otherwise false. */
2920
2921 static bool
2922 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2923 {
2924 while (handled_component_p (expr))
2925 {
2926 tree op = TREE_OPERAND (expr, 0);
2927
2928 if (TREE_CODE (expr) == ARRAY_REF
2929 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2930 {
2931 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2932 || (TREE_OPERAND (expr, 2)
2933 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2934 || (TREE_OPERAND (expr, 3)
2935 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2936 {
2937 error ("invalid operands to array reference");
2938 debug_generic_stmt (expr);
2939 return true;
2940 }
2941 }
2942
2943 /* Verify if the reference array element types are compatible. */
2944 if (TREE_CODE (expr) == ARRAY_REF
2945 && !useless_type_conversion_p (TREE_TYPE (expr),
2946 TREE_TYPE (TREE_TYPE (op))))
2947 {
2948 error ("type mismatch in array reference");
2949 debug_generic_stmt (TREE_TYPE (expr));
2950 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2951 return true;
2952 }
2953 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2954 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2955 TREE_TYPE (TREE_TYPE (op))))
2956 {
2957 error ("type mismatch in array range reference");
2958 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2959 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2960 return true;
2961 }
2962
2963 if ((TREE_CODE (expr) == REALPART_EXPR
2964 || TREE_CODE (expr) == IMAGPART_EXPR)
2965 && !useless_type_conversion_p (TREE_TYPE (expr),
2966 TREE_TYPE (TREE_TYPE (op))))
2967 {
2968 error ("type mismatch in real/imagpart reference");
2969 debug_generic_stmt (TREE_TYPE (expr));
2970 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2971 return true;
2972 }
2973
2974 if (TREE_CODE (expr) == COMPONENT_REF
2975 && !useless_type_conversion_p (TREE_TYPE (expr),
2976 TREE_TYPE (TREE_OPERAND (expr, 1))))
2977 {
2978 error ("type mismatch in component reference");
2979 debug_generic_stmt (TREE_TYPE (expr));
2980 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2981 return true;
2982 }
2983
2984 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2985 {
2986 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2987 that their operand is not an SSA name or an invariant when
2988 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2989 bug). Otherwise there is nothing to verify, gross mismatches at
2990 most invoke undefined behavior. */
2991 if (require_lvalue
2992 && (TREE_CODE (op) == SSA_NAME
2993 || is_gimple_min_invariant (op)))
2994 {
2995 error ("conversion of an SSA_NAME on the left hand side");
2996 debug_generic_stmt (expr);
2997 return true;
2998 }
2999 else if (TREE_CODE (op) == SSA_NAME
3000 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3001 {
3002 error ("conversion of register to a different size");
3003 debug_generic_stmt (expr);
3004 return true;
3005 }
3006 else if (!handled_component_p (op))
3007 return false;
3008 }
3009
3010 expr = op;
3011 }
3012
3013 if (TREE_CODE (expr) == MEM_REF)
3014 {
3015 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3016 {
3017 error ("invalid address operand in MEM_REF");
3018 debug_generic_stmt (expr);
3019 return true;
3020 }
3021 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3022 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3023 {
3024 error ("invalid offset operand in MEM_REF");
3025 debug_generic_stmt (expr);
3026 return true;
3027 }
3028 }
3029 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3030 {
3031 if (!TMR_BASE (expr)
3032 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3033 {
3034 error ("invalid address operand in TARGET_MEM_REF");
3035 return true;
3036 }
3037 if (!TMR_OFFSET (expr)
3038 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3039 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3040 {
3041 error ("invalid offset operand in TARGET_MEM_REF");
3042 debug_generic_stmt (expr);
3043 return true;
3044 }
3045 }
3046
3047 return ((require_lvalue || !is_gimple_min_invariant (expr))
3048 && verify_types_in_gimple_min_lval (expr));
3049 }
3050
3051 /* Return true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3052 list of pointer-to types is trivially convertible to DEST. */
3053
3054 static bool
3055 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3056 {
3057 tree src;
3058
3059 if (!TYPE_POINTER_TO (src_obj))
3060 return true;
3061
3062 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3063 if (useless_type_conversion_p (dest, src))
3064 return true;
3065
3066 return false;
3067 }
3068
3069 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3070 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3071
3072 static bool
3073 valid_fixed_convert_types_p (tree type1, tree type2)
3074 {
3075 return (FIXED_POINT_TYPE_P (type1)
3076 && (INTEGRAL_TYPE_P (type2)
3077 || SCALAR_FLOAT_TYPE_P (type2)
3078 || FIXED_POINT_TYPE_P (type2)));
3079 }
3080
3081 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3082 is a problem, otherwise false. */
3083
3084 static bool
3085 verify_gimple_call (gimple stmt)
3086 {
3087 tree fn = gimple_call_fn (stmt);
3088 tree fntype, fndecl;
3089 unsigned i;
3090
3091 if (gimple_call_internal_p (stmt))
3092 {
3093 if (fn)
3094 {
3095 error ("gimple call has two targets");
3096 debug_generic_stmt (fn);
3097 return true;
3098 }
3099 }
3100 else
3101 {
3102 if (!fn)
3103 {
3104 error ("gimple call has no target");
3105 return true;
3106 }
3107 }
3108
3109 if (fn && !is_gimple_call_addr (fn))
3110 {
3111 error ("invalid function in gimple call");
3112 debug_generic_stmt (fn);
3113 return true;
3114 }
3115
3116 if (fn
3117 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3118 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3119 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3120 {
3121 error ("non-function in gimple call");
3122 return true;
3123 }
3124
3125 fndecl = gimple_call_fndecl (stmt);
3126 if (fndecl
3127 && TREE_CODE (fndecl) == FUNCTION_DECL
3128 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3129 && !DECL_PURE_P (fndecl)
3130 && !TREE_READONLY (fndecl))
3131 {
3132 error ("invalid pure const state for function");
3133 return true;
3134 }
3135
3136 if (gimple_call_lhs (stmt)
3137 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3138 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3139 {
3140 error ("invalid LHS in gimple call");
3141 return true;
3142 }
3143
3144 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3145 {
3146 error ("LHS in noreturn call");
3147 return true;
3148 }
3149
3150 fntype = gimple_call_fntype (stmt);
3151 if (fntype
3152 && gimple_call_lhs (stmt)
3153 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3154 TREE_TYPE (fntype))
3155 /* ??? At least C++ misses conversions at assignments from
3156 void * call results.
3157 ??? Java is completely off. Especially with functions
3158 returning java.lang.Object.
3159 For now simply allow arbitrary pointer type conversions. */
3160 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3161 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3162 {
3163 error ("invalid conversion in gimple call");
3164 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3165 debug_generic_stmt (TREE_TYPE (fntype));
3166 return true;
3167 }
3168
3169 if (gimple_call_chain (stmt)
3170 && !is_gimple_val (gimple_call_chain (stmt)))
3171 {
3172 error ("invalid static chain in gimple call");
3173 debug_generic_stmt (gimple_call_chain (stmt));
3174 return true;
3175 }
3176
3177 /* If there is a static chain argument, this should not be an indirect
3178 call, and the decl should have DECL_STATIC_CHAIN set. */
3179 if (gimple_call_chain (stmt))
3180 {
3181 if (!gimple_call_fndecl (stmt))
3182 {
3183 error ("static chain in indirect gimple call");
3184 return true;
3185 }
3186 fn = TREE_OPERAND (fn, 0);
3187
3188 if (!DECL_STATIC_CHAIN (fn))
3189 {
3190 error ("static chain with function that doesn%'t use one");
3191 return true;
3192 }
3193 }
3194
3195 /* ??? The C frontend passes unpromoted arguments in case it
3196 didn't see a function declaration before the call. So for now
3197 leave the call arguments mostly unverified. Once we gimplify
3198 unit-at-a-time we have a chance to fix this. */
3199
3200 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3201 {
3202 tree arg = gimple_call_arg (stmt, i);
3203 if ((is_gimple_reg_type (TREE_TYPE (arg))
3204 && !is_gimple_val (arg))
3205 || (!is_gimple_reg_type (TREE_TYPE (arg))
3206 && !is_gimple_lvalue (arg)))
3207 {
3208 error ("invalid argument to gimple call");
3209 debug_generic_expr (arg);
3210 return true;
3211 }
3212 }
3213
3214 return false;
3215 }
3216
3217 /* Verifies the gimple comparison with the result type TYPE and
3218 the operands OP0 and OP1. */
3219
3220 static bool
3221 verify_gimple_comparison (tree type, tree op0, tree op1)
3222 {
3223 tree op0_type = TREE_TYPE (op0);
3224 tree op1_type = TREE_TYPE (op1);
3225
3226 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3227 {
3228 error ("invalid operands in gimple comparison");
3229 return true;
3230 }
3231
3232 /* For comparisons we do not have the operations type as the
3233 effective type the comparison is carried out in. Instead
3234 we require that either the first operand is trivially
3235 convertible into the second, or the other way around.
3236 Because we special-case pointers to void we allow
3237 comparisons of pointers with the same mode as well. */
3238 if (!useless_type_conversion_p (op0_type, op1_type)
3239 && !useless_type_conversion_p (op1_type, op0_type)
3240 && (!POINTER_TYPE_P (op0_type)
3241 || !POINTER_TYPE_P (op1_type)
3242 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3243 {
3244 error ("mismatching comparison operand types");
3245 debug_generic_expr (op0_type);
3246 debug_generic_expr (op1_type);
3247 return true;
3248 }
3249
3250 /* The resulting type of a comparison may be an effective boolean type. */
3251 if (INTEGRAL_TYPE_P (type)
3252 && (TREE_CODE (type) == BOOLEAN_TYPE
3253 || TYPE_PRECISION (type) == 1))
3254 {
3255 if (TREE_CODE (op0_type) == VECTOR_TYPE
3256 || TREE_CODE (op1_type) == VECTOR_TYPE)
3257 {
3258 error ("vector comparison returning a boolean");
3259 debug_generic_expr (op0_type);
3260 debug_generic_expr (op1_type);
3261 return true;
3262 }
3263 }
3264 /* Or an integer vector type with the same size and element count
3265 as the comparison operand types. */
3266 else if (TREE_CODE (type) == VECTOR_TYPE
3267 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3268 {
3269 if (TREE_CODE (op0_type) != VECTOR_TYPE
3270 || TREE_CODE (op1_type) != VECTOR_TYPE)
3271 {
3272 error ("non-vector operands in vector comparison");
3273 debug_generic_expr (op0_type);
3274 debug_generic_expr (op1_type);
3275 return true;
3276 }
3277
3278 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3279 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3280 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3281 /* The result of a vector comparison is of signed
3282 integral type. */
3283 || TYPE_UNSIGNED (TREE_TYPE (type)))
3284 {
3285 error ("invalid vector comparison resulting type");
3286 debug_generic_expr (type);
3287 return true;
3288 }
3289 }
3290 else
3291 {
3292 error ("bogus comparison result type");
3293 debug_generic_expr (type);
3294 return true;
3295 }
3296
3297 return false;
3298 }
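/* For example, using the GNU C vector extension (illustrative):

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si a, b;
     v4si m = a > b;    valid: signed integer vector with the same
                        element count and element size as the operands
     int  j = a > b;    rejected above as a vector comparison
                        returning a boolean

   Scalar comparisons instead produce a boolean or an effective
   boolean (precision-1) integral type.  */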
3299
3300 /* Verify a gimple assignment statement STMT with an unary rhs.
3301 Returns true if anything is wrong. */
3302
3303 static bool
3304 verify_gimple_assign_unary (gimple stmt)
3305 {
3306 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3307 tree lhs = gimple_assign_lhs (stmt);
3308 tree lhs_type = TREE_TYPE (lhs);
3309 tree rhs1 = gimple_assign_rhs1 (stmt);
3310 tree rhs1_type = TREE_TYPE (rhs1);
3311
3312 if (!is_gimple_reg (lhs))
3313 {
3314 error ("non-register as LHS of unary operation");
3315 return true;
3316 }
3317
3318 if (!is_gimple_val (rhs1))
3319 {
3320 error ("invalid operand in unary operation");
3321 return true;
3322 }
3323
3324 /* First handle conversions. */
3325 switch (rhs_code)
3326 {
3327 CASE_CONVERT:
3328 {
3329 /* Allow conversions from pointer type to integral type only if
3330 there is no sign or zero extension involved.
3331 For targets where the precision of ptrofftype doesn't match that
3332 of pointers we need to allow arbitrary conversions to ptrofftype. */
3333 if ((POINTER_TYPE_P (lhs_type)
3334 && INTEGRAL_TYPE_P (rhs1_type))
3335 || (POINTER_TYPE_P (rhs1_type)
3336 && INTEGRAL_TYPE_P (lhs_type)
3337 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3338 || ptrofftype_p (sizetype))))
3339 return false;
3340
3341 /* Allow conversion from integral to offset type and vice versa. */
3342 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3343 && INTEGRAL_TYPE_P (rhs1_type))
3344 || (INTEGRAL_TYPE_P (lhs_type)
3345 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3346 return false;
3347
3348 /* Otherwise assert we are converting between types of the
3349 same kind. */
3350 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3351 {
3352 error ("invalid types in nop conversion");
3353 debug_generic_expr (lhs_type);
3354 debug_generic_expr (rhs1_type);
3355 return true;
3356 }
3357
3358 return false;
3359 }
3360
3361 case ADDR_SPACE_CONVERT_EXPR:
3362 {
3363 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3364 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3365 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3366 {
3367 error ("invalid types in address space conversion");
3368 debug_generic_expr (lhs_type);
3369 debug_generic_expr (rhs1_type);
3370 return true;
3371 }
3372
3373 return false;
3374 }
3375
3376 case FIXED_CONVERT_EXPR:
3377 {
3378 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3379 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3380 {
3381 error ("invalid types in fixed-point conversion");
3382 debug_generic_expr (lhs_type);
3383 debug_generic_expr (rhs1_type);
3384 return true;
3385 }
3386
3387 return false;
3388 }
3389
3390 case FLOAT_EXPR:
3391 {
3392 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3393 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3394 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3395 {
3396 error ("invalid types in conversion to floating point");
3397 debug_generic_expr (lhs_type);
3398 debug_generic_expr (rhs1_type);
3399 return true;
3400 }
3401
3402 return false;
3403 }
3404
3405 case FIX_TRUNC_EXPR:
3406 {
3407 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3408 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3409 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3410 {
3411 error ("invalid types in conversion to integer");
3412 debug_generic_expr (lhs_type);
3413 debug_generic_expr (rhs1_type);
3414 return true;
3415 }
3416
3417 return false;
3418 }
3419
3420 case VEC_UNPACK_HI_EXPR:
3421 case VEC_UNPACK_LO_EXPR:
3422 case REDUC_MAX_EXPR:
3423 case REDUC_MIN_EXPR:
3424 case REDUC_PLUS_EXPR:
3425 case VEC_UNPACK_FLOAT_HI_EXPR:
3426 case VEC_UNPACK_FLOAT_LO_EXPR:
3427 /* FIXME. */
3428 return false;
3429
3430 case NEGATE_EXPR:
3431 case ABS_EXPR:
3432 case BIT_NOT_EXPR:
3433 case PAREN_EXPR:
3434 case NON_LVALUE_EXPR:
3435 case CONJ_EXPR:
3436 break;
3437
3438 default:
3439 gcc_unreachable ();
3440 }
3441
3442 /* For the remaining codes assert there is no conversion involved. */
3443 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3444 {
3445 error ("non-trivial conversion in unary operation");
3446 debug_generic_expr (lhs_type);
3447 debug_generic_expr (rhs1_type);
3448 return true;
3449 }
3450
3451 return false;
3452 }
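/* As an illustration of the CASE_CONVERT rules above: on an LP64
   target a 64-bit pointer may be converted directly to any integer
   type of at most 64 bits, since truncation involves no sign or zero
   extension, but widening it to a 128-bit integer has to go through
   an intermediate integer conversion first.  */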
3453
3454 /* Verify a gimple assignment statement STMT with a binary rhs.
3455 Returns true if anything is wrong. */
3456
3457 static bool
3458 verify_gimple_assign_binary (gimple stmt)
3459 {
3460 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3461 tree lhs = gimple_assign_lhs (stmt);
3462 tree lhs_type = TREE_TYPE (lhs);
3463 tree rhs1 = gimple_assign_rhs1 (stmt);
3464 tree rhs1_type = TREE_TYPE (rhs1);
3465 tree rhs2 = gimple_assign_rhs2 (stmt);
3466 tree rhs2_type = TREE_TYPE (rhs2);
3467
3468 if (!is_gimple_reg (lhs))
3469 {
3470 error ("non-register as LHS of binary operation");
3471 return true;
3472 }
3473
3474 if (!is_gimple_val (rhs1)
3475 || !is_gimple_val (rhs2))
3476 {
3477 error ("invalid operands in binary operation");
3478 return true;
3479 }
3480
3481 /* First handle operations that involve different types. */
3482 switch (rhs_code)
3483 {
3484 case COMPLEX_EXPR:
3485 {
3486 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3487 || !(INTEGRAL_TYPE_P (rhs1_type)
3488 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3489 || !(INTEGRAL_TYPE_P (rhs2_type)
3490 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3491 {
3492 error ("type mismatch in complex expression");
3493 debug_generic_expr (lhs_type);
3494 debug_generic_expr (rhs1_type);
3495 debug_generic_expr (rhs2_type);
3496 return true;
3497 }
3498
3499 return false;
3500 }
3501
3502 case LSHIFT_EXPR:
3503 case RSHIFT_EXPR:
3504 case LROTATE_EXPR:
3505 case RROTATE_EXPR:
3506 {
3507 /* Shifts and rotates are ok on integral types, fixed point
3508 types and integer vector types. */
3509 if ((!INTEGRAL_TYPE_P (rhs1_type)
3510 && !FIXED_POINT_TYPE_P (rhs1_type)
3511 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3512 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3513 || (!INTEGRAL_TYPE_P (rhs2_type)
3514 /* Vector shifts of vectors are also ok. */
3515 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3516 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3517 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3518 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3519 || !useless_type_conversion_p (lhs_type, rhs1_type))
3520 {
3521 error ("type mismatch in shift expression");
3522 debug_generic_expr (lhs_type);
3523 debug_generic_expr (rhs1_type);
3524 debug_generic_expr (rhs2_type);
3525 return true;
3526 }
3527
3528 return false;
3529 }
3530
3531 case VEC_LSHIFT_EXPR:
3532 case VEC_RSHIFT_EXPR:
3533 {
3534 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3535 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3536 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3537 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3538 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3539 || (!INTEGRAL_TYPE_P (rhs2_type)
3540 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3541 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3542 || !useless_type_conversion_p (lhs_type, rhs1_type))
3543 {
3544 error ("type mismatch in vector shift expression");
3545 debug_generic_expr (lhs_type);
3546 debug_generic_expr (rhs1_type);
3547 debug_generic_expr (rhs2_type);
3548 return true;
3549 }
3550 /* For shifting a vector of non-integral components we
3551 only allow shifting by a constant multiple of the element size. */
3552 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3553 && (TREE_CODE (rhs2) != INTEGER_CST
3554 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3555 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3556 {
3557 error ("non-element sized vector shift of floating point vector");
3558 return true;
3559 }
3560
3561 return false;
3562 }
3563
3564 case WIDEN_LSHIFT_EXPR:
3565 {
3566 if (!INTEGRAL_TYPE_P (lhs_type)
3567 || !INTEGRAL_TYPE_P (rhs1_type)
3568 || TREE_CODE (rhs2) != INTEGER_CST
3569 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3570 {
3571 error ("type mismatch in widening vector shift expression");
3572 debug_generic_expr (lhs_type);
3573 debug_generic_expr (rhs1_type);
3574 debug_generic_expr (rhs2_type);
3575 return true;
3576 }
3577
3578 return false;
3579 }
3580
3581 case VEC_WIDEN_LSHIFT_HI_EXPR:
3582 case VEC_WIDEN_LSHIFT_LO_EXPR:
3583 {
3584 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3585 || TREE_CODE (lhs_type) != VECTOR_TYPE
3586 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3587 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3588 || TREE_CODE (rhs2) != INTEGER_CST
3589 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3590 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3591 {
3592 error ("type mismatch in widening vector shift expression");
3593 debug_generic_expr (lhs_type);
3594 debug_generic_expr (rhs1_type);
3595 debug_generic_expr (rhs2_type);
3596 return true;
3597 }
3598
3599 return false;
3600 }
3601
3602 case PLUS_EXPR:
3603 case MINUS_EXPR:
3604 {
3605 tree lhs_etype = lhs_type;
3606 tree rhs1_etype = rhs1_type;
3607 tree rhs2_etype = rhs2_type;
3608 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3609 {
3610 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3611 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3612 {
3613 error ("invalid non-vector operands to vector valued plus");
3614 return true;
3615 }
3616 lhs_etype = TREE_TYPE (lhs_type);
3617 rhs1_etype = TREE_TYPE (rhs1_type);
3618 rhs2_etype = TREE_TYPE (rhs2_type);
3619 }
3620 if (POINTER_TYPE_P (lhs_etype)
3621 || POINTER_TYPE_P (rhs1_etype)
3622 || POINTER_TYPE_P (rhs2_etype))
3623 {
3624 error ("invalid (pointer) operands to plus/minus");
3625 return true;
3626 }
3627
3628 /* Continue with generic binary expression handling. */
3629 break;
3630 }
3631
3632 case POINTER_PLUS_EXPR:
3633 {
3634 if (!POINTER_TYPE_P (rhs1_type)
3635 || !useless_type_conversion_p (lhs_type, rhs1_type)
3636 || !ptrofftype_p (rhs2_type))
3637 {
3638 error ("type mismatch in pointer plus expression");
3639 debug_generic_stmt (lhs_type);
3640 debug_generic_stmt (rhs1_type);
3641 debug_generic_stmt (rhs2_type);
3642 return true;
3643 }
3644
3645 return false;
3646 }
3647
3648 case TRUTH_ANDIF_EXPR:
3649 case TRUTH_ORIF_EXPR:
3650 case TRUTH_AND_EXPR:
3651 case TRUTH_OR_EXPR:
3652 case TRUTH_XOR_EXPR:
3653
3654 gcc_unreachable ();
3655
3656 case LT_EXPR:
3657 case LE_EXPR:
3658 case GT_EXPR:
3659 case GE_EXPR:
3660 case EQ_EXPR:
3661 case NE_EXPR:
3662 case UNORDERED_EXPR:
3663 case ORDERED_EXPR:
3664 case UNLT_EXPR:
3665 case UNLE_EXPR:
3666 case UNGT_EXPR:
3667 case UNGE_EXPR:
3668 case UNEQ_EXPR:
3669 case LTGT_EXPR:
3670 /* Comparisons are also binary, but the result type is not
3671 connected to the operand types. */
3672 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3673
3674 case WIDEN_MULT_EXPR:
3675 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3676 return true;
3677 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3678 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3679
3680 case WIDEN_SUM_EXPR:
3681 case VEC_WIDEN_MULT_HI_EXPR:
3682 case VEC_WIDEN_MULT_LO_EXPR:
3683 case VEC_WIDEN_MULT_EVEN_EXPR:
3684 case VEC_WIDEN_MULT_ODD_EXPR:
3685 case VEC_PACK_TRUNC_EXPR:
3686 case VEC_PACK_SAT_EXPR:
3687 case VEC_PACK_FIX_TRUNC_EXPR:
3688 /* FIXME. */
3689 return false;
3690
3691 case MULT_EXPR:
3692 case MULT_HIGHPART_EXPR:
3693 case TRUNC_DIV_EXPR:
3694 case CEIL_DIV_EXPR:
3695 case FLOOR_DIV_EXPR:
3696 case ROUND_DIV_EXPR:
3697 case TRUNC_MOD_EXPR:
3698 case CEIL_MOD_EXPR:
3699 case FLOOR_MOD_EXPR:
3700 case ROUND_MOD_EXPR:
3701 case RDIV_EXPR:
3702 case EXACT_DIV_EXPR:
3703 case MIN_EXPR:
3704 case MAX_EXPR:
3705 case BIT_IOR_EXPR:
3706 case BIT_XOR_EXPR:
3707 case BIT_AND_EXPR:
3708 /* Continue with generic binary expression handling. */
3709 break;
3710
3711 default:
3712 gcc_unreachable ();
3713 }
3714
3715 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3716 || !useless_type_conversion_p (lhs_type, rhs2_type))
3717 {
3718 error ("type mismatch in binary expression");
3719 debug_generic_stmt (lhs_type);
3720 debug_generic_stmt (rhs1_type);
3721 debug_generic_stmt (rhs2_type);
3722 return true;
3723 }
3724
3725 return false;
3726 }
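/* For instance, GIMPLE requires pointer arithmetic such as

     p_2 = p_1 + 4;

   to be expressed as a POINTER_PLUS_EXPR whose second operand has
   ptrofftype (an integer of pointer-offset width), while a PLUS_EXPR
   with pointer-typed operands or result is rejected above.  */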
3727
3728 /* Verify a gimple assignment statement STMT with a ternary rhs.
3729 Returns true if anything is wrong. */
3730
3731 static bool
3732 verify_gimple_assign_ternary (gimple stmt)
3733 {
3734 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3735 tree lhs = gimple_assign_lhs (stmt);
3736 tree lhs_type = TREE_TYPE (lhs);
3737 tree rhs1 = gimple_assign_rhs1 (stmt);
3738 tree rhs1_type = TREE_TYPE (rhs1);
3739 tree rhs2 = gimple_assign_rhs2 (stmt);
3740 tree rhs2_type = TREE_TYPE (rhs2);
3741 tree rhs3 = gimple_assign_rhs3 (stmt);
3742 tree rhs3_type = TREE_TYPE (rhs3);
3743
3744 if (!is_gimple_reg (lhs))
3745 {
3746 error ("non-register as LHS of ternary operation");
3747 return true;
3748 }
3749
3750 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3751 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3752 || !is_gimple_val (rhs2)
3753 || !is_gimple_val (rhs3))
3754 {
3755 error ("invalid operands in ternary operation");
3756 return true;
3757 }
3758
3759 /* First handle operations that involve different types. */
3760 switch (rhs_code)
3761 {
3762 case WIDEN_MULT_PLUS_EXPR:
3763 case WIDEN_MULT_MINUS_EXPR:
3764 if ((!INTEGRAL_TYPE_P (rhs1_type)
3765 && !FIXED_POINT_TYPE_P (rhs1_type))
3766 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3767 || !useless_type_conversion_p (lhs_type, rhs3_type)
3768 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3769 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3770 {
3771 error ("type mismatch in widening multiply-accumulate expression");
3772 debug_generic_expr (lhs_type);
3773 debug_generic_expr (rhs1_type);
3774 debug_generic_expr (rhs2_type);
3775 debug_generic_expr (rhs3_type);
3776 return true;
3777 }
3778 break;
3779
3780 case FMA_EXPR:
3781 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3782 || !useless_type_conversion_p (lhs_type, rhs2_type)
3783 || !useless_type_conversion_p (lhs_type, rhs3_type))
3784 {
3785 error ("type mismatch in fused multiply-add expression");
3786 debug_generic_expr (lhs_type);
3787 debug_generic_expr (rhs1_type);
3788 debug_generic_expr (rhs2_type);
3789 debug_generic_expr (rhs3_type);
3790 return true;
3791 }
3792 break;
3793
3794 case COND_EXPR:
3795 case VEC_COND_EXPR:
3796 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3797 || !useless_type_conversion_p (lhs_type, rhs3_type))
3798 {
3799 error ("type mismatch in conditional expression");
3800 debug_generic_expr (lhs_type);
3801 debug_generic_expr (rhs2_type);
3802 debug_generic_expr (rhs3_type);
3803 return true;
3804 }
3805 break;
3806
3807 case VEC_PERM_EXPR:
3808 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3809 || !useless_type_conversion_p (lhs_type, rhs2_type))
3810 {
3811 error ("type mismatch in vector permute expression");
3812 debug_generic_expr (lhs_type);
3813 debug_generic_expr (rhs1_type);
3814 debug_generic_expr (rhs2_type);
3815 debug_generic_expr (rhs3_type);
3816 return true;
3817 }
3818
3819 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3820 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3821 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3822 {
3823 error ("vector types expected in vector permute expression");
3824 debug_generic_expr (lhs_type);
3825 debug_generic_expr (rhs1_type);
3826 debug_generic_expr (rhs2_type);
3827 debug_generic_expr (rhs3_type);
3828 return true;
3829 }
3830
3831 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3832 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3833 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3834 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3835 != TYPE_VECTOR_SUBPARTS (lhs_type))
3836 {
3837 error ("vectors with different element number found "
3838 "in vector permute expression");
3839 debug_generic_expr (lhs_type);
3840 debug_generic_expr (rhs1_type);
3841 debug_generic_expr (rhs2_type);
3842 debug_generic_expr (rhs3_type);
3843 return true;
3844 }
3845
3846 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3847 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3848 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3849 {
3850 error ("invalid mask type in vector permute expression");
3851 debug_generic_expr (lhs_type);
3852 debug_generic_expr (rhs1_type);
3853 debug_generic_expr (rhs2_type);
3854 debug_generic_expr (rhs3_type);
3855 return true;
3856 }
3857
3858 return false;
3859
3860 case DOT_PROD_EXPR:
3861 case REALIGN_LOAD_EXPR:
3862 /* FIXME. */
3863 return false;
3864
3865 default:
3866 gcc_unreachable ();
3867 }
3868 return false;
3869 }
3870
3871 /* Verify a gimple assignment statement STMT with a single rhs.
3872 Returns true if anything is wrong. */
3873
3874 static bool
3875 verify_gimple_assign_single (gimple stmt)
3876 {
3877 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3878 tree lhs = gimple_assign_lhs (stmt);
3879 tree lhs_type = TREE_TYPE (lhs);
3880 tree rhs1 = gimple_assign_rhs1 (stmt);
3881 tree rhs1_type = TREE_TYPE (rhs1);
3882 bool res = false;
3883
3884 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3885 {
3886 error ("non-trivial conversion at assignment");
3887 debug_generic_expr (lhs_type);
3888 debug_generic_expr (rhs1_type);
3889 return true;
3890 }
3891
3892 if (gimple_clobber_p (stmt)
3893 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3894 {
3895 error ("non-decl/MEM_REF LHS in clobber statement");
3896 debug_generic_expr (lhs);
3897 return true;
3898 }
3899
3900 if (handled_component_p (lhs))
3901 res |= verify_types_in_gimple_reference (lhs, true);
3902
3903 /* Special codes we cannot handle via their class. */
3904 switch (rhs_code)
3905 {
3906 case ADDR_EXPR:
3907 {
3908 tree op = TREE_OPERAND (rhs1, 0);
3909 if (!is_gimple_addressable (op))
3910 {
3911 error ("invalid operand in unary expression");
3912 return true;
3913 }
3914
3915 /* Technically there is no longer a need for matching types, but
3916 gimple hygiene asks for this check. In LTO we can end up
3917 combining incompatible units and thus end up with addresses
3918 of globals that change their type to a common one. */
3919 if (!in_lto_p
3920 && !types_compatible_p (TREE_TYPE (op),
3921 TREE_TYPE (TREE_TYPE (rhs1)))
3922 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3923 TREE_TYPE (op)))
3924 {
3925 error ("type mismatch in address expression");
3926 debug_generic_stmt (TREE_TYPE (rhs1));
3927 debug_generic_stmt (TREE_TYPE (op));
3928 return true;
3929 }
3930
3931 return verify_types_in_gimple_reference (op, true);
3932 }
3933
3934 /* tcc_reference */
3935 case INDIRECT_REF:
3936 error ("INDIRECT_REF in gimple IL");
3937 return true;
3938
3939 case COMPONENT_REF:
3940 case BIT_FIELD_REF:
3941 case ARRAY_REF:
3942 case ARRAY_RANGE_REF:
3943 case VIEW_CONVERT_EXPR:
3944 case REALPART_EXPR:
3945 case IMAGPART_EXPR:
3946 case TARGET_MEM_REF:
3947 case MEM_REF:
3948 if (!is_gimple_reg (lhs)
3949 && is_gimple_reg_type (TREE_TYPE (lhs)))
3950 {
3951 error ("invalid rhs for gimple memory store");
3952 debug_generic_stmt (lhs);
3953 debug_generic_stmt (rhs1);
3954 return true;
3955 }
3956 return res || verify_types_in_gimple_reference (rhs1, false);
3957
3958 /* tcc_constant */
3959 case SSA_NAME:
3960 case INTEGER_CST:
3961 case REAL_CST:
3962 case FIXED_CST:
3963 case COMPLEX_CST:
3964 case VECTOR_CST:
3965 case STRING_CST:
3966 return res;
3967
3968 /* tcc_declaration */
3969 case CONST_DECL:
3970 return res;
3971 case VAR_DECL:
3972 case PARM_DECL:
3973 if (!is_gimple_reg (lhs)
3974 && !is_gimple_reg (rhs1)
3975 && is_gimple_reg_type (TREE_TYPE (lhs)))
3976 {
3977 error ("invalid rhs for gimple memory store");
3978 debug_generic_stmt (lhs);
3979 debug_generic_stmt (rhs1);
3980 return true;
3981 }
3982 return res;
3983
3984 case CONSTRUCTOR:
3985 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3986 {
3987 unsigned int i;
3988 tree elt_i, elt_v, elt_t = NULL_TREE;
3989
3990 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3991 return res;
3992 	  /* For vector CONSTRUCTORs we require that either it is an empty
3993 	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3994 	     (then the element count must be correct to cover the whole
3995 	     outer vector and the index must be NULL on all elements), or
3996 	     it is a CONSTRUCTOR of scalar elements, where we as an
3997 	     exception allow a smaller number of elements (assuming zero
3998 	     filling) and consecutive indexes as compared to NULL indexes
3999 	     (such CONSTRUCTORs can appear in the IL from FEs).  */
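	  /* A rough sketch of the two accepted forms, using a
	     hypothetical V4SI outer vector type:
	       _5 = {_1, _2};        <- two V2SI vectors, NULL indexes
	       _5 = {i_1, i_2};      <- scalars, NULL or consecutive
					indexes, trailing elements
					implicitly zero filled  */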
4000 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4001 {
4002 if (elt_t == NULL_TREE)
4003 {
4004 elt_t = TREE_TYPE (elt_v);
4005 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4006 {
4007 tree elt_t = TREE_TYPE (elt_v);
4008 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4009 TREE_TYPE (elt_t)))
4010 {
4011 error ("incorrect type of vector CONSTRUCTOR"
4012 " elements");
4013 debug_generic_stmt (rhs1);
4014 return true;
4015 }
4016 else if (CONSTRUCTOR_NELTS (rhs1)
4017 * TYPE_VECTOR_SUBPARTS (elt_t)
4018 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4019 {
4020 error ("incorrect number of vector CONSTRUCTOR"
4021 " elements");
4022 debug_generic_stmt (rhs1);
4023 return true;
4024 }
4025 }
4026 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4027 elt_t))
4028 {
4029 error ("incorrect type of vector CONSTRUCTOR elements");
4030 debug_generic_stmt (rhs1);
4031 return true;
4032 }
4033 else if (CONSTRUCTOR_NELTS (rhs1)
4034 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4035 {
4036 error ("incorrect number of vector CONSTRUCTOR elements");
4037 debug_generic_stmt (rhs1);
4038 return true;
4039 }
4040 }
4041 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4042 {
4043 error ("incorrect type of vector CONSTRUCTOR elements");
4044 debug_generic_stmt (rhs1);
4045 return true;
4046 }
4047 if (elt_i != NULL_TREE
4048 && (TREE_CODE (elt_t) == VECTOR_TYPE
4049 || TREE_CODE (elt_i) != INTEGER_CST
4050 || compare_tree_int (elt_i, i) != 0))
4051 {
4052 error ("vector CONSTRUCTOR with non-NULL element index");
4053 debug_generic_stmt (rhs1);
4054 return true;
4055 }
4056 }
4057 }
4058 return res;
4059 case OBJ_TYPE_REF:
4060 case ASSERT_EXPR:
4061 case WITH_SIZE_EXPR:
4062 /* FIXME. */
4063 return res;
4064
4065 default:;
4066 }
4067
4068 return res;
4069 }
4070
4071 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4072 is a problem, otherwise false. */
4073
4074 static bool
4075 verify_gimple_assign (gimple stmt)
4076 {
4077 switch (gimple_assign_rhs_class (stmt))
4078 {
4079 case GIMPLE_SINGLE_RHS:
4080 return verify_gimple_assign_single (stmt);
4081
4082 case GIMPLE_UNARY_RHS:
4083 return verify_gimple_assign_unary (stmt);
4084
4085 case GIMPLE_BINARY_RHS:
4086 return verify_gimple_assign_binary (stmt);
4087
4088 case GIMPLE_TERNARY_RHS:
4089 return verify_gimple_assign_ternary (stmt);
4090
4091 default:
4092 gcc_unreachable ();
4093 }
4094 }
4095
4096 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4097 is a problem, otherwise false. */
4098
4099 static bool
4100 verify_gimple_return (gimple stmt)
4101 {
4102 tree op = gimple_return_retval (stmt);
4103 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4104
4105 /* We cannot test for present return values as we do not fix up missing
4106 return values from the original source. */
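  /* E.g. for  int f (void) { }  falling off the end of the function,
     the GIMPLE_RETURN carries no operand and is accepted as-is.  */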
4107 if (op == NULL)
4108 return false;
4109
4110 if (!is_gimple_val (op)
4111 && TREE_CODE (op) != RESULT_DECL)
4112 {
4113 error ("invalid operand in return statement");
4114 debug_generic_stmt (op);
4115 return true;
4116 }
4117
4118 if ((TREE_CODE (op) == RESULT_DECL
4119 && DECL_BY_REFERENCE (op))
4120 || (TREE_CODE (op) == SSA_NAME
4121 && SSA_NAME_VAR (op)
4122 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4123 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4124 op = TREE_TYPE (op);
4125
4126 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4127 {
4128 error ("invalid conversion in return statement");
4129 debug_generic_stmt (restype);
4130 debug_generic_stmt (TREE_TYPE (op));
4131 return true;
4132 }
4133
4134 return false;
4135 }
4136
4137
4138 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4139 is a problem, otherwise false. */
4140
4141 static bool
4142 verify_gimple_goto (gimple stmt)
4143 {
4144 tree dest = gimple_goto_dest (stmt);
4145
4146 /* ??? We have two canonical forms of direct goto destinations, a
4147 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
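  /* For instance, both forms are accepted here (a sketch):
       goto lab;     <- DEST is the bare LABEL_DECL "lab"
       goto ptr_1;   <- computed goto, DEST is a gimple value of
			pointer type  */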
4148 if (TREE_CODE (dest) != LABEL_DECL
4149 && (!is_gimple_val (dest)
4150 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4151 {
4152 error ("goto destination is neither a label nor a pointer");
4153 return true;
4154 }
4155
4156 return false;
4157 }
4158
4159 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4160 is a problem, otherwise false. */
4161
4162 static bool
4163 verify_gimple_switch (gimple stmt)
4164 {
4165 unsigned int i, n;
4166 tree elt, prev_upper_bound = NULL_TREE;
4167 tree index_type, elt_type = NULL_TREE;
4168
4169 if (!is_gimple_val (gimple_switch_index (stmt)))
4170 {
4171 error ("invalid operand to switch statement");
4172 debug_generic_stmt (gimple_switch_index (stmt));
4173 return true;
4174 }
4175
4176 index_type = TREE_TYPE (gimple_switch_index (stmt));
4177 if (! INTEGRAL_TYPE_P (index_type))
4178 {
4179 error ("non-integral type switch statement");
4180 debug_generic_expr (index_type);
4181 return true;
4182 }
4183
4184 elt = gimple_switch_label (stmt, 0);
4185 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4186 {
4187 error ("invalid default case label in switch statement");
4188 debug_generic_expr (elt);
4189 return true;
4190 }
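  /* A sketch of a well-formed case vector, as it appears in GIMPLE
     dumps (hypothetical labels):
       switch (i_1) <default: L0, case 1: L1, case 5 ... 7: L2>
     The default label comes first with NULL CASE_LOW and CASE_HIGH;
     the remaining labels are checked below to be sorted and
     non-overlapping.  */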
4191
4192 n = gimple_switch_num_labels (stmt);
4193 for (i = 1; i < n; i++)
4194 {
4195 elt = gimple_switch_label (stmt, i);
4196
4197 if (! CASE_LOW (elt))
4198 {
4199 error ("invalid case label in switch statement");
4200 debug_generic_expr (elt);
4201 return true;
4202 }
4203 if (CASE_HIGH (elt)
4204 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4205 {
4206 error ("invalid case range in switch statement");
4207 debug_generic_expr (elt);
4208 return true;
4209 }
4210
4211 if (elt_type)
4212 {
4213 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4214 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4215 {
4216 error ("type mismatch for case label in switch statement");
4217 debug_generic_expr (elt);
4218 return true;
4219 }
4220 }
4221 else
4222 {
4223 elt_type = TREE_TYPE (CASE_LOW (elt));
4224 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4225 {
4226 error ("type precision mismatch in switch statement");
4227 return true;
4228 }
4229 }
4230
4231 if (prev_upper_bound)
4232 {
4233 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4234 {
4235 error ("case labels not sorted in switch statement");
4236 return true;
4237 }
4238 }
4239
4240 prev_upper_bound = CASE_HIGH (elt);
4241 if (! prev_upper_bound)
4242 prev_upper_bound = CASE_LOW (elt);
4243 }
4244
4245 return false;
4246 }
4247
4248 /* Verify a gimple debug statement STMT.
4249 Returns true if anything is wrong. */
4250
4251 static bool
4252 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4253 {
4254   /* There isn't much that could be wrong in a gimple debug stmt.  A
4255      gimple debug bind stmt, for example, maps a tree (usually a
4256      VAR_DECL or a PARM_DECL, but possibly some scalarized component
4257      or member of an aggregate type) to another tree, which can be an
4258      arbitrary expression.  These stmts expand into debug insns, and
4259      are converted to debug notes by var-tracking.c.  */
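  /* For example, a bind of the user variable "x" shows up in GIMPLE
     dumps roughly as
       # DEBUG x => i_3 + 1
     and has no effect on code generation.  */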
4260 return false;
4261 }
4262
4263 /* Verify a gimple label statement STMT.
4264 Returns true if anything is wrong. */
4265
4266 static bool
4267 verify_gimple_label (gimple stmt)
4268 {
4269 tree decl = gimple_label_label (stmt);
4270 int uid;
4271 bool err = false;
4272
4273 if (TREE_CODE (decl) != LABEL_DECL)
4274 return true;
4275 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4276 && DECL_CONTEXT (decl) != current_function_decl)
4277 {
4278 error ("label's context is not the current function decl");
4279 err |= true;
4280 }
4281
4282 uid = LABEL_DECL_UID (decl);
4283 if (cfun->cfg
4284 && (uid == -1
4285 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4286 {
4287 error ("incorrect entry in label_to_block_map");
4288 err |= true;
4289 }
4290
4291 uid = EH_LANDING_PAD_NR (decl);
4292 if (uid)
4293 {
4294 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4295 if (decl != lp->post_landing_pad)
4296 {
4297 error ("incorrect setting of landing pad number");
4298 err |= true;
4299 }
4300 }
4301
4302 return err;
4303 }
4304
4305 /* Verify the GIMPLE statement STMT. Returns true if there is an
4306 error, otherwise false. */
4307
4308 static bool
4309 verify_gimple_stmt (gimple stmt)
4310 {
4311 switch (gimple_code (stmt))
4312 {
4313 case GIMPLE_ASSIGN:
4314 return verify_gimple_assign (stmt);
4315
4316 case GIMPLE_LABEL:
4317 return verify_gimple_label (stmt);
4318
4319 case GIMPLE_CALL:
4320 return verify_gimple_call (stmt);
4321
4322 case GIMPLE_COND:
4323 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4324 {
4325 error ("invalid comparison code in gimple cond");
4326 return true;
4327 }
4328 if (!(!gimple_cond_true_label (stmt)
4329 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4330 || !(!gimple_cond_false_label (stmt)
4331 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4332 {
4333 error ("invalid labels in gimple cond");
4334 return true;
4335 }
4336
4337 return verify_gimple_comparison (boolean_type_node,
4338 gimple_cond_lhs (stmt),
4339 gimple_cond_rhs (stmt));
4340
4341 case GIMPLE_GOTO:
4342 return verify_gimple_goto (stmt);
4343
4344 case GIMPLE_SWITCH:
4345 return verify_gimple_switch (stmt);
4346
4347 case GIMPLE_RETURN:
4348 return verify_gimple_return (stmt);
4349
4350 case GIMPLE_ASM:
4351 return false;
4352
4353 case GIMPLE_TRANSACTION:
4354 return verify_gimple_transaction (stmt);
4355
4356 /* Tuples that do not have tree operands. */
4357 case GIMPLE_NOP:
4358 case GIMPLE_PREDICT:
4359 case GIMPLE_RESX:
4360 case GIMPLE_EH_DISPATCH:
4361 case GIMPLE_EH_MUST_NOT_THROW:
4362 return false;
4363
4364 CASE_GIMPLE_OMP:
4365       /* OpenMP directives are validated by the FE and never operated
4366 	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
4367 	 non-gimple expressions when the main index variable has had
4368 	 its address taken.  This does not affect the loop itself
4369 	 because the header of a GIMPLE_OMP_FOR is merely used to determine
4370 	 how to set up the parallel iteration.  */
4371 return false;
4372
4373 case GIMPLE_DEBUG:
4374 return verify_gimple_debug (stmt);
4375
4376 default:
4377 gcc_unreachable ();
4378 }
4379 }
4380
4381 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4382 and false otherwise. */
4383
4384 static bool
4385 verify_gimple_phi (gimple phi)
4386 {
4387 bool err = false;
4388 unsigned i;
4389 tree phi_result = gimple_phi_result (phi);
4390 bool virtual_p;
4391
4392 if (!phi_result)
4393 {
4394 error ("invalid PHI result");
4395 return true;
4396 }
4397
4398 virtual_p = virtual_operand_p (phi_result);
4399 if (TREE_CODE (phi_result) != SSA_NAME
4400 || (virtual_p
4401 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4402 {
4403 error ("invalid PHI result");
4404 err = true;
4405 }
4406
4407 for (i = 0; i < gimple_phi_num_args (phi); i++)
4408 {
4409 tree t = gimple_phi_arg_def (phi, i);
4410
4411 if (!t)
4412 {
4413 error ("missing PHI def");
4414 err |= true;
4415 continue;
4416 }
4417 /* Addressable variables do have SSA_NAMEs but they
4418 are not considered gimple values. */
4419 else if ((TREE_CODE (t) == SSA_NAME
4420 && virtual_p != virtual_operand_p (t))
4421 || (virtual_p
4422 && (TREE_CODE (t) != SSA_NAME
4423 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4424 || (!virtual_p
4425 && !is_gimple_val (t)))
4426 {
4427 error ("invalid PHI argument");
4428 debug_generic_expr (t);
4429 err |= true;
4430 }
4431 #ifdef ENABLE_TYPES_CHECKING
4432 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4433 {
4434 error ("incompatible types in PHI argument %u", i);
4435 debug_generic_stmt (TREE_TYPE (phi_result));
4436 debug_generic_stmt (TREE_TYPE (t));
4437 err |= true;
4438 }
4439 #endif
4440 }
4441
4442 return err;
4443 }
4444
4445 /* Verify the GIMPLE statements inside the sequence STMTS. */
4446
4447 static bool
4448 verify_gimple_in_seq_2 (gimple_seq stmts)
4449 {
4450 gimple_stmt_iterator ittr;
4451 bool err = false;
4452
4453 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4454 {
4455 gimple stmt = gsi_stmt (ittr);
4456
4457 switch (gimple_code (stmt))
4458 {
4459 case GIMPLE_BIND:
4460 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4461 break;
4462
4463 case GIMPLE_TRY:
4464 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4465 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4466 break;
4467
4468 case GIMPLE_EH_FILTER:
4469 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4470 break;
4471
4472 case GIMPLE_EH_ELSE:
4473 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4474 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4475 break;
4476
4477 case GIMPLE_CATCH:
4478 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4479 break;
4480
4481 case GIMPLE_TRANSACTION:
4482 err |= verify_gimple_transaction (stmt);
4483 break;
4484
4485 default:
4486 {
4487 bool err2 = verify_gimple_stmt (stmt);
4488 if (err2)
4489 debug_gimple_stmt (stmt);
4490 err |= err2;
4491 }
4492 }
4493 }
4494
4495 return err;
4496 }
4497
4498 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4499 is a problem, otherwise false. */
4500
4501 static bool
4502 verify_gimple_transaction (gimple stmt)
4503 {
4504 tree lab = gimple_transaction_label (stmt);
4505 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4506 return true;
4507 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4508 }
4509
4510
4511 /* Verify the GIMPLE statements inside the statement list STMTS. */
4512
4513 DEBUG_FUNCTION void
4514 verify_gimple_in_seq (gimple_seq stmts)
4515 {
4516 timevar_push (TV_TREE_STMT_VERIFY);
4517 if (verify_gimple_in_seq_2 (stmts))
4518 internal_error ("verify_gimple failed");
4519 timevar_pop (TV_TREE_STMT_VERIFY);
4520 }
4521
4522 /* Return true when T can be shared.  */
4523
4524 static bool
4525 tree_node_can_be_shared (tree t)
4526 {
4527 if (IS_TYPE_OR_DECL_P (t)
4528 || is_gimple_min_invariant (t)
4529 || TREE_CODE (t) == SSA_NAME
4530 || t == error_mark_node
4531 || TREE_CODE (t) == IDENTIFIER_NODE)
4532 return true;
4533
4534 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4535 return true;
4536
4537 if (DECL_P (t))
4538 return true;
4539
4540 return false;
4541 }
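/* For instance, the two uses of b_2 in  a_1 = b_2 + b_2  may share a
   single SSA_NAME node, whereas something like (y_3 + 1) * (y_3 + 1)
   must use two distinct PLUS_EXPR trees in GIMPLE; the walkers below
   report the latter kind of sharing.  */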
4542
4543 /* Called via walk_tree. Verify tree sharing. */
4544
4545 static tree
4546 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4547 {
4548 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4549
4550 if (tree_node_can_be_shared (*tp))
4551 {
4552 *walk_subtrees = false;
4553 return NULL;
4554 }
4555
4556 if (pointer_set_insert (visited, *tp))
4557 return *tp;
4558
4559 return NULL;
4560 }
4561
4562 /* Called via walk_gimple_stmt. Verify tree sharing. */
4563
4564 static tree
4565 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4566 {
4567 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4568 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4569 }
4570
4571 static bool eh_error_found;
4572 static int
4573 verify_eh_throw_stmt_node (void **slot, void *data)
4574 {
4575 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4576 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4577
4578 if (!pointer_set_contains (visited, node->stmt))
4579 {
4580 error ("dead STMT in EH table");
4581 debug_gimple_stmt (node->stmt);
4582 eh_error_found = true;
4583 }
4584 return 1;
4585 }
4586
4587 /* Verify that the block of location LOC is in BLOCKS.  */
4588
4589 static bool
4590 verify_location (pointer_set_t *blocks, location_t loc)
4591 {
4592 tree block = LOCATION_BLOCK (loc);
4593 if (block != NULL_TREE
4594 && !pointer_set_contains (blocks, block))
4595 {
4596 error ("location references block not in block tree");
4597 return true;
4598 }
4599 if (block != NULL_TREE)
4600 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4601 return false;
4602 }
4603
4604 /* Called via walk_tree. Verify that expressions have no blocks. */
4605
4606 static tree
4607 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4608 {
4609 if (!EXPR_P (*tp))
4610 {
4611 *walk_subtrees = false;
4612 return NULL;
4613 }
4614
4615 location_t loc = EXPR_LOCATION (*tp);
4616 if (LOCATION_BLOCK (loc) != NULL)
4617 return *tp;
4618
4619 return NULL;
4620 }
4621
4622 /* Called via walk_tree. Verify locations of expressions. */
4623
4624 static tree
4625 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4626 {
4627 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4628
4629 if (TREE_CODE (*tp) == VAR_DECL
4630 && DECL_HAS_DEBUG_EXPR_P (*tp))
4631 {
4632 tree t = DECL_DEBUG_EXPR (*tp);
4633 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4634 if (addr)
4635 return addr;
4636 }
4637 if ((TREE_CODE (*tp) == VAR_DECL
4638 || TREE_CODE (*tp) == PARM_DECL
4639 || TREE_CODE (*tp) == RESULT_DECL)
4640 && DECL_HAS_VALUE_EXPR_P (*tp))
4641 {
4642 tree t = DECL_VALUE_EXPR (*tp);
4643 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4644 if (addr)
4645 return addr;
4646 }
4647
4648 if (!EXPR_P (*tp))
4649 {
4650 *walk_subtrees = false;
4651 return NULL;
4652 }
4653
4654 location_t loc = EXPR_LOCATION (*tp);
4655 if (verify_location (blocks, loc))
4656 return *tp;
4657
4658 return NULL;
4659 }
4660
4661 /* Called via walk_gimple_op. Verify locations of expressions. */
4662
4663 static tree
4664 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4665 {
4666 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4667 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4668 }
4669
4670 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4671
4672 static void
4673 collect_subblocks (pointer_set_t *blocks, tree block)
4674 {
4675 tree t;
4676 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4677 {
4678 pointer_set_insert (blocks, t);
4679 collect_subblocks (blocks, t);
4680 }
4681 }
4682
4683 /* Verify the GIMPLE statements in the CFG of FN. */
4684
4685 DEBUG_FUNCTION void
4686 verify_gimple_in_cfg (struct function *fn)
4687 {
4688 basic_block bb;
4689 bool err = false;
4690 struct pointer_set_t *visited, *visited_stmts, *blocks;
4691
4692 timevar_push (TV_TREE_STMT_VERIFY);
4693 visited = pointer_set_create ();
4694 visited_stmts = pointer_set_create ();
4695
4696 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4697 blocks = pointer_set_create ();
4698 if (DECL_INITIAL (fn->decl))
4699 {
4700 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4701 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4702 }
4703
4704 FOR_EACH_BB_FN (bb, fn)
4705 {
4706 gimple_stmt_iterator gsi;
4707
4708 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4709 {
4710 gimple phi = gsi_stmt (gsi);
4711 bool err2 = false;
4712 unsigned i;
4713
4714 pointer_set_insert (visited_stmts, phi);
4715
4716 if (gimple_bb (phi) != bb)
4717 {
4718 error ("gimple_bb (phi) is set to a wrong basic block");
4719 err2 = true;
4720 }
4721
4722 err2 |= verify_gimple_phi (phi);
4723
4724 /* Only PHI arguments have locations. */
4725 if (gimple_location (phi) != UNKNOWN_LOCATION)
4726 {
4727 error ("PHI node with location");
4728 err2 = true;
4729 }
4730
4731 for (i = 0; i < gimple_phi_num_args (phi); i++)
4732 {
4733 tree arg = gimple_phi_arg_def (phi, i);
4734 tree addr = walk_tree (&arg, verify_node_sharing_1,
4735 visited, NULL);
4736 if (addr)
4737 {
4738 error ("incorrect sharing of tree nodes");
4739 debug_generic_expr (addr);
4740 err2 |= true;
4741 }
4742 location_t loc = gimple_phi_arg_location (phi, i);
4743 if (virtual_operand_p (gimple_phi_result (phi))
4744 && loc != UNKNOWN_LOCATION)
4745 {
4746 error ("virtual PHI with argument locations");
4747 err2 = true;
4748 }
4749 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4750 if (addr)
4751 {
4752 debug_generic_expr (addr);
4753 err2 = true;
4754 }
4755 err2 |= verify_location (blocks, loc);
4756 }
4757
4758 if (err2)
4759 debug_gimple_stmt (phi);
4760 err |= err2;
4761 }
4762
4763 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4764 {
4765 gimple stmt = gsi_stmt (gsi);
4766 bool err2 = false;
4767 struct walk_stmt_info wi;
4768 tree addr;
4769 int lp_nr;
4770
4771 pointer_set_insert (visited_stmts, stmt);
4772
4773 if (gimple_bb (stmt) != bb)
4774 {
4775 error ("gimple_bb (stmt) is set to a wrong basic block");
4776 err2 = true;
4777 }
4778
4779 err2 |= verify_gimple_stmt (stmt);
4780 err2 |= verify_location (blocks, gimple_location (stmt));
4781
4782 memset (&wi, 0, sizeof (wi));
4783 wi.info = (void *) visited;
4784 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4785 if (addr)
4786 {
4787 error ("incorrect sharing of tree nodes");
4788 debug_generic_expr (addr);
4789 err2 |= true;
4790 }
4791
4792 memset (&wi, 0, sizeof (wi));
4793 wi.info = (void *) blocks;
4794 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4795 if (addr)
4796 {
4797 debug_generic_expr (addr);
4798 err2 |= true;
4799 }
4800
4801 	  /* ??? Instead of not checking these stmts at all, the walker
4802 	     should know its context via wi.  */
4803 if (!is_gimple_debug (stmt)
4804 && !is_gimple_omp (stmt))
4805 {
4806 memset (&wi, 0, sizeof (wi));
4807 addr = walk_gimple_op (stmt, verify_expr, &wi);
4808 if (addr)
4809 {
4810 debug_generic_expr (addr);
4811 inform (gimple_location (stmt), "in statement");
4812 err2 |= true;
4813 }
4814 }
4815
4816 /* If the statement is marked as part of an EH region, then it is
4817 expected that the statement could throw. Verify that when we
4818 have optimizations that simplify statements such that we prove
4819 that they cannot throw, that we update other data structures
4820 to match. */
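	  /* For example, a pass that proves a call cannot throw is
	     expected to also call remove_stmt_from_eh_lp on it; a stale
	     entry left behind is what triggers the errors below.  */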
4821 lp_nr = lookup_stmt_eh_lp (stmt);
4822 if (lp_nr != 0)
4823 {
4824 if (!stmt_could_throw_p (stmt))
4825 {
4826 error ("statement marked for throw, but doesn%'t");
4827 err2 |= true;
4828 }
4829 else if (lp_nr > 0
4830 && !gsi_one_before_end_p (gsi)
4831 && stmt_can_throw_internal (stmt))
4832 {
4833 error ("statement marked for throw in middle of block");
4834 err2 |= true;
4835 }
4836 }
4837
4838 if (err2)
4839 debug_gimple_stmt (stmt);
4840 err |= err2;
4841 }
4842 }
4843
4844 eh_error_found = false;
4845 if (get_eh_throw_stmt_table (cfun))
4846 htab_traverse (get_eh_throw_stmt_table (cfun),
4847 verify_eh_throw_stmt_node,
4848 visited_stmts);
4849
4850 if (err || eh_error_found)
4851 internal_error ("verify_gimple failed");
4852
4853 pointer_set_destroy (visited);
4854 pointer_set_destroy (visited_stmts);
4855 pointer_set_destroy (blocks);
4856 verify_histograms ();
4857 timevar_pop (TV_TREE_STMT_VERIFY);
4858 }
4859
4860
4861 /* Verifies that the flow information is OK. */
4862
4863 static int
4864 gimple_verify_flow_info (void)
4865 {
4866 int err = 0;
4867 basic_block bb;
4868 gimple_stmt_iterator gsi;
4869 gimple stmt;
4870 edge e;
4871 edge_iterator ei;
4872
4873 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4874 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4875 {
4876 error ("ENTRY_BLOCK has IL associated with it");
4877 err = 1;
4878 }
4879
4880 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
4881 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
4882 {
4883 error ("EXIT_BLOCK has IL associated with it");
4884 err = 1;
4885 }
4886
4887 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4888 if (e->flags & EDGE_FALLTHRU)
4889 {
4890 error ("fallthru to exit from bb %d", e->src->index);
4891 err = 1;
4892 }
4893
4894 FOR_EACH_BB (bb)
4895 {
4896 bool found_ctrl_stmt = false;
4897
4898 stmt = NULL;
4899
4900       /* Skip labels at the start of the basic block.  */
4901 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4902 {
4903 tree label;
4904 gimple prev_stmt = stmt;
4905
4906 stmt = gsi_stmt (gsi);
4907
4908 if (gimple_code (stmt) != GIMPLE_LABEL)
4909 break;
4910
4911 label = gimple_label_label (stmt);
4912 if (prev_stmt && DECL_NONLOCAL (label))
4913 {
4914 error ("nonlocal label ");
4915 print_generic_expr (stderr, label, 0);
4916 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4917 bb->index);
4918 err = 1;
4919 }
4920
4921 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4922 {
4923 error ("EH landing pad label ");
4924 print_generic_expr (stderr, label, 0);
4925 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4926 bb->index);
4927 err = 1;
4928 }
4929
4930 if (label_to_block (label) != bb)
4931 {
4932 error ("label ");
4933 print_generic_expr (stderr, label, 0);
4934 fprintf (stderr, " to block does not match in bb %d",
4935 bb->index);
4936 err = 1;
4937 }
4938
4939 if (decl_function_context (label) != current_function_decl)
4940 {
4941 error ("label ");
4942 print_generic_expr (stderr, label, 0);
4943 fprintf (stderr, " has incorrect context in bb %d",
4944 bb->index);
4945 err = 1;
4946 }
4947 }
4948
4949       /* Verify that the body of basic block BB is free of control flow.  */
4950 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4951 {
4952 gimple stmt = gsi_stmt (gsi);
4953
4954 if (found_ctrl_stmt)
4955 {
4956 error ("control flow in the middle of basic block %d",
4957 bb->index);
4958 err = 1;
4959 }
4960
4961 if (stmt_ends_bb_p (stmt))
4962 found_ctrl_stmt = true;
4963
4964 if (gimple_code (stmt) == GIMPLE_LABEL)
4965 {
4966 error ("label ");
4967 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4968 fprintf (stderr, " in the middle of basic block %d", bb->index);
4969 err = 1;
4970 }
4971 }
4972
4973 gsi = gsi_last_bb (bb);
4974 if (gsi_end_p (gsi))
4975 continue;
4976
4977 stmt = gsi_stmt (gsi);
4978
4979 if (gimple_code (stmt) == GIMPLE_LABEL)
4980 continue;
4981
4982 err |= verify_eh_edges (stmt);
4983
4984 if (is_ctrl_stmt (stmt))
4985 {
4986 FOR_EACH_EDGE (e, ei, bb->succs)
4987 if (e->flags & EDGE_FALLTHRU)
4988 {
4989 error ("fallthru edge after a control statement in bb %d",
4990 bb->index);
4991 err = 1;
4992 }
4993 }
4994
4995 if (gimple_code (stmt) != GIMPLE_COND)
4996 {
4997 	  /* Verify that no edge has EDGE_TRUE_VALUE or EDGE_FALSE_VALUE set
4998 	     after anything other than a GIMPLE_COND.  */
4999 FOR_EACH_EDGE (e, ei, bb->succs)
5000 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5001 {
5002 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5003 bb->index);
5004 err = 1;
5005 }
5006 }
5007
5008 switch (gimple_code (stmt))
5009 {
5010 case GIMPLE_COND:
5011 {
5012 edge true_edge;
5013 edge false_edge;
5014
5015 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5016
5017 if (!true_edge
5018 || !false_edge
5019 || !(true_edge->flags & EDGE_TRUE_VALUE)
5020 || !(false_edge->flags & EDGE_FALSE_VALUE)
5021 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5022 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5023 || EDGE_COUNT (bb->succs) >= 3)
5024 {
5025 error ("wrong outgoing edge flags at end of bb %d",
5026 bb->index);
5027 err = 1;
5028 }
5029 }
5030 break;
5031
5032 case GIMPLE_GOTO:
5033 if (simple_goto_p (stmt))
5034 {
5035 error ("explicit goto at end of bb %d", bb->index);
5036 err = 1;
5037 }
5038 else
5039 {
5040 /* FIXME. We should double check that the labels in the
5041 destination blocks have their address taken. */
5042 FOR_EACH_EDGE (e, ei, bb->succs)
5043 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5044 | EDGE_FALSE_VALUE))
5045 || !(e->flags & EDGE_ABNORMAL))
5046 {
5047 error ("wrong outgoing edge flags at end of bb %d",
5048 bb->index);
5049 err = 1;
5050 }
5051 }
5052 break;
5053
5054 case GIMPLE_CALL:
5055 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5056 break;
5057 /* ... fallthru ... */
5058 case GIMPLE_RETURN:
5059 if (!single_succ_p (bb)
5060 || (single_succ_edge (bb)->flags
5061 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5062 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5063 {
5064 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5065 err = 1;
5066 }
5067 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5068 {
5069 error ("return edge does not point to exit in bb %d",
5070 bb->index);
5071 err = 1;
5072 }
5073 break;
5074
5075 case GIMPLE_SWITCH:
5076 {
5077 tree prev;
5078 edge e;
5079 size_t i, n;
5080
5081 n = gimple_switch_num_labels (stmt);
5082
5083 /* Mark all the destination basic blocks. */
5084 for (i = 0; i < n; ++i)
5085 {
5086 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5087 basic_block label_bb = label_to_block (lab);
5088 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5089 label_bb->aux = (void *)1;
5090 }
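	    /* From here on, AUX == 1 means the block is a case target for
	       which no outgoing edge has been seen yet, and AUX == 2 means
	       a matching edge was found; AUX is cleared again once all
	       edges have been checked.  */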
5091
5092 /* Verify that the case labels are sorted. */
5093 prev = gimple_switch_label (stmt, 0);
5094 for (i = 1; i < n; ++i)
5095 {
5096 tree c = gimple_switch_label (stmt, i);
5097 if (!CASE_LOW (c))
5098 {
5099 error ("found default case not at the start of "
5100 "case vector");
5101 err = 1;
5102 continue;
5103 }
5104 if (CASE_LOW (prev)
5105 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5106 {
5107 error ("case labels not sorted: ");
5108 print_generic_expr (stderr, prev, 0);
5109 fprintf (stderr," is greater than ");
5110 print_generic_expr (stderr, c, 0);
5111 fprintf (stderr," but comes before it.\n");
5112 err = 1;
5113 }
5114 prev = c;
5115 }
5116 /* VRP will remove the default case if it can prove it will
5117 	       never be executed.  So do not verify here that a default
5118 	       case always exists.  */
5119
5120 FOR_EACH_EDGE (e, ei, bb->succs)
5121 {
5122 if (!e->dest->aux)
5123 {
5124 error ("extra outgoing edge %d->%d",
5125 bb->index, e->dest->index);
5126 err = 1;
5127 }
5128
5129 e->dest->aux = (void *)2;
5130 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5131 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5132 {
5133 error ("wrong outgoing edge flags at end of bb %d",
5134 bb->index);
5135 err = 1;
5136 }
5137 }
5138
5139 /* Check that we have all of them. */
5140 for (i = 0; i < n; ++i)
5141 {
5142 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5143 basic_block label_bb = label_to_block (lab);
5144
5145 if (label_bb->aux != (void *)2)
5146 {
5147 error ("missing edge %i->%i", bb->index, label_bb->index);
5148 err = 1;
5149 }
5150 }
5151
5152 FOR_EACH_EDGE (e, ei, bb->succs)
5153 e->dest->aux = (void *)0;
5154 }
5155 break;
5156
5157 case GIMPLE_EH_DISPATCH:
5158 err |= verify_eh_dispatch_edge (stmt);
5159 break;
5160
5161 default:
5162 break;
5163 }
5164 }
5165
5166 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5167 verify_dominators (CDI_DOMINATORS);
5168
5169 return err;
5170 }
5171
5172
5173 /* Updates phi nodes after creating a forwarder block joined
5174 by edge FALLTHRU. */
5175
5176 static void
5177 gimple_make_forwarder_block (edge fallthru)
5178 {
5179 edge e;
5180 edge_iterator ei;
5181 basic_block dummy, bb;
5182 tree var;
5183 gimple_stmt_iterator gsi;
5184
5185 dummy = fallthru->src;
5186 bb = fallthru->dest;
5187
5188 if (single_pred_p (bb))
5189 return;
5190
5191   /* If we redirected a branch, we must create new PHI nodes at the
5192      start of BB.  */
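  /* A sketch: for  x_1 = PHI <a_2, b_3>  in DUMMY, we create
     x_1 = PHI <x_4 (FALLTHRU)>  in BB and rename the original PHI to
     x_4 = PHI <a_2, b_3>, so uses of x_1 dominated by BB need no
     updating.  */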
5193 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5194 {
5195 gimple phi, new_phi;
5196
5197 phi = gsi_stmt (gsi);
5198 var = gimple_phi_result (phi);
5199 new_phi = create_phi_node (var, bb);
5200 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5201 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5202 UNKNOWN_LOCATION);
5203 }
5204
5205 /* Add the arguments we have stored on edges. */
5206 FOR_EACH_EDGE (e, ei, bb->preds)
5207 {
5208 if (e == fallthru)
5209 continue;
5210
5211 flush_pending_stmts (e);
5212 }
5213 }
5214
5215
5216 /* Return a non-special label at the head of basic block BB.
5217 Create one if it doesn't exist. */
5218
5219 tree
5220 gimple_block_label (basic_block bb)
5221 {
5222 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5223 bool first = true;
5224 tree label;
5225 gimple stmt;
5226
5227 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5228 {
5229 stmt = gsi_stmt (i);
5230 if (gimple_code (stmt) != GIMPLE_LABEL)
5231 break;
5232 label = gimple_label_label (stmt);
5233 if (!DECL_NONLOCAL (label))
5234 {
5235 if (!first)
5236 gsi_move_before (&i, &s);
5237 return label;
5238 }
5239 }
5240
5241 label = create_artificial_label (UNKNOWN_LOCATION);
5242 stmt = gimple_build_label (label);
5243 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5244 return label;
5245 }
5246
5247
5248 /* Attempt to perform edge redirection by replacing a possibly complex
5249 jump instruction by a goto or by removing the jump completely.
5250 This can apply only if all edges now point to the same block. The
5251 parameters and return values are equivalent to
5252 redirect_edge_and_branch. */
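/* For instance, if SRC ends in  if (x_1 != 0) goto L1; else goto L2;
   and the edge not being redirected already leads to TARGET, the
   GIMPLE_COND is removed and E becomes a plain fallthru edge to
   TARGET (a sketch; a GIMPLE_SWITCH is handled the same way).  */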
5253
5254 static edge
5255 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5256 {
5257 basic_block src = e->src;
5258 gimple_stmt_iterator i;
5259 gimple stmt;
5260
5261 /* We can replace or remove a complex jump only when we have exactly
5262 two edges. */
5263 if (EDGE_COUNT (src->succs) != 2
5264 /* Verify that all targets will be TARGET. Specifically, the
5265 edge that is not E must also go to TARGET. */
5266 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5267 return NULL;
5268
5269 i = gsi_last_bb (src);
5270 if (gsi_end_p (i))
5271 return NULL;
5272
5273 stmt = gsi_stmt (i);
5274
5275 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5276 {
5277 gsi_remove (&i, true);
5278 e = ssa_redirect_edge (e, target);
5279 e->flags = EDGE_FALLTHRU;
5280 return e;
5281 }
5282
5283 return NULL;
5284 }
5285
5286
5287 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5288 edge representing the redirected branch. */
5289
5290 static edge
5291 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5292 {
5293 basic_block bb = e->src;
5294 gimple_stmt_iterator gsi;
5295 edge ret;
5296 gimple stmt;
5297
5298 if (e->flags & EDGE_ABNORMAL)
5299 return NULL;
5300
5301 if (e->dest == dest)
5302 return NULL;
5303
5304 if (e->flags & EDGE_EH)
5305 return redirect_eh_edge (e, dest);
5306
5307 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5308 {
5309 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5310 if (ret)
5311 return ret;
5312 }
5313
5314 gsi = gsi_last_bb (bb);
5315 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5316
5317 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5318 {
5319 case GIMPLE_COND:
5320     /* For a GIMPLE_COND, we only need to redirect the edge.  */
5321 break;
5322
5323 case GIMPLE_GOTO:
5324 /* No non-abnormal edges should lead from a non-simple goto, and
5325 simple ones should be represented implicitly. */
5326 gcc_unreachable ();
5327
5328 case GIMPLE_SWITCH:
5329 {
5330 tree label = gimple_block_label (dest);
5331 tree cases = get_cases_for_edge (e, stmt);
5332
5333 /* If we have a list of cases associated with E, then use it
5334 as it's a lot faster than walking the entire case vector. */
5335 if (cases)
5336 {
5337 edge e2 = find_edge (e->src, dest);
5338 tree last, first;
5339
5340 first = cases;
5341 while (cases)
5342 {
5343 last = cases;
5344 CASE_LABEL (cases) = label;
5345 cases = CASE_CHAIN (cases);
5346 }
5347
5348 /* If there was already an edge in the CFG, then we need
5349 to move all the cases associated with E to E2. */
5350 if (e2)
5351 {
5352 tree cases2 = get_cases_for_edge (e2, stmt);
5353
5354 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5355 CASE_CHAIN (cases2) = first;
5356 }
5357 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5358 }
5359 else
5360 {
5361 size_t i, n = gimple_switch_num_labels (stmt);
5362
5363 for (i = 0; i < n; i++)
5364 {
5365 tree elt = gimple_switch_label (stmt, i);
5366 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5367 CASE_LABEL (elt) = label;
5368 }
5369 }
5370 }
5371 break;
5372
5373 case GIMPLE_ASM:
5374 {
5375 int i, n = gimple_asm_nlabels (stmt);
5376 tree label = NULL;
5377
5378 for (i = 0; i < n; ++i)
5379 {
5380 tree cons = gimple_asm_label_op (stmt, i);
5381 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5382 {
5383 if (!label)
5384 label = gimple_block_label (dest);
5385 TREE_VALUE (cons) = label;
5386 }
5387 }
5388
5389 /* If we didn't find any label matching the former edge in the
5390 asm labels, we must be redirecting the fallthrough
5391 edge. */
5392 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5393 }
5394 break;
5395
5396 case GIMPLE_RETURN:
5397 gsi_remove (&gsi, true);
5398 e->flags |= EDGE_FALLTHRU;
5399 break;
5400
5401 case GIMPLE_OMP_RETURN:
5402 case GIMPLE_OMP_CONTINUE:
5403 case GIMPLE_OMP_SECTIONS_SWITCH:
5404 case GIMPLE_OMP_FOR:
5405 /* The edges from OMP constructs can be simply redirected. */
5406 break;
5407
5408 case GIMPLE_EH_DISPATCH:
5409 if (!(e->flags & EDGE_FALLTHRU))
5410 redirect_eh_dispatch_edge (stmt, e, dest);
5411 break;
5412
5413 case GIMPLE_TRANSACTION:
5414       /* The ABORT edge has a stored label associated with it; the other
5415 	 edges are simply redirectable.  */
5416 if (e->flags == 0)
5417 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5418 break;
5419
5420 default:
5421 /* Otherwise it must be a fallthru edge, and we don't need to
5422 do anything besides redirecting it. */
5423 gcc_assert (e->flags & EDGE_FALLTHRU);
5424 break;
5425 }
5426
5427 /* Update/insert PHI nodes as necessary. */
5428
5429 /* Now update the edges in the CFG. */
5430 e = ssa_redirect_edge (e, dest);
5431
5432 return e;
5433 }
5434
5435 /* Returns true if it is possible to remove edge E by redirecting
5436 it to the destination of the other edge from E->src. */
5437
5438 static bool
5439 gimple_can_remove_branch_p (const_edge e)
5440 {
5441 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5442 return false;
5443
5444 return true;
5445 }
5446
5447 /* Simple wrapper, as we can always redirect fallthru edges. */
5448
5449 static basic_block
5450 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5451 {
5452 e = gimple_redirect_edge_and_branch (e, dest);
5453 gcc_assert (e);
5454
5455 return NULL;
5456 }
5457
5458
5459 /* Splits basic block BB after statement STMT (but at least after the
5460 labels). If STMT is NULL, BB is split just after the labels. */
5461
5462 static basic_block
5463 gimple_split_block (basic_block bb, void *stmt)
5464 {
5465 gimple_stmt_iterator gsi;
5466 gimple_stmt_iterator gsi_tgt;
5467 gimple act;
5468 gimple_seq list;
5469 basic_block new_bb;
5470 edge e;
5471 edge_iterator ei;
5472
5473 new_bb = create_empty_bb (bb);
5474
5475 /* Redirect the outgoing edges. */
5476 new_bb->succs = bb->succs;
5477 bb->succs = NULL;
5478 FOR_EACH_EDGE (e, ei, new_bb->succs)
5479 e->src = new_bb;
5480
5481 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5482 stmt = NULL;
5483
5484 /* Move everything from GSI to the new basic block. */
5485 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5486 {
5487 act = gsi_stmt (gsi);
5488 if (gimple_code (act) == GIMPLE_LABEL)
5489 continue;
5490
5491 if (!stmt)
5492 break;
5493
5494 if (stmt == act)
5495 {
5496 gsi_next (&gsi);
5497 break;
5498 }
5499 }
5500
5501 if (gsi_end_p (gsi))
5502 return new_bb;
5503
5504   /* Split the statement list; avoid re-creating containers as that
5505      brings ugly quadratic memory consumption in the inliner.
5506 (We are still quadratic since we need to update stmt BB pointers,
5507 sadly.) */
5508 gsi_split_seq_before (&gsi, &list);
5509 set_bb_seq (new_bb, list);
5510 for (gsi_tgt = gsi_start (list);
5511 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5512 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5513
5514 return new_bb;
5515 }
5516
5517
5518 /* Moves basic block BB after block AFTER. */
5519
5520 static bool
5521 gimple_move_block_after (basic_block bb, basic_block after)
5522 {
5523 if (bb->prev_bb == after)
5524 return true;
5525
5526 unlink_block (bb);
5527 link_block (bb, after);
5528
5529 return true;
5530 }
5531
5532
5533 /* Return TRUE if block BB has no executable statements, otherwise return
5534 FALSE. */
5535
5536 static bool
5537 gimple_empty_block_p (basic_block bb)
5538 {
5539 /* BB must have no executable statements. */
5540 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5541 if (phi_nodes (bb))
5542 return false;
5543 if (gsi_end_p (gsi))
5544 return true;
5545 if (is_gimple_debug (gsi_stmt (gsi)))
5546 gsi_next_nondebug (&gsi);
5547 return gsi_end_p (gsi);
5548 }
5549
5550
5551 /* Split a basic block if it ends with a conditional branch and if the
5552 other part of the block is not empty. */
5553
5554 static basic_block
5555 gimple_split_block_before_cond_jump (basic_block bb)
5556 {
5557 gimple last, split_point;
5558 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5559 if (gsi_end_p (gsi))
5560 return NULL;
5561 last = gsi_stmt (gsi);
5562 if (gimple_code (last) != GIMPLE_COND
5563 && gimple_code (last) != GIMPLE_SWITCH)
5564 return NULL;
5565 gsi_prev_nondebug (&gsi);
5566 split_point = gsi_stmt (gsi);
5567 return split_block (bb, split_point)->dest;
5568 }
5569
5570
5571 /* Return true if basic_block can be duplicated. */
5572
5573 static bool
5574 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5575 {
5576 return true;
5577 }
5578
5579 /* Create a duplicate of the basic block BB. NOTE: This does not
5580 preserve SSA form. */
5581
5582 static basic_block
5583 gimple_duplicate_bb (basic_block bb)
5584 {
5585 basic_block new_bb;
5586 gimple_stmt_iterator gsi, gsi_tgt;
5587 gimple_seq phis = phi_nodes (bb);
5588 gimple phi, stmt, copy;
5589
5590 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5591
5592 /* Copy the PHI nodes. We ignore PHI node arguments here because
5593      the incoming edges have not been set up yet.  */
5594 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5595 {
5596 phi = gsi_stmt (gsi);
5597 copy = create_phi_node (NULL_TREE, new_bb);
5598 create_new_def_for (gimple_phi_result (phi), copy,
5599 gimple_phi_result_ptr (copy));
5600 gimple_set_uid (copy, gimple_uid (phi));
5601 }
5602
5603 gsi_tgt = gsi_start_bb (new_bb);
5604 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5605 {
5606 def_operand_p def_p;
5607 ssa_op_iter op_iter;
5608 tree lhs;
5609
5610 stmt = gsi_stmt (gsi);
5611 if (gimple_code (stmt) == GIMPLE_LABEL)
5612 continue;
5613
5614 /* Don't duplicate label debug stmts. */
5615 if (gimple_debug_bind_p (stmt)
5616 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5617 == LABEL_DECL)
5618 continue;
5619
5620 /* Create a new copy of STMT and duplicate STMT's virtual
5621 operands. */
5622 copy = gimple_copy (stmt);
5623 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5624
5625 maybe_duplicate_eh_stmt (copy, stmt);
5626 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5627
5628 /* When copying around a stmt writing into a local non-user
5629 	 aggregate, make sure it won't share a stack slot with other
5630 	 variables.  */
5631 lhs = gimple_get_lhs (stmt);
5632 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5633 {
5634 tree base = get_base_address (lhs);
5635 if (base
5636 && (TREE_CODE (base) == VAR_DECL
5637 || TREE_CODE (base) == RESULT_DECL)
5638 && DECL_IGNORED_P (base)
5639 && !TREE_STATIC (base)
5640 && !DECL_EXTERNAL (base)
5641 && (TREE_CODE (base) != VAR_DECL
5642 || !DECL_HAS_VALUE_EXPR_P (base)))
5643 DECL_NONSHAREABLE (base) = 1;
5644 }
5645
5646 /* Create new names for all the definitions created by COPY and
5647 add replacement mappings for each new name. */
5648 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5649 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5650 }
5651
5652 return new_bb;
5653 }
5654
5655 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5656
5657 static void
5658 add_phi_args_after_copy_edge (edge e_copy)
5659 {
5660 basic_block bb, bb_copy = e_copy->src, dest;
5661 edge e;
5662 edge_iterator ei;
5663 gimple phi, phi_copy;
5664 tree def;
5665 gimple_stmt_iterator psi, psi_copy;
5666
5667 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5668 return;
5669
5670 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5671
5672 if (e_copy->dest->flags & BB_DUPLICATED)
5673 dest = get_bb_original (e_copy->dest);
5674 else
5675 dest = e_copy->dest;
5676
5677 e = find_edge (bb, dest);
5678 if (!e)
5679 {
5680 /* During loop unrolling the target of the latch edge is copied.
5681 	 In this case we are not looking for the edge to DEST, but for
5682 	 the edge to the duplicated block whose original was DEST.  */
5683 FOR_EACH_EDGE (e, ei, bb->succs)
5684 {
5685 if ((e->dest->flags & BB_DUPLICATED)
5686 && get_bb_original (e->dest) == dest)
5687 break;
5688 }
5689
5690 gcc_assert (e != NULL);
5691 }
5692
5693 for (psi = gsi_start_phis (e->dest),
5694 psi_copy = gsi_start_phis (e_copy->dest);
5695 !gsi_end_p (psi);
5696 gsi_next (&psi), gsi_next (&psi_copy))
5697 {
5698 phi = gsi_stmt (psi);
5699 phi_copy = gsi_stmt (psi_copy);
5700 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5701 add_phi_arg (phi_copy, def, e_copy,
5702 gimple_phi_arg_location_from_edge (phi, e));
5703 }
5704 }
5705
5706
5707 /* Basic block BB_COPY was created by code duplication. Add phi node
5708 arguments for edges going out of BB_COPY. The blocks that were
5709 duplicated have BB_DUPLICATED set. */
5710
5711 void
5712 add_phi_args_after_copy_bb (basic_block bb_copy)
5713 {
5714 edge e_copy;
5715 edge_iterator ei;
5716
5717 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5718 {
5719 add_phi_args_after_copy_edge (e_copy);
5720 }
5721 }
5722
5723 /* Blocks in REGION_COPY array of length N_REGION were created by
5724 duplication of basic blocks. Add phi node arguments for edges
5725 going from these blocks. If E_COPY is not NULL, also add
5726    phi node arguments for its destination.  */
5727
5728 void
5729 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5730 edge e_copy)
5731 {
5732 unsigned i;
5733
5734 for (i = 0; i < n_region; i++)
5735 region_copy[i]->flags |= BB_DUPLICATED;
5736
5737 for (i = 0; i < n_region; i++)
5738 add_phi_args_after_copy_bb (region_copy[i]);
5739 if (e_copy)
5740 add_phi_args_after_copy_edge (e_copy);
5741
5742 for (i = 0; i < n_region; i++)
5743 region_copy[i]->flags &= ~BB_DUPLICATED;
5744 }
5745
5746 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5747 important exit edge EXIT. By important we mean that no SSA name defined
5748    inside the region is live over the other exit edges of the region.  All entry
5749 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5750 to the duplicate of the region. Dominance and loop information is
5751 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5752 UPDATE_DOMINANCE is false then we assume that the caller will update the
5753 dominance information after calling this function. The new basic
5754 blocks are stored to REGION_COPY in the same order as they had in REGION,
5755 provided that REGION_COPY is not NULL.
5756 The function returns false if it is unable to copy the region,
5757 true otherwise. */
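/* For instance, loop header copying (the primary user) duplicates the
   loop header so that, roughly,

     while (cond) body;     becomes     if (cond)
					  do body; while (cond);

   with ENTRY the preheader edge and EXIT the exit edge out of the
   header (a sketch of the intended use).  */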
5758
5759 bool
5760 gimple_duplicate_sese_region (edge entry, edge exit,
5761 basic_block *region, unsigned n_region,
5762 basic_block *region_copy,
5763 bool update_dominance)
5764 {
5765 unsigned i;
5766 bool free_region_copy = false, copying_header = false;
5767 struct loop *loop = entry->dest->loop_father;
5768 edge exit_copy;
5769 vec<basic_block> doms;
5770 edge redirected;
5771 int total_freq = 0, entry_freq = 0;
5772 gcov_type total_count = 0, entry_count = 0;
5773
5774 if (!can_copy_bbs_p (region, n_region))
5775 return false;
5776
5777 /* Some sanity checking. Note that we do not check for all possible
5778      misuses of the functions; i.e. if you ask to copy something weird,
5779 it will work, but the state of structures probably will not be
5780 correct. */
5781 for (i = 0; i < n_region; i++)
5782 {
5783 /* We do not handle subloops, i.e. all the blocks must belong to the
5784 same loop. */
5785 if (region[i]->loop_father != loop)
5786 return false;
5787
5788 if (region[i] != entry->dest
5789 && region[i] == loop->header)
5790 return false;
5791 }
5792
5793 set_loop_copy (loop, loop);
5794
5795 /* In case the function is used for loop header copying (which is the primary
5796      use), ensure that EXIT and its copy will be the new latch and entry edges.  */
5797 if (loop->header == entry->dest)
5798 {
5799 copying_header = true;
5800 set_loop_copy (loop, loop_outer (loop));
5801
5802 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5803 return false;
5804
5805 for (i = 0; i < n_region; i++)
5806 if (region[i] != exit->src
5807 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5808 return false;
5809 }
5810
5811 if (!region_copy)
5812 {
5813 region_copy = XNEWVEC (basic_block, n_region);
5814 free_region_copy = true;
5815 }
5816
5817 initialize_original_copy_tables ();
5818
5819 /* Record blocks outside the region that are dominated by something
5820 inside. */
5821 if (update_dominance)
5822 {
5823 doms.create (0);
5824 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5825 }
5826
5827 if (entry->dest->count)
5828 {
5829 total_count = entry->dest->count;
5830 entry_count = entry->count;
5831 /* Fix up corner cases, to avoid division by zero or creation of negative
5832 frequencies. */
5833 if (entry_count > total_count)
5834 entry_count = total_count;
5835 }
5836 else
5837 {
5838 total_freq = entry->dest->frequency;
5839 entry_freq = EDGE_FREQUENCY (entry);
5840 /* Fix up corner cases, to avoid division by zero or creation of negative
5841 frequencies. */
5842 if (total_freq == 0)
5843 total_freq = 1;
5844 else if (entry_freq > total_freq)
5845 entry_freq = total_freq;
5846 }
5847
5848 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5849 split_edge_bb_loc (entry), update_dominance);
5850 if (total_count)
5851 {
5852 scale_bbs_frequencies_gcov_type (region, n_region,
5853 total_count - entry_count,
5854 total_count);
5855 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5856 total_count);
5857 }
5858 else
5859 {
5860 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5861 total_freq);
5862 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5863 }
5864
5865 if (copying_header)
5866 {
5867 loop->header = exit->dest;
5868 loop->latch = exit->src;
5869 }
5870
5871 /* Redirect the entry and add the phi node arguments. */
5872 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5873 gcc_assert (redirected != NULL);
5874 flush_pending_stmts (entry);
5875
5876 /* Concerning updating of dominators: We must recount dominators
5877 for entry block and its copy. Anything that is outside of the
5878 region, but was dominated by something inside needs recounting as
5879 well. */
5880 if (update_dominance)
5881 {
5882 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5883 doms.safe_push (get_bb_original (entry->dest));
5884 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5885 doms.release ();
5886 }
5887
5888 /* Add the other PHI node arguments. */
5889 add_phi_args_after_copy (region_copy, n_region, NULL);
5890
5891 if (free_region_copy)
5892 free (region_copy);
5893
5894 free_original_copy_tables ();
5895 return true;
5896 }
5897
5898 /* Checks if BB is one of the N_REGION blocks in BBS.  */
5899 static bool
5900 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5901 {
5902 unsigned int n;
5903
5904 for (n = 0; n < n_region; n++)
5905 {
5906 if (bb == bbs[n])
5907 return true;
5908 }
5909 return false;
5910 }
5911
5912 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5913    are stored to REGION_COPY in the same order as they appear
5914 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5915 the region, EXIT an exit from it. The condition guarding EXIT
5916 is moved to ENTRY. Returns true if duplication succeeds, false
5917 otherwise.
5918
5919 For example,
5920
5921 some_code;
5922 if (cond)
5923 A;
5924 else
5925 B;
5926
5927 is transformed to
5928
5929 if (cond)
5930 {
5931 some_code;
5932 A;
5933 }
5934 else
5935 {
5936 some_code;
5937 B;
5938 }
5939 */
5940
5941 bool
5942 gimple_duplicate_sese_tail (edge entry, edge exit,
5943 			  basic_block *region, unsigned n_region,
5944 			  basic_block *region_copy)
5945 {
5946 unsigned i;
5947 bool free_region_copy = false;
5948 struct loop *loop = exit->dest->loop_father;
5949 struct loop *orig_loop = entry->dest->loop_father;
5950 basic_block switch_bb, entry_bb, nentry_bb;
5951 vec<basic_block> doms;
5952 int total_freq = 0, exit_freq = 0;
5953 gcov_type total_count = 0, exit_count = 0;
5954 edge exits[2], nexits[2], e;
5955 gimple_stmt_iterator gsi;
5956 gimple cond_stmt;
5957 edge sorig, snew;
5958 basic_block exit_bb;
5959 gimple_stmt_iterator psi;
5960 gimple phi;
5961 tree def;
5962 struct loop *target, *aloop, *cloop;
5963
5964 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5965 exits[0] = exit;
5966 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5967
5968 if (!can_copy_bbs_p (region, n_region))
5969 return false;
5970
5971 initialize_original_copy_tables ();
5972 set_loop_copy (orig_loop, loop);
5973
5974   target = loop;
5975 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5976 {
5977 if (bb_part_of_region_p (aloop->header, region, n_region))
5978 {
5979 cloop = duplicate_loop (aloop, target);
5980 duplicate_subloops (aloop, cloop);
5981 }
5982 }
5983
5984 if (!region_copy)
5985 {
5986 region_copy = XNEWVEC (basic_block, n_region);
5987 free_region_copy = true;
5988 }
5989
5990 gcc_assert (!need_ssa_update_p (cfun));
5991
5992 /* Record blocks outside the region that are dominated by something
5993 inside. */
5994 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5995
5996 if (exit->src->count)
5997 {
5998 total_count = exit->src->count;
5999 exit_count = exit->count;
6000 /* Fix up corner cases, to avoid division by zero or creation of negative
6001 frequencies. */
6002 if (exit_count > total_count)
6003 exit_count = total_count;
6004 }
6005 else
6006 {
6007 total_freq = exit->src->frequency;
6008 exit_freq = EDGE_FREQUENCY (exit);
6009 /* Fix up corner cases, to avoid division by zero or creation of negative
6010 frequencies. */
6011 if (total_freq == 0)
6012 total_freq = 1;
6013 if (exit_freq > total_freq)
6014 exit_freq = total_freq;
6015 }
6016
6017 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6018 split_edge_bb_loc (exit), true);
6019 if (total_count)
6020 {
6021 scale_bbs_frequencies_gcov_type (region, n_region,
6022 total_count - exit_count,
6023 total_count);
6024 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6025 total_count);
6026 }
6027 else
6028 {
6029 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6030 total_freq);
6031 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6032 }
6033
6034   /* Create the switch block, and put the exit condition into it.  */
6035 entry_bb = entry->dest;
6036 nentry_bb = get_bb_copy (entry_bb);
6037 if (!last_stmt (entry->src)
6038 || !stmt_ends_bb_p (last_stmt (entry->src)))
6039 switch_bb = entry->src;
6040 else
6041 switch_bb = split_edge (entry);
6042 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6043
6044 gsi = gsi_last_bb (switch_bb);
6045 cond_stmt = last_stmt (exit->src);
6046 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6047 cond_stmt = gimple_copy (cond_stmt);
6048
6049 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6050
6051 sorig = single_succ_edge (switch_bb);
6052 sorig->flags = exits[1]->flags;
6053 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6054
6055 /* Register the new edge from SWITCH_BB in loop exit lists. */
6056 rescan_loop_exit (snew, true, false);
6057
6058 /* Add the PHI node arguments. */
6059 add_phi_args_after_copy (region_copy, n_region, snew);
6060
6061 /* Get rid of now superfluous conditions and associated edges (and phi node
6062 arguments). */
6063 exit_bb = exit->dest;
6064
6065 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6066 PENDING_STMT (e) = NULL;
6067
6068 /* The latch of ORIG_LOOP was copied, and so was the backedge
6069 to the original header. We redirect this backedge to EXIT_BB. */
6070 for (i = 0; i < n_region; i++)
6071 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6072 {
6073 gcc_assert (single_succ_edge (region_copy[i]));
6074 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6075 PENDING_STMT (e) = NULL;
6076 for (psi = gsi_start_phis (exit_bb);
6077 !gsi_end_p (psi);
6078 gsi_next (&psi))
6079 {
6080 phi = gsi_stmt (psi);
6081 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6082 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6083 }
6084 }
6085 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6086 PENDING_STMT (e) = NULL;
6087
  /* Anything that is outside of the region, but was dominated by something
     inside, needs its dominance info updated.  */
6090 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6091 doms.release ();
6092 /* Update the SSA web. */
6093 update_ssa (TODO_update_ssa);
6094
6095 if (free_region_copy)
6096 free (region_copy);
6097
6098 free_original_copy_tables ();
6099 return true;
6100 }
6101
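/* Illustrative usage sketch (hypothetical helper, not part of GCC): for a
   simple loop whose header ends in the exit test, duplicate the one-block
   region formed by the header so that the test guards the region, as in
   the transformation pictured above.  Assumes LOOP has a preheader and a
   single recorded exit leaving its header.  */

static bool
duplicate_tail_sketch (struct loop *loop)
{
  edge entry = loop_preheader_edge (loop);	/* Entry into the region.  */
  edge exit = single_exit (loop);		/* Assumed to leave the header.  */
  basic_block region[1] = { loop->header };

  gcc_assert (exit && exit->src == loop->header);
  /* Passing NULL lets gimple_duplicate_sese_tail allocate REGION_COPY.  */
  return gimple_duplicate_sese_tail (entry, exit, region, 1, NULL);
}
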
6102 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6103 adding blocks when the dominator traversal reaches EXIT. This
6104 function silently assumes that ENTRY strictly dominates EXIT. */
6105
6106 void
6107 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6108 vec<basic_block> *bbs_p)
6109 {
6110 basic_block son;
6111
6112 for (son = first_dom_son (CDI_DOMINATORS, entry);
6113 son;
6114 son = next_dom_son (CDI_DOMINATORS, son))
6115 {
6116 bbs_p->safe_push (son);
6117 if (son != exit)
6118 gather_blocks_in_sese_region (son, exit, bbs_p);
6119 }
6120 }
6121
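/* Illustrative usage sketch (hypothetical helper): count the blocks of the
   SESE region delimited by ENTRY_BB and EXIT_BB.  ENTRY_BB is pushed
   manually, since the traversal above only collects its dominated sons.  */

static unsigned
count_sese_blocks (basic_block entry_bb, basic_block exit_bb)
{
  vec<basic_block> bbs;
  unsigned n;

  bbs.create (0);
  bbs.safe_push (entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
  n = bbs.length ();
  bbs.release ();
  return n;
}
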
6122 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6123 The duplicates are recorded in VARS_MAP. */
6124
6125 static void
6126 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6127 tree to_context)
6128 {
6129 tree t = *tp, new_t;
6130 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6131 void **loc;
6132
6133 if (DECL_CONTEXT (t) == to_context)
6134 return;
6135
6136 loc = pointer_map_contains (vars_map, t);
6137
6138 if (!loc)
6139 {
6140 loc = pointer_map_insert (vars_map, t);
6141
6142 if (SSA_VAR_P (t))
6143 {
6144 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6145 add_local_decl (f, new_t);
6146 }
6147 else
6148 {
6149 gcc_assert (TREE_CODE (t) == CONST_DECL);
6150 new_t = copy_node (t);
6151 }
6152 DECL_CONTEXT (new_t) = to_context;
6153
6154 *loc = new_t;
6155 }
6156 else
6157 new_t = (tree) *loc;
6158
6159 *tp = new_t;
6160 }
6161
6162
6163 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6164 VARS_MAP maps old ssa names and var_decls to the new ones. */
6165
6166 static tree
6167 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6168 tree to_context)
6169 {
6170 void **loc;
6171 tree new_name;
6172
6173 gcc_assert (!virtual_operand_p (name));
6174
6175 loc = pointer_map_contains (vars_map, name);
6176
6177 if (!loc)
6178 {
6179 tree decl = SSA_NAME_VAR (name);
6180 if (decl)
6181 {
6182 replace_by_duplicate_decl (&decl, vars_map, to_context);
6183 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6184 decl, SSA_NAME_DEF_STMT (name));
6185 if (SSA_NAME_IS_DEFAULT_DEF (name))
6186 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6187 decl, new_name);
6188 }
6189 else
6190 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6191 name, SSA_NAME_DEF_STMT (name));
6192
6193 loc = pointer_map_insert (vars_map, name);
6194 *loc = new_name;
6195 }
6196 else
6197 new_name = (tree) *loc;
6198
6199 return new_name;
6200 }
6201
6202 struct move_stmt_d
6203 {
6204 tree orig_block;
6205 tree new_block;
6206 tree from_context;
6207 tree to_context;
6208 struct pointer_map_t *vars_map;
6209 htab_t new_label_map;
6210 struct pointer_map_t *eh_map;
6211 bool remap_decls_p;
6212 };
6213
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP whose block was previously ORIG_BLOCK, and change
   the DECL_CONTEXT of every local variable referenced in *TP.  */
6217
6218 static tree
6219 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6220 {
6221 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6222 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6223 tree t = *tp;
6224
6225 if (EXPR_P (t))
6226 {
6227 tree block = TREE_BLOCK (t);
6228 if (block == p->orig_block
6229 || (p->orig_block == NULL_TREE
6230 && block != NULL_TREE))
6231 TREE_SET_BLOCK (t, p->new_block);
6232 #ifdef ENABLE_CHECKING
6233 else if (block != NULL_TREE)
6234 {
6235 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6236 block = BLOCK_SUPERCONTEXT (block);
6237 gcc_assert (block == p->orig_block);
6238 }
6239 #endif
6240 }
6241 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6242 {
6243 if (TREE_CODE (t) == SSA_NAME)
6244 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6245 else if (TREE_CODE (t) == LABEL_DECL)
6246 {
6247 if (p->new_label_map)
6248 {
6249 struct tree_map in, *out;
6250 in.base.from = t;
6251 out = (struct tree_map *)
6252 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6253 if (out)
6254 *tp = t = out->to;
6255 }
6256
6257 DECL_CONTEXT (t) = p->to_context;
6258 }
6259 else if (p->remap_decls_p)
6260 {
6261 /* Replace T with its duplicate. T should no longer appear in the
6262 parent function, so this looks wasteful; however, it may appear
6263 in referenced_vars, and more importantly, as virtual operands of
6264 statements, and in alias lists of other variables. It would be
6265 quite difficult to expunge it from all those places. ??? It might
6266 suffice to do this for addressable variables. */
6267 if ((TREE_CODE (t) == VAR_DECL
6268 && !is_global_var (t))
6269 || TREE_CODE (t) == CONST_DECL)
6270 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6271 }
6272 *walk_subtrees = 0;
6273 }
6274 else if (TYPE_P (t))
6275 *walk_subtrees = 0;
6276
6277 return NULL_TREE;
6278 }
6279
6280 /* Helper for move_stmt_r. Given an EH region number for the source
   function, map that to the duplicate EH region number in the dest.  */
6282
6283 static int
6284 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6285 {
6286 eh_region old_r, new_r;
6287 void **slot;
6288
6289 old_r = get_eh_region_from_number (old_nr);
6290 slot = pointer_map_contains (p->eh_map, old_r);
6291 new_r = (eh_region) *slot;
6292
6293 return new_r->index;
6294 }
6295
6296 /* Similar, but operate on INTEGER_CSTs. */
6297
6298 static tree
6299 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6300 {
6301 int old_nr, new_nr;
6302
6303 old_nr = tree_to_shwi (old_t_nr);
6304 new_nr = move_stmt_eh_region_nr (old_nr, p);
6305
6306 return build_int_cst (integer_type_node, new_nr);
6307 }
6308
6309 /* Like move_stmt_op, but for gimple statements.
6310
6311 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6312 contained in the current statement in *GSI_P and change the
6313 DECL_CONTEXT of every local variable referenced in the current
6314 statement. */
6315
6316 static tree
6317 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6318 struct walk_stmt_info *wi)
6319 {
6320 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6321 gimple stmt = gsi_stmt (*gsi_p);
6322 tree block = gimple_block (stmt);
6323
6324 if (block == p->orig_block
6325 || (p->orig_block == NULL_TREE
6326 && block != NULL_TREE))
6327 gimple_set_block (stmt, p->new_block);
6328
6329 switch (gimple_code (stmt))
6330 {
6331 case GIMPLE_CALL:
6332 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6333 {
6334 tree r, fndecl = gimple_call_fndecl (stmt);
6335 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6336 switch (DECL_FUNCTION_CODE (fndecl))
6337 {
6338 case BUILT_IN_EH_COPY_VALUES:
6339 r = gimple_call_arg (stmt, 1);
6340 r = move_stmt_eh_region_tree_nr (r, p);
6341 gimple_call_set_arg (stmt, 1, r);
6342 /* FALLTHRU */
6343
6344 case BUILT_IN_EH_POINTER:
6345 case BUILT_IN_EH_FILTER:
6346 r = gimple_call_arg (stmt, 0);
6347 r = move_stmt_eh_region_tree_nr (r, p);
6348 gimple_call_set_arg (stmt, 0, r);
6349 break;
6350
6351 default:
6352 break;
6353 }
6354 }
6355 break;
6356
6357 case GIMPLE_RESX:
6358 {
6359 int r = gimple_resx_region (stmt);
6360 r = move_stmt_eh_region_nr (r, p);
6361 gimple_resx_set_region (stmt, r);
6362 }
6363 break;
6364
6365 case GIMPLE_EH_DISPATCH:
6366 {
6367 int r = gimple_eh_dispatch_region (stmt);
6368 r = move_stmt_eh_region_nr (r, p);
6369 gimple_eh_dispatch_set_region (stmt, r);
6370 }
6371 break;
6372
6373 case GIMPLE_OMP_RETURN:
6374 case GIMPLE_OMP_CONTINUE:
6375 break;
6376 default:
6377 if (is_gimple_omp (stmt))
6378 {
6379 /* Do not remap variables inside OMP directives. Variables
6380 referenced in clauses and directive header belong to the
6381 parent function and should not be moved into the child
6382 function. */
6383 bool save_remap_decls_p = p->remap_decls_p;
6384 p->remap_decls_p = false;
6385 *handled_ops_p = true;
6386
6387 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6388 move_stmt_op, wi);
6389
6390 p->remap_decls_p = save_remap_decls_p;
6391 }
6392 break;
6393 }
6394
6395 return NULL_TREE;
6396 }
6397
6398 /* Move basic block BB from function CFUN to function DEST_FN. The
6399 block is moved out of the original linked list and placed after
6400 block AFTER in the new list. Also, the block is removed from the
6401 original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
   updated to reflect the moved edges.
6404
6405 The local variables are remapped to new instances, VARS_MAP is used
6406 to record the mapping. */
6407
6408 static void
6409 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6410 basic_block after, bool update_edge_count_p,
6411 struct move_stmt_d *d)
6412 {
6413 struct control_flow_graph *cfg;
6414 edge_iterator ei;
6415 edge e;
6416 gimple_stmt_iterator si;
6417 unsigned old_len, new_len;
6418
6419 /* Remove BB from dominance structures. */
6420 delete_from_dominance_info (CDI_DOMINATORS, bb);
6421
6422 /* Move BB from its current loop to the copy in the new function. */
6423 if (current_loops)
6424 {
6425 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6426 if (new_loop)
6427 bb->loop_father = new_loop;
6428 }
6429
6430 /* Link BB to the new linked list. */
6431 move_block_after (bb, after);
6432
6433 /* Update the edge count in the corresponding flowgraphs. */
6434 if (update_edge_count_p)
6435 FOR_EACH_EDGE (e, ei, bb->succs)
6436 {
6437 cfun->cfg->x_n_edges--;
6438 dest_cfun->cfg->x_n_edges++;
6439 }
6440
6441 /* Remove BB from the original basic block array. */
6442 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6443 cfun->cfg->x_n_basic_blocks--;
6444
6445 /* Grow DEST_CFUN's basic block array if needed. */
6446 cfg = dest_cfun->cfg;
6447 cfg->x_n_basic_blocks++;
6448 if (bb->index >= cfg->x_last_basic_block)
6449 cfg->x_last_basic_block = bb->index + 1;
6450
6451 old_len = vec_safe_length (cfg->x_basic_block_info);
6452 if ((unsigned) cfg->x_last_basic_block >= old_len)
6453 {
6454 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6455 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6456 }
6457
6458 (*cfg->x_basic_block_info)[bb->index] = bb;
6459
6460 /* Remap the variables in phi nodes. */
6461 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6462 {
6463 gimple phi = gsi_stmt (si);
6464 use_operand_p use;
6465 tree op = PHI_RESULT (phi);
6466 ssa_op_iter oi;
6467 unsigned i;
6468
6469 if (virtual_operand_p (op))
6470 {
6471 /* Remove the phi nodes for virtual operands (alias analysis will be
6472 run for the new function, anyway). */
6473 remove_phi_node (&si, true);
6474 continue;
6475 }
6476
6477 SET_PHI_RESULT (phi,
6478 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6479 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6480 {
6481 op = USE_FROM_PTR (use);
6482 if (TREE_CODE (op) == SSA_NAME)
6483 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6484 }
6485
6486 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6487 {
6488 location_t locus = gimple_phi_arg_location (phi, i);
6489 tree block = LOCATION_BLOCK (locus);
6490
6491 if (locus == UNKNOWN_LOCATION)
6492 continue;
6493 if (d->orig_block == NULL_TREE || block == d->orig_block)
6494 {
6495 if (d->new_block == NULL_TREE)
6496 locus = LOCATION_LOCUS (locus);
6497 else
6498 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6499 gimple_phi_arg_set_location (phi, i, locus);
6500 }
6501 }
6502
6503 gsi_next (&si);
6504 }
6505
6506 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6507 {
6508 gimple stmt = gsi_stmt (si);
6509 struct walk_stmt_info wi;
6510
6511 memset (&wi, 0, sizeof (wi));
6512 wi.info = d;
6513 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6514
6515 if (gimple_code (stmt) == GIMPLE_LABEL)
6516 {
6517 tree label = gimple_label_label (stmt);
6518 int uid = LABEL_DECL_UID (label);
6519
6520 gcc_assert (uid > -1);
6521
6522 old_len = vec_safe_length (cfg->x_label_to_block_map);
6523 if (old_len <= (unsigned) uid)
6524 {
6525 new_len = 3 * uid / 2 + 1;
6526 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6527 }
6528
6529 (*cfg->x_label_to_block_map)[uid] = bb;
6530 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6531
6532 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6533
6534 if (uid >= dest_cfun->cfg->last_label_uid)
6535 dest_cfun->cfg->last_label_uid = uid + 1;
6536 }
6537
6538 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6539 remove_stmt_from_eh_lp_fn (cfun, stmt);
6540
6541 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6542 gimple_remove_stmt_histograms (cfun, stmt);
6543
6544 /* We cannot leave any operands allocated from the operand caches of
6545 the current function. */
6546 free_stmt_operands (cfun, stmt);
6547 push_cfun (dest_cfun);
6548 update_stmt (stmt);
6549 pop_cfun ();
6550 }
6551
6552 FOR_EACH_EDGE (e, ei, bb->succs)
6553 if (e->goto_locus != UNKNOWN_LOCATION)
6554 {
6555 tree block = LOCATION_BLOCK (e->goto_locus);
6556 if (d->orig_block == NULL_TREE
6557 || block == d->orig_block)
6558 e->goto_locus = d->new_block ?
6559 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6560 LOCATION_LOCUS (e->goto_locus);
6561 }
6562 }
6563
6564 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6565 the outermost EH region. Use REGION as the incoming base EH region. */
6566
6567 static eh_region
6568 find_outermost_region_in_block (struct function *src_cfun,
6569 basic_block bb, eh_region region)
6570 {
6571 gimple_stmt_iterator si;
6572
6573 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6574 {
6575 gimple stmt = gsi_stmt (si);
6576 eh_region stmt_region;
6577 int lp_nr;
6578
6579 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6580 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6581 if (stmt_region)
6582 {
6583 if (region == NULL)
6584 region = stmt_region;
6585 else if (stmt_region != region)
6586 {
6587 region = eh_region_outermost (src_cfun, stmt_region, region);
6588 gcc_assert (region != NULL);
6589 }
6590 }
6591 }
6592
6593 return region;
6594 }
6595
6596 static tree
6597 new_label_mapper (tree decl, void *data)
6598 {
6599 htab_t hash = (htab_t) data;
6600 struct tree_map *m;
6601 void **slot;
6602
6603 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6604
6605 m = XNEW (struct tree_map);
6606 m->hash = DECL_UID (decl);
6607 m->base.from = decl;
6608 m->to = create_artificial_label (UNKNOWN_LOCATION);
6609 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6610 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6611 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6612
6613 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6614 gcc_assert (*slot == NULL);
6615
6616 *slot = m;
6617
6618 return m->to;
6619 }
6620
/* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
   subblocks.  */
6623
6624 static void
6625 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6626 tree to_context)
6627 {
6628 tree *tp, t;
6629
6630 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6631 {
6632 t = *tp;
6633 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6634 continue;
6635 replace_by_duplicate_decl (&t, vars_map, to_context);
6636 if (t != *tp)
6637 {
6638 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6639 {
6640 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6641 DECL_HAS_VALUE_EXPR_P (t) = 1;
6642 }
6643 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6644 *tp = t;
6645 }
6646 }
6647
6648 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6649 replace_block_vars_by_duplicates (block, vars_map, to_context);
6650 }
6651
/* Fix up the loop arrays and numbers after moving LOOP and its subloops
6653 from FN1 to FN2. */
6654
6655 static void
6656 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6657 struct loop *loop)
6658 {
6659 /* Discard it from the old loop array. */
6660 (*get_loops (fn1))[loop->num] = NULL;
6661
6662 /* Place it in the new loop array, assigning it a new number. */
6663 loop->num = number_of_loops (fn2);
6664 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6665
6666 /* Recurse to children. */
6667 for (loop = loop->inner; loop; loop = loop->next)
6668 fixup_loop_arrays_after_move (fn1, fn2, loop);
6669 }
6670
6671 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6672 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6673 single basic block in the original CFG and the new basic block is
6674 returned. DEST_CFUN must not have a CFG yet.
6675
6676 Note that the region need not be a pure SESE region. Blocks inside
6677 the region may contain calls to abort/exit. The only restriction
6678 is that ENTRY_BB should be the only entry point and it must
6679 dominate EXIT_BB.
6680
6681 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6683 to the new function.
6684
6685 All local variables referenced in the region are assumed to be in
6686 the corresponding BLOCK_VARS and unexpanded variable lists
6687 associated with DEST_CFUN. */
6688
6689 basic_block
6690 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6691 basic_block exit_bb, tree orig_block)
6692 {
6693 vec<basic_block> bbs, dom_bbs;
6694 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6695 basic_block after, bb, *entry_pred, *exit_succ, abb;
6696 struct function *saved_cfun = cfun;
6697 int *entry_flag, *exit_flag;
6698 unsigned *entry_prob, *exit_prob;
6699 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6700 edge e;
6701 edge_iterator ei;
6702 htab_t new_label_map;
6703 struct pointer_map_t *vars_map, *eh_map;
6704 struct loop *loop = entry_bb->loop_father;
6705 struct loop *loop0 = get_loop (saved_cfun, 0);
6706 struct move_stmt_d d;
6707
6708 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6709 region. */
6710 gcc_assert (entry_bb != exit_bb
6711 && (!exit_bb
6712 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6713
  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by gather_blocks_in_sese_region.  */
6716 bbs.create (0);
6717 bbs.safe_push (entry_bb);
6718 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6719
6720 /* The blocks that used to be dominated by something in BBS will now be
6721 dominated by the new block. */
6722 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6723 bbs.address (),
6724 bbs.length ());
6725
6726 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6727 the predecessor edges to ENTRY_BB and the successor edges to
6728 EXIT_BB so that we can re-attach them to the new basic block that
6729 will replace the region. */
6730 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6731 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6732 entry_flag = XNEWVEC (int, num_entry_edges);
6733 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6734 i = 0;
6735 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6736 {
6737 entry_prob[i] = e->probability;
6738 entry_flag[i] = e->flags;
6739 entry_pred[i++] = e->src;
6740 remove_edge (e);
6741 }
6742
6743 if (exit_bb)
6744 {
6745 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6746 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6747 exit_flag = XNEWVEC (int, num_exit_edges);
6748 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6749 i = 0;
6750 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6751 {
6752 exit_prob[i] = e->probability;
6753 exit_flag[i] = e->flags;
6754 exit_succ[i++] = e->dest;
6755 remove_edge (e);
6756 }
6757 }
6758 else
6759 {
6760 num_exit_edges = 0;
6761 exit_succ = NULL;
6762 exit_flag = NULL;
6763 exit_prob = NULL;
6764 }
6765
6766 /* Switch context to the child function to initialize DEST_FN's CFG. */
6767 gcc_assert (dest_cfun->cfg == NULL);
6768 push_cfun (dest_cfun);
6769
6770 init_empty_tree_cfg ();
6771
6772 /* Initialize EH information for the new function. */
6773 eh_map = NULL;
6774 new_label_map = NULL;
6775 if (saved_cfun->eh)
6776 {
6777 eh_region region = NULL;
6778
6779 FOR_EACH_VEC_ELT (bbs, i, bb)
6780 region = find_outermost_region_in_block (saved_cfun, bb, region);
6781
6782 init_eh_for_function ();
6783 if (region != NULL)
6784 {
6785 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6786 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6787 new_label_mapper, new_label_map);
6788 }
6789 }
6790
6791 /* Initialize an empty loop tree. */
6792 struct loops *loops = ggc_alloc_cleared_loops ();
6793 init_loops_structure (dest_cfun, loops, 1);
6794 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6795 set_loops_for_fn (dest_cfun, loops);
6796
6797 /* Move the outlined loop tree part. */
6798 num_nodes = bbs.length ();
6799 FOR_EACH_VEC_ELT (bbs, i, bb)
6800 {
6801 if (bb->loop_father->header == bb)
6802 {
6803 struct loop *this_loop = bb->loop_father;
6804 struct loop *outer = loop_outer (this_loop);
6805 if (outer == loop
6806 /* If the SESE region contains some bbs ending with
6807 a noreturn call, those are considered to belong
6808 to the outermost loop in saved_cfun, rather than
6809 the entry_bb's loop_father. */
6810 || outer == loop0)
6811 {
6812 if (outer != loop)
6813 num_nodes -= this_loop->num_nodes;
6814 flow_loop_tree_node_remove (bb->loop_father);
6815 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6816 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6817 }
6818 }
6819 else if (bb->loop_father == loop0 && loop0 != loop)
6820 num_nodes--;
6821
6822 /* Remove loop exits from the outlined region. */
6823 if (loops_for_fn (saved_cfun)->exits)
6824 FOR_EACH_EDGE (e, ei, bb->succs)
6825 {
6826 void **slot = htab_find_slot_with_hash
6827 (loops_for_fn (saved_cfun)->exits, e,
6828 htab_hash_pointer (e), NO_INSERT);
6829 if (slot)
6830 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6831 }
6832 }
6833
6834
6835 /* Adjust the number of blocks in the tree root of the outlined part. */
6836 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6837
6838 /* Setup a mapping to be used by move_block_to_fn. */
6839 loop->aux = current_loops->tree_root;
6840 loop0->aux = current_loops->tree_root;
6841
6842 pop_cfun ();
6843
6844 /* Move blocks from BBS into DEST_CFUN. */
6845 gcc_assert (bbs.length () >= 2);
6846 after = dest_cfun->cfg->x_entry_block_ptr;
6847 vars_map = pointer_map_create ();
6848
6849 memset (&d, 0, sizeof (d));
6850 d.orig_block = orig_block;
6851 d.new_block = DECL_INITIAL (dest_cfun->decl);
6852 d.from_context = cfun->decl;
6853 d.to_context = dest_cfun->decl;
6854 d.vars_map = vars_map;
6855 d.new_label_map = new_label_map;
6856 d.eh_map = eh_map;
6857 d.remap_decls_p = true;
6858
6859 FOR_EACH_VEC_ELT (bbs, i, bb)
6860 {
      /* No need to update edge counts on the last block.  They have
	 already been updated earlier when we detached the region from
	 the original CFG.  */
6864 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6865 after = bb;
6866 }
6867
6868 loop->aux = NULL;
6869 loop0->aux = NULL;
6870 /* Loop sizes are no longer correct, fix them up. */
6871 loop->num_nodes -= num_nodes;
6872 for (struct loop *outer = loop_outer (loop);
6873 outer; outer = loop_outer (outer))
6874 outer->num_nodes -= num_nodes;
6875 loop0->num_nodes -= bbs.length () - num_nodes;
6876
6877 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6878 {
6879 struct loop *aloop;
6880 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6881 if (aloop != NULL)
6882 {
6883 if (aloop->simduid)
6884 {
6885 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6886 d.to_context);
6887 dest_cfun->has_simduid_loops = true;
6888 }
6889 if (aloop->force_vect)
6890 dest_cfun->has_force_vect_loops = true;
6891 }
6892 }
6893
6894 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6895 if (orig_block)
6896 {
6897 tree block;
6898 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6899 == NULL_TREE);
6900 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6901 = BLOCK_SUBBLOCKS (orig_block);
6902 for (block = BLOCK_SUBBLOCKS (orig_block);
6903 block; block = BLOCK_CHAIN (block))
6904 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6905 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6906 }
6907
6908 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6909 vars_map, dest_cfun->decl);
6910
6911 if (new_label_map)
6912 htab_delete (new_label_map);
6913 if (eh_map)
6914 pointer_map_destroy (eh_map);
6915 pointer_map_destroy (vars_map);
6916
6917 /* Rewire the entry and exit blocks. The successor to the entry
6918 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6919 the child function. Similarly, the predecessor of DEST_FN's
6920 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6921 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6922 various CFG manipulation function get to the right CFG.
6923
6924 FIXME, this is silly. The CFG ought to become a parameter to
6925 these helpers. */
6926 push_cfun (dest_cfun);
6927 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
6928 if (exit_bb)
6929 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
6930 pop_cfun ();
6931
6932 /* Back in the original function, the SESE region has disappeared,
6933 create a new basic block in its place. */
6934 bb = create_empty_bb (entry_pred[0]);
6935 if (current_loops)
6936 add_bb_to_loop (bb, loop);
6937 for (i = 0; i < num_entry_edges; i++)
6938 {
6939 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6940 e->probability = entry_prob[i];
6941 }
6942
6943 for (i = 0; i < num_exit_edges; i++)
6944 {
6945 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6946 e->probability = exit_prob[i];
6947 }
6948
6949 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6950 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6951 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6952 dom_bbs.release ();
6953
6954 if (exit_bb)
6955 {
6956 free (exit_prob);
6957 free (exit_flag);
6958 free (exit_succ);
6959 }
6960 free (entry_prob);
6961 free (entry_flag);
6962 free (entry_pred);
6963 bbs.release ();
6964
6965 return bb;
6966 }
6967
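/* Illustrative usage sketch (hypothetical caller, loosely modelled on how
   an outliner such as the OMP expander might use the function above):
   move the region between ENTRY_BB and EXIT_BB into CHILD_FN, which must
   not have a CFG yet.  Passing NULL_TREE for ORIG_BLOCK retargets every
   block-bearing statement at the new function's outermost BLOCK.  */

static basic_block
outline_region_sketch (tree child_fn, basic_block entry_bb,
		       basic_block exit_bb)
{
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  gcc_assert (child_cfun->cfg == NULL);
  return move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, NULL_TREE);
}
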
6968
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
   dumpfile.h).  */
6971
6972 void
6973 dump_function_to_file (tree fndecl, FILE *file, int flags)
6974 {
6975 tree arg, var, old_current_fndecl = current_function_decl;
6976 struct function *dsf;
6977 bool ignore_topmost_bind = false, any_var = false;
6978 basic_block bb;
6979 tree chain;
6980 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6981 && decl_is_tm_clone (fndecl));
6982 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6983
6984 current_function_decl = fndecl;
6985 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6986
6987 arg = DECL_ARGUMENTS (fndecl);
6988 while (arg)
6989 {
6990 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6991 fprintf (file, " ");
6992 print_generic_expr (file, arg, dump_flags);
6993 if (flags & TDF_VERBOSE)
6994 print_node (file, "", arg, 4);
6995 if (DECL_CHAIN (arg))
6996 fprintf (file, ", ");
6997 arg = DECL_CHAIN (arg);
6998 }
6999 fprintf (file, ")\n");
7000
7001 if (flags & TDF_VERBOSE)
7002 print_node (file, "", fndecl, 2);
7003
7004 dsf = DECL_STRUCT_FUNCTION (fndecl);
7005 if (dsf && (flags & TDF_EH))
7006 dump_eh_tree (file, dsf);
7007
7008 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7009 {
7010 dump_node (fndecl, TDF_SLIM | flags, file);
7011 current_function_decl = old_current_fndecl;
7012 return;
7013 }
7014
7015 /* When GIMPLE is lowered, the variables are no longer available in
7016 BIND_EXPRs, so display them separately. */
7017 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7018 {
7019 unsigned ix;
7020 ignore_topmost_bind = true;
7021
7022 fprintf (file, "{\n");
7023 if (!vec_safe_is_empty (fun->local_decls))
7024 FOR_EACH_LOCAL_DECL (fun, ix, var)
7025 {
7026 print_generic_decl (file, var, flags);
7027 if (flags & TDF_VERBOSE)
7028 print_node (file, "", var, 4);
7029 fprintf (file, "\n");
7030
7031 any_var = true;
7032 }
7033 if (gimple_in_ssa_p (cfun))
7034 for (ix = 1; ix < num_ssa_names; ++ix)
7035 {
7036 tree name = ssa_name (ix);
7037 if (name && !SSA_NAME_VAR (name))
7038 {
7039 fprintf (file, " ");
7040 print_generic_expr (file, TREE_TYPE (name), flags);
7041 fprintf (file, " ");
7042 print_generic_expr (file, name, flags);
7043 fprintf (file, ";\n");
7044
7045 any_var = true;
7046 }
7047 }
7048 }
7049
7050 if (fun && fun->decl == fndecl
7051 && fun->cfg
7052 && basic_block_info_for_fn (fun))
7053 {
7054 /* If the CFG has been built, emit a CFG-based dump. */
7055 if (!ignore_topmost_bind)
7056 fprintf (file, "{\n");
7057
7058 if (any_var && n_basic_blocks_for_fn (fun))
7059 fprintf (file, "\n");
7060
7061 FOR_EACH_BB_FN (bb, fun)
7062 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7063
7064 fprintf (file, "}\n");
7065 }
7066 else if (DECL_SAVED_TREE (fndecl) == NULL)
7067 {
7068 /* The function is now in GIMPLE form but the CFG has not been
7069 built yet. Emit the single sequence of GIMPLE statements
7070 that make up its body. */
7071 gimple_seq body = gimple_body (fndecl);
7072
7073 if (gimple_seq_first_stmt (body)
7074 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7075 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7076 print_gimple_seq (file, body, 0, flags);
7077 else
7078 {
7079 if (!ignore_topmost_bind)
7080 fprintf (file, "{\n");
7081
7082 if (any_var)
7083 fprintf (file, "\n");
7084
7085 print_gimple_seq (file, body, 2, flags);
7086 fprintf (file, "}\n");
7087 }
7088 }
7089 else
7090 {
7091 int indent;
7092
7093 /* Make a tree based dump. */
7094 chain = DECL_SAVED_TREE (fndecl);
7095 if (chain && TREE_CODE (chain) == BIND_EXPR)
7096 {
7097 if (ignore_topmost_bind)
7098 {
7099 chain = BIND_EXPR_BODY (chain);
7100 indent = 2;
7101 }
7102 else
7103 indent = 0;
7104 }
7105 else
7106 {
7107 if (!ignore_topmost_bind)
7108 fprintf (file, "{\n");
7109 indent = 2;
7110 }
7111
7112 if (any_var)
7113 fprintf (file, "\n");
7114
7115 print_generic_stmt_indented (file, chain, flags, indent);
7116 if (ignore_topmost_bind)
7117 fprintf (file, "}\n");
7118 }
7119
7120 if (flags & TDF_ENUMERATE_LOCALS)
7121 dump_enumerated_decls (file, flags);
7122 fprintf (file, "\n\n");
7123
7124 current_function_decl = old_current_fndecl;
7125 }
7126
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
7128
7129 DEBUG_FUNCTION void
7130 debug_function (tree fn, int flags)
7131 {
7132 dump_function_to_file (fn, stderr, flags);
7133 }
7134
7135
7136 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7137
7138 static void
7139 print_pred_bbs (FILE *file, basic_block bb)
7140 {
7141 edge e;
7142 edge_iterator ei;
7143
7144 FOR_EACH_EDGE (e, ei, bb->preds)
7145 fprintf (file, "bb_%d ", e->src->index);
7146 }
7147
7148
7149 /* Print on FILE the indexes for the successors of basic_block BB. */
7150
7151 static void
7152 print_succ_bbs (FILE *file, basic_block bb)
7153 {
7154 edge e;
7155 edge_iterator ei;
7156
7157 FOR_EACH_EDGE (e, ei, bb->succs)
7158 fprintf (file, "bb_%d ", e->dest->index);
7159 }
7160
/* Print to FILE the basic block BB, according to the VERBOSITY level.  */
7162
7163 void
7164 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7165 {
7166 char *s_indent = (char *) alloca ((size_t) indent + 1);
7167 memset ((void *) s_indent, ' ', (size_t) indent);
7168 s_indent[indent] = '\0';
7169
7170 /* Print basic_block's header. */
7171 if (verbosity >= 2)
7172 {
7173 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7174 print_pred_bbs (file, bb);
7175 fprintf (file, "}, succs = {");
7176 print_succ_bbs (file, bb);
7177 fprintf (file, "})\n");
7178 }
7179
7180 /* Print basic_block's body. */
7181 if (verbosity >= 3)
7182 {
7183 fprintf (file, "%s {\n", s_indent);
7184 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7185 fprintf (file, "%s }\n", s_indent);
7186 }
7187 }
7188
7189 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7190
/* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on the
   VERBOSITY level, this outputs the contents of the loop, or just its
   structure.  */
7194
7195 static void
7196 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7197 {
7198 char *s_indent;
7199 basic_block bb;
7200
7201 if (loop == NULL)
7202 return;
7203
7204 s_indent = (char *) alloca ((size_t) indent + 1);
7205 memset ((void *) s_indent, ' ', (size_t) indent);
7206 s_indent[indent] = '\0';
7207
7208 /* Print loop's header. */
7209 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7210 if (loop->header)
7211 fprintf (file, "header = %d", loop->header->index);
7212 else
7213 {
7214 fprintf (file, "deleted)\n");
7215 return;
7216 }
7217 if (loop->latch)
7218 fprintf (file, ", latch = %d", loop->latch->index);
7219 else
7220 fprintf (file, ", multiple latches");
7221 fprintf (file, ", niter = ");
7222 print_generic_expr (file, loop->nb_iterations, 0);
7223
7224 if (loop->any_upper_bound)
7225 {
7226 fprintf (file, ", upper_bound = ");
7227 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7228 }
7229
7230 if (loop->any_estimate)
7231 {
7232 fprintf (file, ", estimate = ");
7233 dump_double_int (file, loop->nb_iterations_estimate, true);
7234 }
7235 fprintf (file, ")\n");
7236
7237 /* Print loop's body. */
7238 if (verbosity >= 1)
7239 {
7240 fprintf (file, "%s{\n", s_indent);
7241 FOR_EACH_BB (bb)
7242 if (bb->loop_father == loop)
7243 print_loops_bb (file, bb, indent, verbosity);
7244
7245 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7246 fprintf (file, "%s}\n", s_indent);
7247 }
7248 }
7249
/* Print the LOOP and its sibling loops on FILE, indented INDENT
   spaces.  Depending on the VERBOSITY level, this outputs the contents
   of the loop, or just its structure.  */
7253
7254 static void
7255 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7256 int verbosity)
7257 {
7258 if (loop == NULL)
7259 return;
7260
7261 print_loop (file, loop, indent, verbosity);
7262 print_loop_and_siblings (file, loop->next, indent, verbosity);
7263 }
7264
7265 /* Follow a CFG edge from the entry point of the program, and on entry
7266 of a loop, pretty print the loop structure on FILE. */
7267
7268 void
7269 print_loops (FILE *file, int verbosity)
7270 {
7271 basic_block bb;
7272
7273 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7274 if (bb && bb->loop_father)
7275 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7276 }
7277
7278 /* Dump a loop. */
7279
7280 DEBUG_FUNCTION void
7281 debug (struct loop &ref)
7282 {
7283 print_loop (stderr, &ref, 0, /*verbosity*/0);
7284 }
7285
7286 DEBUG_FUNCTION void
7287 debug (struct loop *ptr)
7288 {
7289 if (ptr)
7290 debug (*ptr);
7291 else
7292 fprintf (stderr, "<nil>\n");
7293 }
7294
7295 /* Dump a loop verbosely. */
7296
7297 DEBUG_FUNCTION void
7298 debug_verbose (struct loop &ref)
7299 {
7300 print_loop (stderr, &ref, 0, /*verbosity*/3);
7301 }
7302
7303 DEBUG_FUNCTION void
7304 debug_verbose (struct loop *ptr)
7305 {
7306 if (ptr)
    debug_verbose (*ptr);
7308 else
7309 fprintf (stderr, "<nil>\n");
7310 }
7311
7312
/* Debug the loop structure at tree level, at some VERBOSITY level.  */
7314
7315 DEBUG_FUNCTION void
7316 debug_loops (int verbosity)
7317 {
7318 print_loops (stderr, verbosity);
7319 }
7320
7321 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7322
7323 DEBUG_FUNCTION void
7324 debug_loop (struct loop *loop, int verbosity)
7325 {
7326 print_loop (stderr, loop, 0, verbosity);
7327 }
7328
7329 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7330 level. */
7331
7332 DEBUG_FUNCTION void
7333 debug_loop_num (unsigned num, int verbosity)
7334 {
7335 debug_loop (get_loop (cfun, num), verbosity);
7336 }
7337
7338 /* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false
   otherwise.  */
7341
7342 static bool
7343 gimple_block_ends_with_call_p (basic_block bb)
7344 {
7345 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7346 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7347 }
7348
7349
/* Return true if BB ends with a conditional branch.  Return false
   otherwise.  */
7352
7353 static bool
7354 gimple_block_ends_with_condjump_p (const_basic_block bb)
7355 {
7356 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7357 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7358 }
7359
7360
7361 /* Return true if we need to add fake edge to exit at statement T.
7362 Helper function for gimple_flow_call_edges_add. */
7363
7364 static bool
7365 need_fake_edge_p (gimple t)
7366 {
7367 tree fndecl = NULL_TREE;
7368 int call_flags = 0;
7369
7370 /* NORETURN and LONGJMP calls already have an edge to exit.
7371 CONST and PURE calls do not need one.
7372 We don't currently check for CONST and PURE here, although
7373 it would be a good idea, because those attributes are
7374 figured out from the RTL in mark_constant_function, and
7375 the counter incrementation code from -fprofile-arcs
7376 leads to different results from -fbranch-probabilities. */
7377 if (is_gimple_call (t))
7378 {
7379 fndecl = gimple_call_fndecl (t);
7380 call_flags = gimple_call_flags (t);
7381 }
7382
7383 if (is_gimple_call (t)
7384 && fndecl
7385 && DECL_BUILT_IN (fndecl)
7386 && (call_flags & ECF_NOTHROW)
7387 && !(call_flags & ECF_RETURNS_TWICE)
7388 /* fork() doesn't really return twice, but the effect of
7389 wrapping it in __gcov_fork() which calls __gcov_flush()
7390 and clears the counters before forking has the same
7391 effect as returning twice. Force a fake edge. */
7392 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7393 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7394 return false;
7395
7396 if (is_gimple_call (t))
7397 {
7398 edge_iterator ei;
7399 edge e;
7400 basic_block bb;
7401
7402 if (!(call_flags & ECF_NORETURN))
7403 return true;
7404
7405 bb = gimple_bb (t);
7406 FOR_EACH_EDGE (e, ei, bb->succs)
7407 if ((e->flags & EDGE_FAKE) == 0)
7408 return true;
7409 }
7410
7411 if (gimple_code (t) == GIMPLE_ASM
7412 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7413 return true;
7414
7415 return false;
7416 }
7417
7418
/* Add fake edges to the function exit for any non-constant and
   non-noreturn calls (or noreturn calls with EH/abnormal edges), and
   for volatile inline assembly, in the bitmap of blocks specified by
   BLOCKS, or in the whole CFG if BLOCKS is zero.  Return the number
   of blocks that were split.
7424
7425 The goal is to expose cases in which entering a basic block does
7426 not imply that all subsequent instructions must be executed. */
7427
7428 static int
7429 gimple_flow_call_edges_add (sbitmap blocks)
7430 {
7431 int i;
7432 int blocks_split = 0;
7433 int last_bb = last_basic_block;
7434 bool check_last_block = false;
7435
7436 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7437 return 0;
7438
7439 if (! blocks)
7440 check_last_block = true;
7441 else
7442 check_last_block = bitmap_bit_p (blocks,
7443 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7444
7445 /* In the last basic block, before epilogue generation, there will be
7446 a fallthru edge to EXIT. Special care is required if the last insn
7447 of the last basic block is a call because make_edge folds duplicate
7448 edges, which would result in the fallthru edge also being marked
7449 fake, which would result in the fallthru edge being removed by
7450 remove_fake_edges, which would result in an invalid CFG.
7451
7452 Moreover, we can't elide the outgoing fake edge, since the block
7453 profiler needs to take this into account in order to solve the minimal
7454 spanning tree in the case that the call doesn't return.
7455
7456 Handle this by adding a dummy instruction in a new last basic block. */
7457 if (check_last_block)
7458 {
7459 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7460 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7461 gimple t = NULL;
7462
7463 if (!gsi_end_p (gsi))
7464 t = gsi_stmt (gsi);
7465
7466 if (t && need_fake_edge_p (t))
7467 {
7468 edge e;
7469
7470 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7471 if (e)
7472 {
7473 gsi_insert_on_edge (e, gimple_build_nop ());
7474 gsi_commit_edge_inserts ();
7475 }
7476 }
7477 }
7478
  /* Now add fake edges to the function exit for any non-constant
7480 calls since there is no way that we can determine if they will
7481 return or not... */
7482 for (i = 0; i < last_bb; i++)
7483 {
7484 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7485 gimple_stmt_iterator gsi;
7486 gimple stmt, last_stmt;
7487
7488 if (!bb)
7489 continue;
7490
7491 if (blocks && !bitmap_bit_p (blocks, i))
7492 continue;
7493
7494 gsi = gsi_last_nondebug_bb (bb);
7495 if (!gsi_end_p (gsi))
7496 {
7497 last_stmt = gsi_stmt (gsi);
7498 do
7499 {
7500 stmt = gsi_stmt (gsi);
7501 if (need_fake_edge_p (stmt))
7502 {
7503 edge e;
7504
7505 /* The handling above of the final block before the
7506 epilogue should be enough to verify that there is
7507 no edge to the exit block in CFG already.
7508 Calling make_edge in such case would cause us to
7509 mark that edge as fake and remove it later. */
7510 #ifdef ENABLE_CHECKING
7511 if (stmt == last_stmt)
7512 {
7513 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7514 gcc_assert (e == NULL);
7515 }
7516 #endif
7517
7518 /* Note that the following may create a new basic block
7519 and renumber the existing basic blocks. */
7520 if (stmt != last_stmt)
7521 {
7522 e = split_block (bb, stmt);
7523 if (e)
7524 blocks_split++;
7525 }
7526 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7527 }
7528 gsi_prev (&gsi);
7529 }
7530 while (!gsi_end_p (gsi));
7531 }
7532 }
7533
7534 if (blocks_split)
7535 verify_flow_info ();
7536
7537 return blocks_split;
7538 }
7539
7540 /* Removes edge E and all the blocks dominated by it, and updates dominance
7541 information. The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */
7543
7544 void
7545 remove_edge_and_dominated_blocks (edge e)
7546 {
7547 vec<basic_block> bbs_to_remove = vNULL;
7548 vec<basic_block> bbs_to_fix_dom = vNULL;
7549 bitmap df, df_idom;
7550 edge f;
7551 edge_iterator ei;
7552 bool none_removed = false;
7553 unsigned i;
7554 basic_block bb, dbb;
7555 bitmap_iterator bi;
7556
7557 if (!dom_info_available_p (CDI_DOMINATORS))
7558 {
7559 remove_edge (e);
7560 return;
7561 }
7562
7563 /* No updating is needed for edges to exit. */
7564 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7565 {
7566 if (cfgcleanup_altered_bbs)
7567 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7568 remove_edge (e);
7569 return;
7570 }
7571
7572 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7573 that is not dominated by E->dest, then this set is empty. Otherwise,
7574 all the basic blocks dominated by E->dest are removed.
7575
7576 Also, to DF_IDOM we store the immediate dominators of the blocks in
7577 the dominance frontier of E (i.e., of the successors of the
7578 removed blocks, if there are any, and of E->dest otherwise). */
7579 FOR_EACH_EDGE (f, ei, e->dest->preds)
7580 {
7581 if (f == e)
7582 continue;
7583
7584 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7585 {
7586 none_removed = true;
7587 break;
7588 }
7589 }
7590
7591 df = BITMAP_ALLOC (NULL);
7592 df_idom = BITMAP_ALLOC (NULL);
7593
7594 if (none_removed)
7595 bitmap_set_bit (df_idom,
7596 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7597 else
7598 {
7599 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7600 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7601 {
7602 FOR_EACH_EDGE (f, ei, bb->succs)
7603 {
7604 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7605 bitmap_set_bit (df, f->dest->index);
7606 }
7607 }
7608 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7609 bitmap_clear_bit (df, bb->index);
7610
7611 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7612 {
7613 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7614 bitmap_set_bit (df_idom,
7615 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7616 }
7617 }
7618
7619 if (cfgcleanup_altered_bbs)
7620 {
7621 /* Record the set of the altered basic blocks. */
7622 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7623 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7624 }
7625
7626 /* Remove E and the cancelled blocks. */
7627 if (none_removed)
7628 remove_edge (e);
7629 else
7630 {
7631 /* Walk backwards so as to get a chance to substitute all
7632 released DEFs into debug stmts. See
7633 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7634 details. */
7635 for (i = bbs_to_remove.length (); i-- > 0; )
7636 delete_basic_block (bbs_to_remove[i]);
7637 }
7638
7639 /* Update the dominance information. The immediate dominator may change only
7640 for blocks whose immediate dominator belongs to DF_IDOM:
7641
7642 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
   removal.  Let Z be an arbitrary block such that idom(Z) = Y and
7644 Z dominates X after the removal. Before removal, there exists a path P
7645 from Y to X that avoids Z. Let F be the last edge on P that is
7646 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7647 dominates W, and because of P, Z does not dominate W), and W belongs to
7648 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7649 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7650 {
7651 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7652 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7653 dbb;
7654 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7655 bbs_to_fix_dom.safe_push (dbb);
7656 }
7657
7658 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7659
7660 BITMAP_FREE (df);
7661 BITMAP_FREE (df_idom);
7662 bbs_to_remove.release ();
7663 bbs_to_fix_dom.release ();
7664 }
7665
7666 /* Purge dead EH edges from basic block BB. */
7667
7668 bool
7669 gimple_purge_dead_eh_edges (basic_block bb)
7670 {
7671 bool changed = false;
7672 edge e;
7673 edge_iterator ei;
7674 gimple stmt = last_stmt (bb);
7675
7676 if (stmt && stmt_can_throw_internal (stmt))
7677 return false;
7678
7679 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7680 {
7681 if (e->flags & EDGE_EH)
7682 {
7683 remove_edge_and_dominated_blocks (e);
7684 changed = true;
7685 }
7686 else
7687 ei_next (&ei);
7688 }
7689
7690 return changed;
7691 }
7692
/* Purge dead EH edges from the basic blocks listed in BLOCKS.  */
7694
7695 bool
7696 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7697 {
7698 bool changed = false;
7699 unsigned i;
7700 bitmap_iterator bi;
7701
7702 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7703 {
7704 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7705
7706 /* Earlier gimple_purge_dead_eh_edges could have removed
7707 this basic block already. */
7708 gcc_assert (bb || changed);
7709 if (bb != NULL)
7710 changed |= gimple_purge_dead_eh_edges (bb);
7711 }
7712
7713 return changed;
7714 }
7715
7716 /* Purge dead abnormal call edges from basic block BB. */
7717
7718 bool
7719 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7720 {
7721 bool changed = false;
7722 edge e;
7723 edge_iterator ei;
7724 gimple stmt = last_stmt (bb);
7725
7726 if (!cfun->has_nonlocal_label
7727 && !cfun->calls_setjmp)
7728 return false;
7729
7730 if (stmt && stmt_can_make_abnormal_goto (stmt))
7731 return false;
7732
7733 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7734 {
7735 if (e->flags & EDGE_ABNORMAL)
7736 {
7737 if (e->flags & EDGE_FALLTHRU)
7738 e->flags &= ~EDGE_ABNORMAL;
7739 else
7740 remove_edge_and_dominated_blocks (e);
7741 changed = true;
7742 }
7743 else
7744 ei_next (&ei);
7745 }
7746
7747 return changed;
7748 }
7749
/* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */
7751
7752 bool
7753 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7754 {
7755 bool changed = false;
7756 unsigned i;
7757 bitmap_iterator bi;
7758
7759 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7760 {
7761 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7762
7763 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7764 this basic block already. */
7765 gcc_assert (bb || changed);
7766 if (bb != NULL)
7767 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7768 }
7769
7770 return changed;
7771 }
7772
7773 /* This function is called whenever a new edge is created or
7774 redirected. */
7775
7776 static void
7777 gimple_execute_on_growing_pred (edge e)
7778 {
7779 basic_block bb = e->dest;
7780
7781 if (!gimple_seq_empty_p (phi_nodes (bb)))
7782 reserve_phi_args_for_new_edge (bb);
7783 }
7784
7785 /* This function is called immediately before edge E is removed from
7786 the edge vector E->dest->preds. */
7787
7788 static void
7789 gimple_execute_on_shrinking_pred (edge e)
7790 {
7791 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7792 remove_phi_args (e);
7793 }
7794
7795 /*---------------------------------------------------------------------------
7796 Helper functions for Loop versioning
7797 ---------------------------------------------------------------------------*/
7798
/* Adjust phi nodes for 'first' basic block.  'second' basic block is a
   copy of 'first'.  Both of them are dominated by 'new_head' basic block.
   When 'new_head' was created by splitting 'second's incoming edge, the
   edge from 'new_head' to 'second' received phi arguments from
   split_edge ().  Later, an additional edge 'e' was created to connect
   'new_head' and 'first'.  Now this routine adds to edge 'e' the phi args
   that the 'new_head'-to-'second' edge received as part of the edge
   splitting.  */
7806
7807 static void
7808 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7809 basic_block new_head, edge e)
7810 {
7811 gimple phi1, phi2;
7812 gimple_stmt_iterator psi1, psi2;
7813 tree def;
7814 edge e2 = find_edge (new_head, second);
7815
7816 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7817 edge, we should always have an edge from NEW_HEAD to SECOND. */
7818 gcc_assert (e2 != NULL);
7819
7820 /* Browse all 'second' basic block phi nodes and add phi args to
7821 edge 'e' for 'first' head. PHI args are always in correct order. */
7822
7823 for (psi2 = gsi_start_phis (second),
7824 psi1 = gsi_start_phis (first);
7825 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7826 gsi_next (&psi2), gsi_next (&psi1))
7827 {
7828 phi1 = gsi_stmt (psi1);
7829 phi2 = gsi_stmt (psi2);
7830 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7831 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7832 }
7833 }
7834
7835
/* Adds an if-else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
   the destination of the ELSE part.  */
7839
7840 static void
7841 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7842 basic_block second_head ATTRIBUTE_UNUSED,
7843 basic_block cond_bb, void *cond_e)
7844 {
7845 gimple_stmt_iterator gsi;
7846 gimple new_cond_expr;
7847 tree cond_expr = (tree) cond_e;
7848 edge e0;
7849
  /* Build the new conditional expression.  */
7851 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7852 NULL_TREE, NULL_TREE);
7853
7854 /* Add new cond in cond_bb. */
7855 gsi = gsi_last_bb (cond_bb);
7856 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7857
7858 /* Adjust edges appropriately to connect new head with first head
7859 as well as second head. */
7860 e0 = single_succ_edge (cond_bb);
7861 e0->flags &= ~EDGE_FALLTHRU;
7862 e0->flags |= EDGE_FALSE_VALUE;
7863 }
7864
7865
/* Do book-keeping of basic block BB for the profile consistency checker.
   If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1, do
   post-pass accounting.  Store the counts in RECORD.  */
7869 static void
7870 gimple_account_profile_record (basic_block bb, int after_pass,
7871 struct profile_record *record)
7872 {
7873 gimple_stmt_iterator i;
7874 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7875 {
7876 record->size[after_pass]
7877 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7878 if (profile_status == PROFILE_READ)
7879 record->time[after_pass]
7880 += estimate_num_insns (gsi_stmt (i),
7881 &eni_time_weights) * bb->count;
7882 else if (profile_status == PROFILE_GUESSED)
7883 record->time[after_pass]
7884 += estimate_num_insns (gsi_stmt (i),
7885 &eni_time_weights) * bb->frequency;
7886 }
7887 }
7888
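/* Illustrative sketch (hypothetical driver, not part of the pass manager):
   how the book-keeping above could bracket a pass, accumulating pre- and
   post-pass size/time estimates for the current function in RECORD.  */

static void
account_pass_sketch (struct profile_record *record, void (*run_pass) (void))
{
  basic_block bb;

  FOR_EACH_BB (bb)
    gimple_account_profile_record (bb, 0, record);	/* Pre-pass.  */
  run_pass ();
  FOR_EACH_BB (bb)
    gimple_account_profile_record (bb, 1, record);	/* Post-pass.  */
}
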
7889 struct cfg_hooks gimple_cfg_hooks = {
7890 "gimple",
7891 gimple_verify_flow_info,
7892 gimple_dump_bb, /* dump_bb */
7893 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7894 create_bb, /* create_basic_block */
7895 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7896 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7897 gimple_can_remove_branch_p, /* can_remove_branch_p */
7898 remove_bb, /* delete_basic_block */
7899 gimple_split_block, /* split_block */
7900 gimple_move_block_after, /* move_block_after */
7901 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7902 gimple_merge_blocks, /* merge_blocks */
7903 gimple_predict_edge, /* predict_edge */
7904 gimple_predicted_by_p, /* predicted_by_p */
7905 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7906 gimple_duplicate_bb, /* duplicate_block */
7907 gimple_split_edge, /* split_edge */
  gimple_make_forwarder_block,	/* make_forwarder_block */
7909 NULL, /* tidy_fallthru_edge */
7910 NULL, /* force_nonfallthru */
7911 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7912 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7913 gimple_flow_call_edges_add, /* flow_call_edges_add */
7914 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7915 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7916 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7917 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7918 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7919 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7920 flush_pending_stmts, /* flush_pending_stmts */
7921 gimple_empty_block_p, /* block_empty_p */
7922 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7923 gimple_account_profile_record,
7924 };
7925
7926
7927 /* Split all critical edges. */
7928
static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements on edges and expects that, because
	     split_critical_edges was run beforehand, committing those
	     insertions will not split any more edges.  In addition to
	     critical edges we must therefore split edges whose source
	     has multiple successors and ends in a control-flow
	     statement such as RESX.  Go ahead and split them too.
	     This matches the logic in gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
		    || !gimple_seq_empty_p (phi_nodes (e->dest))
		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}

namespace {

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_flow, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}


/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
		 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
		 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
				   GSI_SAME_STMT);
}
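
/* Usage sketch (an added illustration; TYPE, A, B, X and Y are
   hypothetical trees, not names from this file): to emit
   t = (a < b) ? x : y before the statement at *GSI, a pass might write

     tree cmp = gimplify_build2 (gsi, LT_EXPR, boolean_type_node, a, b);
     tree t = gimplify_build3 (gsi, COND_EXPR, type, cmp, x, y);

   Each helper folds the expression and then forces it into a GIMPLE
   value, inserting any generated statements before *GSI.  */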


\f
/* Emit return warnings.  */
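
/* For illustration (hypothetical user code, not part of this file):
   the first warning below would fire for

     __attribute__ ((noreturn)) void f (void) { }

   whose body does return, and the second for

     int g (int x) { if (x) return 1; }

   where control can fall off the end of a non-void function.  */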

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  gimple last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type,
			  "control reaches end of non-void function");
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}


/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
				     edge *true_edge,
				     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
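
/* Usage sketch (an added illustration): for a block BB known to end in
   a GIMPLE_COND,

     edge true_e, false_e;
     extract_true_false_edges_from_block (bb, &true_e, &false_e);

   recovers which successor edge corresponds to each arm of the
   condition.  */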

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_warn_function_return (); }

}; // class pass_warn_function_return

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}

/* Walk a gimplified function and warn about calls whose return value is
   ignored and whose function type carries attribute ((warn_unused_result)).
   This is done before inlining, so we don't have to worry about that.  */
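
/* For example (hypothetical user code): given

     __attribute__ ((warn_unused_result)) int must_check (void);
     void use (void) { must_check (); }

   the call appears as a GIMPLE_CALL with no LHS, and the walk below
   emits -Wunused-result for it.  */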

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (g));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (g));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}

static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}

static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_unused_result (); }
  unsigned int execute () { return run_warn_unused_result (); }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions, or inlining might have
   changed some properties, e.g. marked functions nothrow, pure, const,
   or noreturn.
   Remove redundant edges and basic blocks, and create new ones if
   necessary.

   This pass can't be executed as a standalone pass from the pass
   manager, because between inlining and this fixup verify_flow_info
   would fail.  */
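
/* A small worked example of the count scaling below (illustrative
   numbers, not from the original source): if the cgraph node now
   records a profile count of 200 while the function's entry block
   still says 100, count_scale encodes the ratio 2 (as a fraction of
   REG_BR_PROB_BASE), and every block and edge count in the body is
   multiplied by it so the body agrees with the node again.  */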

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
			    ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);

  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
			    cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
			    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
					 count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not end
	 with a control statement or a noreturn call, end it with a
	 call to __builtin_unreachable.  This situation can occur when
	 inlining a noreturn call that does in fact return.  */
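      /* E.g. (hypothetical source, added for illustration): after
	 inlining

	   __attribute__ ((noreturn)) void f (void) { }

	 whose body returns despite the attribute, the inlined blocks
	 can have no successor edges even though control falls off
	 their end; the __builtin_unreachable call keeps the CFG well
	 formed.  */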
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute () { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}