gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "gimple-pretty-print.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree. */

/* Local declarations. */

/* Initial capacity for the basic block array. */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge. The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process. It would be nice if we could make the table
   more persistent. The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection). */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations. */

static bitmap touched_switch_bbs;

/* CFG statistics. */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks. */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus. */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
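
/* Illustrative example (not from the original sources): if
   "if (x) a = 1; else a = 2;" sits entirely on one source line, the
   blocks for the two arms share that line's locus; giving one of them
   a distinct discriminator lets a sample-based profiler apportion
   counts between the arms.  */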

/* Hashtable helpers. */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t. ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator. */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map. A and B
   point to the two hash table entries to compare. */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table <locus_discrim_hasher> discriminator_per_locus;

/* Basic blocks and flowgraphs. */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges. */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers. */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup. */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array. */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks. */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees. SEQ is the sequence of
   statements to be added to the flowgraph. */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions. */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations. So we factor
     them to a common computed goto location before we build the
     edge list. After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump. */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty. */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array. */
  if (basic_block_info->length () < (size_t) n_basic_blocks)
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels. */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities. */
  group_case_labels ();

  /* Create the edges of the flowgraph. */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto. */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
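
/* E.g. the GNU C statement "goto *ptr;" is a computed goto: the
   GIMPLE_GOTO destination is a pointer value rather than a
   LABEL_DECL.  A plain "goto lab;" is not.  */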


/* Search the CFG for any computed gotos. If found, factor them to a
   common computed goto site. Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form. */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto. */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos. */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it. */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for its goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto. Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below. */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto. */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto. */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR. */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination. */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}
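
/* Illustrative sketch of the factoring done above.  Input of the form

     goto *p1;        ...        goto *p2;

   is rewritten to

     gotovar = p1;               gotovar = p2;
     goto factored_lab;          goto factored_lab;
     ...
   factored_lab:
     goto *gotovar;

   so that only the single factored block carries the many abnormal
   edges a computed goto implies.  */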


/* Build a flowgraph for the sequence of stmts SEQ. */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now. */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes. */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration. */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
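          /* E.g. a returns-twice call such as "x = setjmp (env)" is
             rewritten below as "tmp = setjmp (env)" with "x = tmp"
             emitted right after it, so the abnormal edge leaving the
             call still sees the previous value of x.  */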
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER. */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block. Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here. */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks. */
  link_block (bb, after);

  /* Grow the basic block array if needed. */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array. */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR. */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
                                  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}
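
/* E.g. a GIMPLE_COND testing "1 != 0" folds to a nonzero constant, so
   gimple_cond_make_true rewrites it and the false edge out of the
   block becomes trivially dead (illustrative example).  */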

/* Join all the blocks in the flowgraph. */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it. */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges. */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers. */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them. */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement. */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return. */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering. */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            CASE_GIMPLE_OMP:
              fallthru = make_gimple_omp_edges (bb, &cur_region);
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR. */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS. The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling. */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block. */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}
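
/* Illustrative example: for "while (i < n) i++;" written on a single
   line, the loop-condition block and its successor on the same line
   share a locus, so one of them receives a fresh discriminator and
   profile samples for that line can be split between the blocks.  */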

/* Create the edges for a GIMPLE_COND starting at block BB. */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component. */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore. */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element. */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels. */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels. */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded. */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL. */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available. Return NULL so the caller can detect this case. */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T. Add all the
     elements from T to the hash table then perform the query again. */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain. */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}
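
/* Note that the population above is lazy but complete: the first query
   for any edge of a given switch inserts chains for all of its case
   labels, so later queries for sibling edges are pure table lookups.  */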

/* Create the edges for a GIMPLE_SWITCH starting at block BB. */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST. */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This should make even the dataflow and
     undefined-variable warnings come out reasonably right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for an abnormal goto statement at block BB. If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple label_stmt = gsi_stmt (gsi);
          tree target;

          if (gimple_code (label_stmt) != GIMPLE_LABEL)
            break;

          target = gimple_label_label (label_stmt);

          /* Make an edge to every label block that has been marked as a
             potential target for a computed goto or a non-local goto. */
          if ((FORCED_LABEL (target) && !for_call)
              || (DECL_NONLOCAL (target) && for_call))
            {
              make_edge (bb, target_bb, EDGE_ABNORMAL);
              break;
            }
        }
      if (!gsi_end_p (gsi)
          && is_gimple_debug (gsi_stmt (gsi)))
        gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
        {
          /* Make an edge to every setjmp-like call. */
          gimple call_stmt = gsi_stmt (gsi);
          if (is_gimple_call (call_stmt)
              && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
            make_edge (bb, target_bb, EDGE_ABNORMAL);
        }
    }
}

/* Create edges for a goto statement at block BB. */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges. */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges. */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB. */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks. This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block. */
static struct label_record
{
  /* The label. */
  tree label;

  /* True if the label is referenced from somewhere. */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block. */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree. */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels. This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels. */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block. We use the first user-defined
     label if there is one, or otherwise just the first label we see. */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels. */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label. */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement. */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels. */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges. */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels. */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels. All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved. */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it. */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3: */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases. */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case. */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels. Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one. */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive. */
          if (merge_bb == base_bb
              && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector. */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
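
/* Illustrative example: for

     switch (x) { case 1: case 2: case 3: goto A;
                  case 7: goto D; default: goto D; }

   the routine above drops "case 7" (same destination as the default)
   and merges the consecutive cases into "case 1 ... 3", shrinking the
   label vector from five entries to two.  */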

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases. Combine the ones jumping to the
   same label. */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A. */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks. */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged. */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B. */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);
      /* Do not remove user-forced labels, or at -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches. */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B. If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis. Symbols marked for renaming are never a problem though. */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter. */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus. */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL. */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement. */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it. */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change something from non-invariant to invariant, and
             only when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here. For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges. */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures. */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A. */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* If we maintain loop-closed SSA form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes. This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes. */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt. */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A. */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements. */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences. */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of the two successors of BB that is not reachable by a
   complex edge, if there is one.  Otherwise, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a GIMPLE_CALL.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}
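
/* E.g. a call to __builtin_alloca carries ECF_MAY_BE_ALLOCA and a call
   to setjmp carries ECF_RETURNS_TWICE, so they set cfun->calls_alloca
   and cfun->calls_setjmp respectively.  */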


/* Clear flags set by notice_special_calls. Used by dead code removal
   to update the flags. */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB. */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes. */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors. */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB. */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (dump_file, bb, 0, dump_flags);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it. */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block. */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts. See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details. */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality. */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA. Note that we
                 may be called when not in SSA. For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg. */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned. */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label; if the argument is
         not a label then we cannot construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}

/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
   statement, determine which of the outgoing edges will be taken out of the
   block. Return NULL if either edge may be taken. */

static edge
find_taken_edge_computed_goto (basic_block bb, tree val)
{
  basic_block dest;
  edge e = NULL;

  dest = label_to_block (val);
  if (dest)
    {
      e = find_edge (bb, dest);
      gcc_assert (e != NULL);
    }

  return e;
}

/* Given a constant value VAL and the entry block BB to a COND_EXPR
   statement, determine which of the two edges will be taken out of the
   block. Return NULL if either edge may be taken. */

static edge
find_taken_edge_cond_expr (basic_block bb, tree val)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

  gcc_assert (TREE_CODE (val) == INTEGER_CST);
  return (integer_zerop (val) ? false_edge : true_edge);
}

/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
   statement, determine which edge will be taken out of the block. Return
   NULL if any edge may be taken. */

static edge
find_taken_edge_switch_expr (basic_block bb, tree val)
{
  basic_block dest_bb;
  edge e;
  gimple switch_stmt;
  tree taken_case;

  switch_stmt = last_stmt (bb);
  taken_case = find_case_label_for_value (switch_stmt, val);
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (bb, dest_bb);
  gcc_assert (e);
  return e;
}


/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL. */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val. */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
        high = i;
      else
        low = i;

      if (CASE_HIGH (t) == NULL)
        {
          /* A single-valued case label.  */
          if (cmp == 0)
            return t;
        }
      else
        {
          /* A case range. We can only handle integer ranges. */
          if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
            return t;
        }
    }

  return default_case;
}
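
/* Illustrative example: with sorted case labels 1, 3 ... 5 and 9, a
   query for VAL == 4 narrows to the "3 ... 5" entry, whose CASE_LOW
   compares <= 4 and whose CASE_HIGH compares >= 4, so that label is
   returned; VAL == 7 matches nothing and yields DEFAULT_CASE.  */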
1947
1948
1949 /* Dump a basic block on stderr. */
1950
1951 void
1952 gimple_debug_bb (basic_block bb)
1953 {
1954 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
1955 }
1956
1957
1958 /* Dump basic block with index N on stderr. */
1959
1960 basic_block
1961 gimple_debug_bb_n (int n)
1962 {
1963 gimple_debug_bb (BASIC_BLOCK (n));
1964 return BASIC_BLOCK (n);
1965 }
1966
1967
1968 /* Dump the CFG on stderr.
1969
1970 FLAGS are the same used by the tree dumping functions
1971 (see TDF_* in dumpfile.h). */
1972
1973 void
1974 gimple_debug_cfg (int flags)
1975 {
1976 gimple_dump_cfg (stderr, flags);
1977 }
1978
1979
1980 /* Dump the program showing basic block boundaries on the given FILE.
1981
1982 FLAGS are the same used by the tree dumping functions (see TDF_* in
1983 tree.h). */
1984
1985 void
1986 gimple_dump_cfg (FILE *file, int flags)
1987 {
1988 if (flags & TDF_DETAILS)
1989 {
1990 dump_function_header (file, current_function_decl, flags);
1991 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
1992 n_basic_blocks, n_edges, last_basic_block);
1993
1994 brief_dump_cfg (file, flags | TDF_COMMENT);
1995 fprintf (file, "\n");
1996 }
1997
1998 if (flags & TDF_STATS)
1999 dump_cfg_stats (file);
2000
2001 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2002 }
2003
2004
2005 /* Dump CFG statistics on FILE. */
2006
2007 void
2008 dump_cfg_stats (FILE *file)
2009 {
2010 static long max_num_merged_labels = 0;
2011 unsigned long size, total = 0;
2012 long num_edges;
2013 basic_block bb;
2014 const char * const fmt_str = "%-30s%-13s%12s\n";
2015 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2016 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2017 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2018 const char *funcname = current_function_name ();
2019
2020 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2021
2022 fprintf (file, "---------------------------------------------------------\n");
2023 fprintf (file, fmt_str, "", " Number of ", "Memory");
2024 fprintf (file, fmt_str, "", " instances ", "used ");
2025 fprintf (file, "---------------------------------------------------------\n");
2026
2027 size = n_basic_blocks * sizeof (struct basic_block_def);
2028 total += size;
2029 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2030 SCALE (size), LABEL (size));
2031
2032 num_edges = 0;
2033 FOR_EACH_BB (bb)
2034 num_edges += EDGE_COUNT (bb->succs);
2035 size = num_edges * sizeof (struct edge_def);
2036 total += size;
2037 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2038
2039 fprintf (file, "---------------------------------------------------------\n");
2040 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2041 LABEL (total));
2042 fprintf (file, "---------------------------------------------------------\n");
2043 fprintf (file, "\n");
2044
2045 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2046 max_num_merged_labels = cfg_stats.num_merged_labels;
2047
2048 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2049 cfg_stats.num_merged_labels, max_num_merged_labels);
2050
2051 fprintf (file, "\n");
2052 }
2053
2054
2055 /* Dump CFG statistics on stderr. Keep extern so that it's always
2056 linked in the final executable. */
2057
2058 DEBUG_FUNCTION void
2059 debug_cfg_stats (void)
2060 {
2061 dump_cfg_stats (stderr);
2062 }
2063
2064 /*---------------------------------------------------------------------------
2065 Miscellaneous helpers
2066 ---------------------------------------------------------------------------*/
2067
2068 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2069 flow. Transfers of control flow associated with EH are excluded. */
2070
2071 static bool
2072 call_can_make_abnormal_goto (gimple t)
2073 {
2074 /* If the function has no non-local labels and does not call setjmp,
2075 then a call cannot make an abnormal transfer of control. */
2076 if (!cfun->has_nonlocal_label
2077 && !cfun->calls_setjmp)
2078 return false;
2079
2080 /* Likewise if the call has no side effects. */
2081 if (!gimple_has_side_effects (t))
2082 return false;
2083
2084 /* Likewise if the called function is leaf. */
2085 if (gimple_call_flags (t) & ECF_LEAF)
2086 return false;
2087
2088 return true;
2089 }
2090
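/* Illustrative fragment for the tests above (do_work is a made-up
   function): a longjmp can transfer control back to the setjmp call, so
   a side-effecting, non-leaf call executed afterwards may effectively
   branch abnormally:

       jmp_buf env;
       if (setjmp (env) == 0)   // sets cfun->calls_setjmp
         do_work ();            // may longjmp; needs an abnormal edge
*/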
2091
2092 /* Return true if T can make an abnormal transfer of control flow.
2093 Transfers of control flow associated with EH are excluded. */
2094
2095 bool
2096 stmt_can_make_abnormal_goto (gimple t)
2097 {
2098 if (computed_goto_p (t))
2099 return true;
2100 if (is_gimple_call (t))
2101 return call_can_make_abnormal_goto (t);
2102 return false;
2103 }
2104
2105
2106 /* Return true if T represents a stmt that always transfers control. */
2107
2108 bool
2109 is_ctrl_stmt (gimple t)
2110 {
2111 switch (gimple_code (t))
2112 {
2113 case GIMPLE_COND:
2114 case GIMPLE_SWITCH:
2115 case GIMPLE_GOTO:
2116 case GIMPLE_RETURN:
2117 case GIMPLE_RESX:
2118 return true;
2119 default:
2120 return false;
2121 }
2122 }
2123
2124
2125 /* Return true if T is a statement that may alter the flow of control
2126 (e.g., a call to a non-returning function). */
2127
2128 bool
2129 is_ctrl_altering_stmt (gimple t)
2130 {
2131 gcc_assert (t);
2132
2133 switch (gimple_code (t))
2134 {
2135 case GIMPLE_CALL:
2136 {
2137 int flags = gimple_call_flags (t);
2138
2139 /* A call alters control flow if it can make an abnormal goto. */
2140 if (call_can_make_abnormal_goto (t))
2141 return true;
2142
2143 /* A call also alters control flow if it does not return. */
2144 if (flags & ECF_NORETURN)
2145 return true;
2146
2147 /* TM ending statements have backedges out of the transaction.
2148 Return true so we split the basic block containing them.
2149 Note that the TM_BUILTIN test is merely an optimization. */
2150 if ((flags & ECF_TM_BUILTIN)
2151 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2152 return true;
2153
2154 /* A BUILT_IN_RETURN call is the same as a return statement. */
2155 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2156 return true;
2157 }
2158 break;
2159
2160 case GIMPLE_EH_DISPATCH:
2161 /* EH_DISPATCH branches to the individual catch handlers at
2162 this level of a try or allowed-exceptions region. It can
2163 fallthru to the next statement as well. */
2164 return true;
2165
2166 case GIMPLE_ASM:
2167 if (gimple_asm_nlabels (t) > 0)
2168 return true;
2169 break;
2170
2171 CASE_GIMPLE_OMP:
2172 /* OpenMP directives alter control flow. */
2173 return true;
2174
2175 case GIMPLE_TRANSACTION:
2176 /* A transaction start alters control flow. */
2177 return true;
2178
2179 default:
2180 break;
2181 }
2182
2183 /* If a statement can throw, it alters control flow. */
2184 return stmt_can_throw_internal (t);
2185 }
2186
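/* For instance (illustration only), a call to a noreturn function is
   control-altering and therefore ends its basic block:

       if (x_1 < 0)
         abort ();              // GIMPLE_CALL with ECF_NORETURN
       y_2 = x_1 + 1;           // begins a new block; no fallthru edge
                                // is created from the abort () call
*/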
2187
2188 /* Return true if T is a simple local goto. */
2189
2190 bool
2191 simple_goto_p (gimple t)
2192 {
2193 return (gimple_code (t) == GIMPLE_GOTO
2194 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2195 }
2196
2197
2198 /* Return true if STMT should start a new basic block. PREV_STMT is
2199 the statement preceding STMT. It is used when STMT is a label or a
2200 case label. Labels should only start a new basic block if the
2201 previous statement wasn't a label. Otherwise, a sequence of labels
2202 would generate unnecessary basic blocks that contain nothing but a
2203 single label. */
2204
2205 static inline bool
2206 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2207 {
2208 if (stmt == NULL)
2209 return false;
2210
2211 /* Labels start a new basic block only if the preceding statement
2212 wasn't a label of the same type. This prevents the creation of
2213 consecutive blocks that have nothing but a single label. */
2214 if (gimple_code (stmt) == GIMPLE_LABEL)
2215 {
2216 /* Nonlocal and computed GOTO targets always start a new block. */
2217 if (DECL_NONLOCAL (gimple_label_label (stmt))
2218 || FORCED_LABEL (gimple_label_label (stmt)))
2219 return true;
2220
2221 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2222 {
2223 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2224 return true;
2225
2226 cfg_stats.num_merged_labels++;
2227 return false;
2228 }
2229 else
2230 return true;
2231 }
2232 else if (gimple_code (stmt) == GIMPLE_CALL
2233 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2234 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2235 start a new block. */
2236 return true;
2237
2238 return false;
2239 }
2240
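/* A made-up example of the label-merging rule above. In

       a: b: c:
         x = 1;

   only "a" opens a new basic block; "b" and "c" are merged into it and
   counted in cfg_stats.num_merged_labels, unless one of them is a
   nonlocal or forced label, which always gets a block of its own. */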
2241
2242 /* Return true if T should end a basic block. */
2243
2244 bool
2245 stmt_ends_bb_p (gimple t)
2246 {
2247 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2248 }
2249
2250 /* Remove block annotations and other data structures. */
2251
2252 void
2253 delete_tree_cfg_annotations (void)
2254 {
2255 vec_free (label_to_block_map);
2256 }
2257
2258
2259 /* Return the first statement in basic block BB. */
2260
2261 gimple
2262 first_stmt (basic_block bb)
2263 {
2264 gimple_stmt_iterator i = gsi_start_bb (bb);
2265 gimple stmt = NULL;
2266
2267 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2268 {
2269 gsi_next (&i);
2270 stmt = NULL;
2271 }
2272 return stmt;
2273 }
2274
2275 /* Return the first non-label statement in basic block BB. */
2276
2277 static gimple
2278 first_non_label_stmt (basic_block bb)
2279 {
2280 gimple_stmt_iterator i = gsi_start_bb (bb);
2281 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2282 gsi_next (&i);
2283 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2284 }
2285
2286 /* Return the last statement in basic block BB. */
2287
2288 gimple
2289 last_stmt (basic_block bb)
2290 {
2291 gimple_stmt_iterator i = gsi_last_bb (bb);
2292 gimple stmt = NULL;
2293
2294 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2295 {
2296 gsi_prev (&i);
2297 stmt = NULL;
2298 }
2299 return stmt;
2300 }
2301
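/* Note (illustrative): first_stmt and last_stmt above skip GIMPLE_DEBUG
   statements, so for a block containing

       # DEBUG x => 1
       x_1 = 2;
       # DEBUG y => x_1

   both return the real assignment. This keeps -g from influencing
   decisions that passes base on a block's first or last statement. */
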
2302 /* Return the last statement of an otherwise empty block. Return NULL
2303 if the block is totally empty, or if it contains more than one
2304 statement. */
2305
2306 gimple
2307 last_and_only_stmt (basic_block bb)
2308 {
2309 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2310 gimple last, prev;
2311
2312 if (gsi_end_p (i))
2313 return NULL;
2314
2315 last = gsi_stmt (i);
2316 gsi_prev_nondebug (&i);
2317 if (gsi_end_p (i))
2318 return last;
2319
2320 /* Empty statements should no longer appear in the instruction stream.
2321 Everything that might have appeared before should be deleted by
2322 remove_useless_stmts, and the optimizers should just gsi_remove
2323 instead of smashing with build_empty_stmt.
2324
2325 Thus the only thing that should appear here in a block containing
2326 one executable statement is a label. */
2327 prev = gsi_stmt (i);
2328 if (gimple_code (prev) == GIMPLE_LABEL)
2329 return last;
2330 else
2331 return NULL;
2332 }
2333
2334 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2335
2336 static void
2337 reinstall_phi_args (edge new_edge, edge old_edge)
2338 {
2339 edge_var_map_vector *v;
2340 edge_var_map *vm;
2341 int i;
2342 gimple_stmt_iterator phis;
2343
2344 v = redirect_edge_var_map_vector (old_edge);
2345 if (!v)
2346 return;
2347
2348 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2349 v->iterate (i, &vm) && !gsi_end_p (phis);
2350 i++, gsi_next (&phis))
2351 {
2352 gimple phi = gsi_stmt (phis);
2353 tree result = redirect_edge_var_map_result (vm);
2354 tree arg = redirect_edge_var_map_def (vm);
2355
2356 gcc_assert (result == gimple_phi_result (phi));
2357
2358 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2359 }
2360
2361 redirect_edge_var_map_clear (old_edge);
2362 }
2363
2364 /* Returns the basic block after which the new basic block created
2365 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2366 near its "logical" location. This is of most help to humans looking
2367 at debugging dumps. */
2368
2369 static basic_block
2370 split_edge_bb_loc (edge edge_in)
2371 {
2372 basic_block dest = edge_in->dest;
2373 basic_block dest_prev = dest->prev_bb;
2374
2375 if (dest_prev)
2376 {
2377 edge e = find_edge (dest_prev, dest);
2378 if (e && !(e->flags & EDGE_COMPLEX))
2379 return edge_in->src;
2380 }
2381 return dest_prev;
2382 }
2383
2384 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2385 Abort on abnormal edges. */
2386
2387 static basic_block
2388 gimple_split_edge (edge edge_in)
2389 {
2390 basic_block new_bb, after_bb, dest;
2391 edge new_edge, e;
2392
2393 /* Abnormal edges cannot be split. */
2394 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2395
2396 dest = edge_in->dest;
2397
2398 after_bb = split_edge_bb_loc (edge_in);
2399
2400 new_bb = create_empty_bb (after_bb);
2401 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2402 new_bb->count = edge_in->count;
2403 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2404 new_edge->probability = REG_BR_PROB_BASE;
2405 new_edge->count = edge_in->count;
2406
2407 e = redirect_edge_and_branch (edge_in, new_bb);
2408 gcc_assert (e == edge_in);
2409 reinstall_phi_args (new_edge, e);
2410
2411 return new_bb;
2412 }
2413
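/* Sketch (illustration only) of the transformation performed by
   gimple_split_edge on a critical edge A->B:

       A ---> B               A ---> N ---> B
        \                      \
         `--> C                 `--> C

   The edge is redirected to a fresh empty block N, which falls through
   to B; code later inserted on N runs only on the former A->B path.
   PHI arguments queued on the old edge are reinstalled on the new
   fallthru edge by reinstall_phi_args. */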
2414
2415 /* Verify properties of the address expression T with base object BASE. */
2416
2417 static tree
2418 verify_address (tree t, tree base)
2419 {
2420 bool old_constant;
2421 bool old_side_effects;
2422 bool new_constant;
2423 bool new_side_effects;
2424
2425 old_constant = TREE_CONSTANT (t);
2426 old_side_effects = TREE_SIDE_EFFECTS (t);
2427
2428 recompute_tree_invariant_for_addr_expr (t);
2429 new_side_effects = TREE_SIDE_EFFECTS (t);
2430 new_constant = TREE_CONSTANT (t);
2431
2432 if (old_constant != new_constant)
2433 {
2434 error ("constant not recomputed when ADDR_EXPR changed");
2435 return t;
2436 }
2437 if (old_side_effects != new_side_effects)
2438 {
2439 error ("side effects not recomputed when ADDR_EXPR changed");
2440 return t;
2441 }
2442
2443 if (!(TREE_CODE (base) == VAR_DECL
2444 || TREE_CODE (base) == PARM_DECL
2445 || TREE_CODE (base) == RESULT_DECL))
2446 return NULL_TREE;
2447
2448 if (DECL_GIMPLE_REG_P (base))
2449 {
2450 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2451 return base;
2452 }
2453
2454 return NULL_TREE;
2455 }
2456
2457 /* Callback for walk_tree, check that all elements with address taken are
2458 properly noticed as such. DATA is unused by this callback; it only
2459 exists to satisfy the walk_tree callback signature. */
2460
2461 static tree
2462 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2463 {
2464 tree t = *tp, x;
2465
2466 if (TYPE_P (t))
2467 *walk_subtrees = 0;
2468
2469 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2470 #define CHECK_OP(N, MSG) \
2471 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2472 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2473
2474 switch (TREE_CODE (t))
2475 {
2476 case SSA_NAME:
2477 if (SSA_NAME_IN_FREE_LIST (t))
2478 {
2479 error ("SSA name in freelist but still referenced");
2480 return *tp;
2481 }
2482 break;
2483
2484 case INDIRECT_REF:
2485 error ("INDIRECT_REF in gimple IL");
2486 return t;
2487
2488 case MEM_REF:
2489 x = TREE_OPERAND (t, 0);
2490 if (!POINTER_TYPE_P (TREE_TYPE (x))
2491 || !is_gimple_mem_ref_addr (x))
2492 {
2493 error ("invalid first operand of MEM_REF");
2494 return x;
2495 }
2496 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2497 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2498 {
2499 error ("invalid offset operand of MEM_REF");
2500 return TREE_OPERAND (t, 1);
2501 }
2502 if (TREE_CODE (x) == ADDR_EXPR
2503 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2504 return x;
2505 *walk_subtrees = 0;
2506 break;
2507
2508 case ASSERT_EXPR:
2509 x = fold (ASSERT_EXPR_COND (t));
2510 if (x == boolean_false_node)
2511 {
2512 error ("ASSERT_EXPR with an always-false condition");
2513 return *tp;
2514 }
2515 break;
2516
2517 case MODIFY_EXPR:
2518 error ("MODIFY_EXPR not expected while having tuples");
2519 return *tp;
2520
2521 case ADDR_EXPR:
2522 {
2523 tree tem;
2524
2525 gcc_assert (is_gimple_address (t));
2526
2527 /* Skip any references (they will be checked when we recurse down the
2528 tree) and ensure that any variable used as a prefix is marked
2529 addressable. */
2530 for (x = TREE_OPERAND (t, 0);
2531 handled_component_p (x);
2532 x = TREE_OPERAND (x, 0))
2533 ;
2534
2535 if ((tem = verify_address (t, x)))
2536 return tem;
2537
2538 if (!(TREE_CODE (x) == VAR_DECL
2539 || TREE_CODE (x) == PARM_DECL
2540 || TREE_CODE (x) == RESULT_DECL))
2541 return NULL;
2542
2543 if (!TREE_ADDRESSABLE (x))
2544 {
2545 error ("address taken, but ADDRESSABLE bit not set");
2546 return x;
2547 }
2548
2549 break;
2550 }
2551
2552 case COND_EXPR:
2553 x = COND_EXPR_COND (t);
2554 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2555 {
2556 error ("non-integral used in condition");
2557 return x;
2558 }
2559 if (!is_gimple_condexpr (x))
2560 {
2561 error ("invalid conditional operand");
2562 return x;
2563 }
2564 break;
2565
2566 case NON_LVALUE_EXPR:
2567 case TRUTH_NOT_EXPR:
2568 gcc_unreachable ();
2569
2570 CASE_CONVERT:
2571 case FIX_TRUNC_EXPR:
2572 case FLOAT_EXPR:
2573 case NEGATE_EXPR:
2574 case ABS_EXPR:
2575 case BIT_NOT_EXPR:
2576 CHECK_OP (0, "invalid operand to unary operator");
2577 break;
2578
2579 case REALPART_EXPR:
2580 case IMAGPART_EXPR:
2581 case BIT_FIELD_REF:
2582 if (!is_gimple_reg_type (TREE_TYPE (t)))
2583 {
2584 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2585 return t;
2586 }
2587
2588 if (TREE_CODE (t) == BIT_FIELD_REF)
2589 {
2590 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2591 || !host_integerp (TREE_OPERAND (t, 2), 1))
2592 {
2593 error ("invalid position or size operand to BIT_FIELD_REF");
2594 return t;
2595 }
2596 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2597 && (TYPE_PRECISION (TREE_TYPE (t))
2598 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2599 {
2600 error ("integral result type precision does not match "
2601 "field size of BIT_FIELD_REF");
2602 return t;
2603 }
2604 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2605 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2606 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2607 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2608 {
2609 error ("mode precision of non-integral result does not "
2610 "match field size of BIT_FIELD_REF");
2611 return t;
2612 }
2613 }
2614 t = TREE_OPERAND (t, 0);
2615
2616 /* Fall-through. */
2617 case COMPONENT_REF:
2618 case ARRAY_REF:
2619 case ARRAY_RANGE_REF:
2620 case VIEW_CONVERT_EXPR:
2621 /* We have a nest of references. Verify that each of the operands
2622 that determine where to reference is either a constant or a variable,
2623 verify that the base is valid, and then show we've already checked
2624 the subtrees. */
2625 while (handled_component_p (t))
2626 {
2627 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2628 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2629 else if (TREE_CODE (t) == ARRAY_REF
2630 || TREE_CODE (t) == ARRAY_RANGE_REF)
2631 {
2632 CHECK_OP (1, "invalid array index");
2633 if (TREE_OPERAND (t, 2))
2634 CHECK_OP (2, "invalid array lower bound");
2635 if (TREE_OPERAND (t, 3))
2636 CHECK_OP (3, "invalid array stride");
2637 }
2638 else if (TREE_CODE (t) == BIT_FIELD_REF
2639 || TREE_CODE (t) == REALPART_EXPR
2640 || TREE_CODE (t) == IMAGPART_EXPR)
2641 {
2642 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2643 "REALPART_EXPR");
2644 return t;
2645 }
2646
2647 t = TREE_OPERAND (t, 0);
2648 }
2649
2650 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2651 {
2652 error ("invalid reference prefix");
2653 return t;
2654 }
2655 *walk_subtrees = 0;
2656 break;
2657 case PLUS_EXPR:
2658 case MINUS_EXPR:
2659 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2660 should be done using POINTER_PLUS_EXPR. */
2661 if (POINTER_TYPE_P (TREE_TYPE (t)))
2662 {
2663 error ("invalid operand to plus/minus, type is a pointer");
2664 return t;
2665 }
2666 CHECK_OP (0, "invalid operand to binary operator");
2667 CHECK_OP (1, "invalid operand to binary operator");
2668 break;
2669
2670 case POINTER_PLUS_EXPR:
2671 /* Check to make sure the first operand is a pointer or reference type. */
2672 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2673 {
2674 error ("invalid operand to pointer plus, first operand is not a pointer");
2675 return t;
2676 }
2677 /* Check to make sure the second operand is a ptrofftype. */
2678 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2679 {
2680 error ("invalid operand to pointer plus, second operand is not an "
2681 "integer type of appropriate width");
2682 return t;
2683 }
2684 /* FALLTHROUGH */
2685 case LT_EXPR:
2686 case LE_EXPR:
2687 case GT_EXPR:
2688 case GE_EXPR:
2689 case EQ_EXPR:
2690 case NE_EXPR:
2691 case UNORDERED_EXPR:
2692 case ORDERED_EXPR:
2693 case UNLT_EXPR:
2694 case UNLE_EXPR:
2695 case UNGT_EXPR:
2696 case UNGE_EXPR:
2697 case UNEQ_EXPR:
2698 case LTGT_EXPR:
2699 case MULT_EXPR:
2700 case TRUNC_DIV_EXPR:
2701 case CEIL_DIV_EXPR:
2702 case FLOOR_DIV_EXPR:
2703 case ROUND_DIV_EXPR:
2704 case TRUNC_MOD_EXPR:
2705 case CEIL_MOD_EXPR:
2706 case FLOOR_MOD_EXPR:
2707 case ROUND_MOD_EXPR:
2708 case RDIV_EXPR:
2709 case EXACT_DIV_EXPR:
2710 case MIN_EXPR:
2711 case MAX_EXPR:
2712 case LSHIFT_EXPR:
2713 case RSHIFT_EXPR:
2714 case LROTATE_EXPR:
2715 case RROTATE_EXPR:
2716 case BIT_IOR_EXPR:
2717 case BIT_XOR_EXPR:
2718 case BIT_AND_EXPR:
2719 CHECK_OP (0, "invalid operand to binary operator");
2720 CHECK_OP (1, "invalid operand to binary operator");
2721 break;
2722
2723 case CONSTRUCTOR:
2724 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2725 *walk_subtrees = 0;
2726 break;
2727
2728 case CASE_LABEL_EXPR:
2729 if (CASE_CHAIN (t))
2730 {
2731 error ("invalid CASE_CHAIN");
2732 return t;
2733 }
2734 break;
2735
2736 default:
2737 break;
2738 }
2739 return NULL;
2740
2741 #undef CHECK_OP
2742 }
2743
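/* Illustrative trees for the PLUS_EXPR/POINTER_PLUS_EXPR rule checked
   above (names are made up; assumes 4-byte int). For "int *p; ... p + 4"
   the valid GIMPLE form is

       p.1_2 = p_1 + 16;    // POINTER_PLUS_EXPR, byte offset in sizetype

   whereas a PLUS_EXPR whose type is a pointer type is rejected with
   "invalid operand to plus/minus, type is a pointer". */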
2744
2745 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2746 Returns true if there is an error, otherwise false. */
2747
2748 static bool
2749 verify_types_in_gimple_min_lval (tree expr)
2750 {
2751 tree op;
2752
2753 if (is_gimple_id (expr))
2754 return false;
2755
2756 if (TREE_CODE (expr) != TARGET_MEM_REF
2757 && TREE_CODE (expr) != MEM_REF)
2758 {
2759 error ("invalid expression for min lvalue");
2760 return true;
2761 }
2762
2763 /* TARGET_MEM_REFs are strange beasts. */
2764 if (TREE_CODE (expr) == TARGET_MEM_REF)
2765 return false;
2766
2767 op = TREE_OPERAND (expr, 0);
2768 if (!is_gimple_val (op))
2769 {
2770 error ("invalid operand in indirect reference");
2771 debug_generic_stmt (op);
2772 return true;
2773 }
2774 /* Memory references now generally can involve a value conversion. */
2775
2776 return false;
2777 }
2778
2779 /* Verify if EXPR is a valid GIMPLE reference expression. If
2780 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2781 if there is an error, otherwise false. */
2782
2783 static bool
2784 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2785 {
2786 while (handled_component_p (expr))
2787 {
2788 tree op = TREE_OPERAND (expr, 0);
2789
2790 if (TREE_CODE (expr) == ARRAY_REF
2791 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2792 {
2793 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2794 || (TREE_OPERAND (expr, 2)
2795 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2796 || (TREE_OPERAND (expr, 3)
2797 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2798 {
2799 error ("invalid operands to array reference");
2800 debug_generic_stmt (expr);
2801 return true;
2802 }
2803 }
2804
2805 /* Verify if the reference array element types are compatible. */
2806 if (TREE_CODE (expr) == ARRAY_REF
2807 && !useless_type_conversion_p (TREE_TYPE (expr),
2808 TREE_TYPE (TREE_TYPE (op))))
2809 {
2810 error ("type mismatch in array reference");
2811 debug_generic_stmt (TREE_TYPE (expr));
2812 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2813 return true;
2814 }
2815 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2816 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2817 TREE_TYPE (TREE_TYPE (op))))
2818 {
2819 error ("type mismatch in array range reference");
2820 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2821 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2822 return true;
2823 }
2824
2825 if ((TREE_CODE (expr) == REALPART_EXPR
2826 || TREE_CODE (expr) == IMAGPART_EXPR)
2827 && !useless_type_conversion_p (TREE_TYPE (expr),
2828 TREE_TYPE (TREE_TYPE (op))))
2829 {
2830 error ("type mismatch in real/imagpart reference");
2831 debug_generic_stmt (TREE_TYPE (expr));
2832 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2833 return true;
2834 }
2835
2836 if (TREE_CODE (expr) == COMPONENT_REF
2837 && !useless_type_conversion_p (TREE_TYPE (expr),
2838 TREE_TYPE (TREE_OPERAND (expr, 1))))
2839 {
2840 error ("type mismatch in component reference");
2841 debug_generic_stmt (TREE_TYPE (expr));
2842 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2843 return true;
2844 }
2845
2846 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2847 {
2848 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2849 that their operand is not an SSA name or an invariant when
2850 requiring an lvalue (this usually means there is an SRA or IPA-SRA
2851 bug). Otherwise there is nothing to verify, gross mismatches at
2852 most invoke undefined behavior. */
2853 if (require_lvalue
2854 && (TREE_CODE (op) == SSA_NAME
2855 || is_gimple_min_invariant (op)))
2856 {
2857 error ("conversion of an SSA_NAME on the left hand side");
2858 debug_generic_stmt (expr);
2859 return true;
2860 }
2861 else if (TREE_CODE (op) == SSA_NAME
2862 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2863 {
2864 error ("conversion of register to a different size");
2865 debug_generic_stmt (expr);
2866 return true;
2867 }
2868 else if (!handled_component_p (op))
2869 return false;
2870 }
2871
2872 expr = op;
2873 }
2874
2875 if (TREE_CODE (expr) == MEM_REF)
2876 {
2877 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
2878 {
2879 error ("invalid address operand in MEM_REF");
2880 debug_generic_stmt (expr);
2881 return true;
2882 }
2883 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
2884 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
2885 {
2886 error ("invalid offset operand in MEM_REF");
2887 debug_generic_stmt (expr);
2888 return true;
2889 }
2890 }
2891 else if (TREE_CODE (expr) == TARGET_MEM_REF)
2892 {
2893 if (!TMR_BASE (expr)
2894 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
2895 {
2896 error ("invalid address operand in TARGET_MEM_REF");
2897 return true;
2898 }
2899 if (!TMR_OFFSET (expr)
2900 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
2901 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
2902 {
2903 error ("invalid offset operand in TARGET_MEM_REF");
2904 debug_generic_stmt (expr);
2905 return true;
2906 }
2907 }
2908
2909 return ((require_lvalue || !is_gimple_min_invariant (expr))
2910 && verify_types_in_gimple_min_lval (expr));
2911 }
2912
2913 /* Returns true if a pointer type in the TYPE_POINTER_TO (SRC_OBJ) list
2914 of pointer-to types is trivially convertible to DEST, or if the list is empty. */
2915
2916 static bool
2917 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2918 {
2919 tree src;
2920
2921 if (!TYPE_POINTER_TO (src_obj))
2922 return true;
2923
2924 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2925 if (useless_type_conversion_p (dest, src))
2926 return true;
2927
2928 return false;
2929 }
2930
2931 /* Return true if TYPE1 is a fixed-point type and if conversions to and
2932 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
2933
2934 static bool
2935 valid_fixed_convert_types_p (tree type1, tree type2)
2936 {
2937 return (FIXED_POINT_TYPE_P (type1)
2938 && (INTEGRAL_TYPE_P (type2)
2939 || SCALAR_FLOAT_TYPE_P (type2)
2940 || FIXED_POINT_TYPE_P (type2)));
2941 }
2942
2943 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
2944 is a problem, otherwise false. */
2945
2946 static bool
2947 verify_gimple_call (gimple stmt)
2948 {
2949 tree fn = gimple_call_fn (stmt);
2950 tree fntype, fndecl;
2951 unsigned i;
2952
2953 if (gimple_call_internal_p (stmt))
2954 {
2955 if (fn)
2956 {
2957 error ("gimple call has two targets");
2958 debug_generic_stmt (fn);
2959 return true;
2960 }
2961 }
2962 else
2963 {
2964 if (!fn)
2965 {
2966 error ("gimple call has no target");
2967 return true;
2968 }
2969 }
2970
2971 if (fn && !is_gimple_call_addr (fn))
2972 {
2973 error ("invalid function in gimple call");
2974 debug_generic_stmt (fn);
2975 return true;
2976 }
2977
2978 if (fn
2979 && (!POINTER_TYPE_P (TREE_TYPE (fn))
2980 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
2981 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
2982 {
2983 error ("non-function in gimple call");
2984 return true;
2985 }
2986
2987 fndecl = gimple_call_fndecl (stmt);
2988 if (fndecl
2989 && TREE_CODE (fndecl) == FUNCTION_DECL
2990 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
2991 && !DECL_PURE_P (fndecl)
2992 && !TREE_READONLY (fndecl))
2993 {
2994 error ("invalid pure const state for function");
2995 return true;
2996 }
2997
2998 if (gimple_call_lhs (stmt)
2999 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3000 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3001 {
3002 error ("invalid LHS in gimple call");
3003 return true;
3004 }
3005
3006 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3007 {
3008 error ("LHS in noreturn call");
3009 return true;
3010 }
3011
3012 fntype = gimple_call_fntype (stmt);
3013 if (fntype
3014 && gimple_call_lhs (stmt)
3015 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3016 TREE_TYPE (fntype))
3017 /* ??? At least C++ misses conversions at assignments from
3018 void * call results.
3019 ??? Java is completely off. Especially with functions
3020 returning java.lang.Object.
3021 For now simply allow arbitrary pointer type conversions. */
3022 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3023 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3024 {
3025 error ("invalid conversion in gimple call");
3026 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3027 debug_generic_stmt (TREE_TYPE (fntype));
3028 return true;
3029 }
3030
3031 if (gimple_call_chain (stmt)
3032 && !is_gimple_val (gimple_call_chain (stmt)))
3033 {
3034 error ("invalid static chain in gimple call");
3035 debug_generic_stmt (gimple_call_chain (stmt));
3036 return true;
3037 }
3038
3039 /* If there is a static chain argument, this should not be an indirect
3040 call, and the decl should have DECL_STATIC_CHAIN set. */
3041 if (gimple_call_chain (stmt))
3042 {
3043 if (!gimple_call_fndecl (stmt))
3044 {
3045 error ("static chain in indirect gimple call");
3046 return true;
3047 }
3048 fn = TREE_OPERAND (fn, 0);
3049
3050 if (!DECL_STATIC_CHAIN (fn))
3051 {
3052 error ("static chain with function that doesn%'t use one");
3053 return true;
3054 }
3055 }
3056
3057 /* ??? The C frontend passes unpromoted arguments in case it
3058 didn't see a function declaration before the call. So for now
3059 leave the call arguments mostly unverified. Once we gimplify
3060 unit-at-a-time we have a chance to fix this. */
3061
3062 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3063 {
3064 tree arg = gimple_call_arg (stmt, i);
3065 if ((is_gimple_reg_type (TREE_TYPE (arg))
3066 && !is_gimple_val (arg))
3067 || (!is_gimple_reg_type (TREE_TYPE (arg))
3068 && !is_gimple_lvalue (arg)))
3069 {
3070 error ("invalid argument to gimple call");
3071 debug_generic_expr (arg);
3072 return true;
3073 }
3074 }
3075
3076 return false;
3077 }
3078
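/* Made-up example for the static-chain checks above: with the GNU C
   nested-function extension,

       int outer (int x)
       {
         int inner (void) { return x; }   // inner uses a static chain
         return inner ();                 // direct call carries the chain
       }

   a static chain on an indirect call, or on a call to a function whose
   decl does not have DECL_STATIC_CHAIN set, is reported as an error. */
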
3079 /* Verifies the gimple comparison with the result type TYPE and
3080 the operands OP0 and OP1. */
3081
3082 static bool
3083 verify_gimple_comparison (tree type, tree op0, tree op1)
3084 {
3085 tree op0_type = TREE_TYPE (op0);
3086 tree op1_type = TREE_TYPE (op1);
3087
3088 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3089 {
3090 error ("invalid operands in gimple comparison");
3091 return true;
3092 }
3093
3094 /* For comparisons we do not record the effective type that the
3095 comparison is carried out in. Instead
3096 we require that either the first operand is trivially
3097 convertible into the second, or the other way around.
3098 Because we special-case pointers to void we allow
3099 comparisons of pointers with the same mode as well. */
3100 if (!useless_type_conversion_p (op0_type, op1_type)
3101 && !useless_type_conversion_p (op1_type, op0_type)
3102 && (!POINTER_TYPE_P (op0_type)
3103 || !POINTER_TYPE_P (op1_type)
3104 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3105 {
3106 error ("mismatching comparison operand types");
3107 debug_generic_expr (op0_type);
3108 debug_generic_expr (op1_type);
3109 return true;
3110 }
3111
3112 /* The resulting type of a comparison may be an effective boolean type. */
3113 if (INTEGRAL_TYPE_P (type)
3114 && (TREE_CODE (type) == BOOLEAN_TYPE
3115 || TYPE_PRECISION (type) == 1))
3116 {
3117 if (TREE_CODE (op0_type) == VECTOR_TYPE
3118 || TREE_CODE (op1_type) == VECTOR_TYPE)
3119 {
3120 error ("vector comparison returning a boolean");
3121 debug_generic_expr (op0_type);
3122 debug_generic_expr (op1_type);
3123 return true;
3124 }
3125 }
3126 /* Or an integer vector type with the same size and element count
3127 as the comparison operand types. */
3128 else if (TREE_CODE (type) == VECTOR_TYPE
3129 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3130 {
3131 if (TREE_CODE (op0_type) != VECTOR_TYPE
3132 || TREE_CODE (op1_type) != VECTOR_TYPE)
3133 {
3134 error ("non-vector operands in vector comparison");
3135 debug_generic_expr (op0_type);
3136 debug_generic_expr (op1_type);
3137 return true;
3138 }
3139
3140 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3141 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3142 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3143 /* The result of a vector comparison is of signed
3144 integral type. */
3145 || TYPE_UNSIGNED (TREE_TYPE (type)))
3146 {
3147 error ("invalid vector comparison resulting type");
3148 debug_generic_expr (type);
3149 return true;
3150 }
3151 }
3152 else
3153 {
3154 error ("bogus comparison result type");
3155 debug_generic_expr (type);
3156 return true;
3157 }
3158
3159 return false;
3160 }
3161
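/* Hedged example of the vector-comparison rules above (types are
   illustrative): comparing two 4 x float vectors must produce a signed
   integer vector with matching element count and element size,

       vector(4) int t_1 = v1_2 < v2_3;   // ok: 4 x 32-bit signed result

   while a scalar boolean result for the same operands is rejected with
   "vector comparison returning a boolean". */
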
3162 /* Verify a gimple assignment statement STMT with an unary rhs.
3163 Returns true if anything is wrong. */
3164
3165 static bool
3166 verify_gimple_assign_unary (gimple stmt)
3167 {
3168 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3169 tree lhs = gimple_assign_lhs (stmt);
3170 tree lhs_type = TREE_TYPE (lhs);
3171 tree rhs1 = gimple_assign_rhs1 (stmt);
3172 tree rhs1_type = TREE_TYPE (rhs1);
3173
3174 if (!is_gimple_reg (lhs))
3175 {
3176 error ("non-register as LHS of unary operation");
3177 return true;
3178 }
3179
3180 if (!is_gimple_val (rhs1))
3181 {
3182 error ("invalid operand in unary operation");
3183 return true;
3184 }
3185
3186 /* First handle conversions. */
3187 switch (rhs_code)
3188 {
3189 CASE_CONVERT:
3190 {
3191 /* Allow conversions from pointer type to integral type only if
3192 there is no sign or zero extension involved.
3193 For targets were the precision of ptrofftype doesn't match that
3194 of pointers we need to allow arbitrary conversions to ptrofftype. */
3195 if ((POINTER_TYPE_P (lhs_type)
3196 && INTEGRAL_TYPE_P (rhs1_type))
3197 || (POINTER_TYPE_P (rhs1_type)
3198 && INTEGRAL_TYPE_P (lhs_type)
3199 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3200 || ptrofftype_p (sizetype))))
3201 return false;
3202
3203 /* Allow conversion from integral to offset type and vice versa. */
3204 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3205 && INTEGRAL_TYPE_P (rhs1_type))
3206 || (INTEGRAL_TYPE_P (lhs_type)
3207 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3208 return false;
3209
3210 /* Otherwise assert we are converting between types of the
3211 same kind. */
3212 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3213 {
3214 error ("invalid types in nop conversion");
3215 debug_generic_expr (lhs_type);
3216 debug_generic_expr (rhs1_type);
3217 return true;
3218 }
3219
3220 return false;
3221 }
3222
3223 case ADDR_SPACE_CONVERT_EXPR:
3224 {
3225 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3226 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3227 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3228 {
3229 error ("invalid types in address space conversion");
3230 debug_generic_expr (lhs_type);
3231 debug_generic_expr (rhs1_type);
3232 return true;
3233 }
3234
3235 return false;
3236 }
3237
3238 case FIXED_CONVERT_EXPR:
3239 {
3240 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3241 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3242 {
3243 error ("invalid types in fixed-point conversion");
3244 debug_generic_expr (lhs_type);
3245 debug_generic_expr (rhs1_type);
3246 return true;
3247 }
3248
3249 return false;
3250 }
3251
3252 case FLOAT_EXPR:
3253 {
3254 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3255 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3256 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3257 {
3258 error ("invalid types in conversion to floating point");
3259 debug_generic_expr (lhs_type);
3260 debug_generic_expr (rhs1_type);
3261 return true;
3262 }
3263
3264 return false;
3265 }
3266
3267 case FIX_TRUNC_EXPR:
3268 {
3269 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3270 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3271 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3272 {
3273 error ("invalid types in conversion to integer");
3274 debug_generic_expr (lhs_type);
3275 debug_generic_expr (rhs1_type);
3276 return true;
3277 }
3278
3279 return false;
3280 }
3281
3282 case VEC_UNPACK_HI_EXPR:
3283 case VEC_UNPACK_LO_EXPR:
3284 case REDUC_MAX_EXPR:
3285 case REDUC_MIN_EXPR:
3286 case REDUC_PLUS_EXPR:
3287 case VEC_UNPACK_FLOAT_HI_EXPR:
3288 case VEC_UNPACK_FLOAT_LO_EXPR:
3289 /* FIXME. */
3290 return false;
3291
3292 case NEGATE_EXPR:
3293 case ABS_EXPR:
3294 case BIT_NOT_EXPR:
3295 case PAREN_EXPR:
3296 case NON_LVALUE_EXPR:
3297 case CONJ_EXPR:
3298 break;
3299
3300 default:
3301 gcc_unreachable ();
3302 }
3303
3304 /* For the remaining codes assert there is no conversion involved. */
3305 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3306 {
3307 error ("non-trivial conversion in unary operation");
3308 debug_generic_expr (lhs_type);
3309 debug_generic_expr (rhs1_type);
3310 return true;
3311 }
3312
3313 return false;
3314 }
3315
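/* Illustrative NOP-conversion cases for the rules above (assuming
   64-bit pointers; names are made up):

       l_1 = (long) p_2;        // ok: same precision as the pointer
       s_3 = (short) p_2;       // ok: truncation, no extension involved
       w_4 = (__int128) p_2;    // rejected: would sign/zero extend; use
                                // an intermediate pointer-width integer

   Integer-to-pointer conversions are always accepted here. */
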
3316 /* Verify a gimple assignment statement STMT with a binary rhs.
3317 Returns true if anything is wrong. */
3318
3319 static bool
3320 verify_gimple_assign_binary (gimple stmt)
3321 {
3322 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3323 tree lhs = gimple_assign_lhs (stmt);
3324 tree lhs_type = TREE_TYPE (lhs);
3325 tree rhs1 = gimple_assign_rhs1 (stmt);
3326 tree rhs1_type = TREE_TYPE (rhs1);
3327 tree rhs2 = gimple_assign_rhs2 (stmt);
3328 tree rhs2_type = TREE_TYPE (rhs2);
3329
3330 if (!is_gimple_reg (lhs))
3331 {
3332 error ("non-register as LHS of binary operation");
3333 return true;
3334 }
3335
3336 if (!is_gimple_val (rhs1)
3337 || !is_gimple_val (rhs2))
3338 {
3339 error ("invalid operands in binary operation");
3340 return true;
3341 }
3342
3343 /* First handle operations that involve different types. */
3344 switch (rhs_code)
3345 {
3346 case COMPLEX_EXPR:
3347 {
3348 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3349 || !(INTEGRAL_TYPE_P (rhs1_type)
3350 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3351 || !(INTEGRAL_TYPE_P (rhs2_type)
3352 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3353 {
3354 error ("type mismatch in complex expression");
3355 debug_generic_expr (lhs_type);
3356 debug_generic_expr (rhs1_type);
3357 debug_generic_expr (rhs2_type);
3358 return true;
3359 }
3360
3361 return false;
3362 }
3363
3364 case LSHIFT_EXPR:
3365 case RSHIFT_EXPR:
3366 case LROTATE_EXPR:
3367 case RROTATE_EXPR:
3368 {
3369 /* Shifts and rotates are ok on integral types, fixed point
3370 types and integer vector types. */
3371 if ((!INTEGRAL_TYPE_P (rhs1_type)
3372 && !FIXED_POINT_TYPE_P (rhs1_type)
3373 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3374 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3375 || (!INTEGRAL_TYPE_P (rhs2_type)
3376 /* Vector shifts of vectors are also ok. */
3377 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3378 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3379 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3380 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3381 || !useless_type_conversion_p (lhs_type, rhs1_type))
3382 {
3383 error ("type mismatch in shift expression");
3384 debug_generic_expr (lhs_type);
3385 debug_generic_expr (rhs1_type);
3386 debug_generic_expr (rhs2_type);
3387 return true;
3388 }
3389
3390 return false;
3391 }
3392
3393 case VEC_LSHIFT_EXPR:
3394 case VEC_RSHIFT_EXPR:
3395 {
3396 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3397 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3398 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3399 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3400 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3401 || (!INTEGRAL_TYPE_P (rhs2_type)
3402 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3403 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3404 || !useless_type_conversion_p (lhs_type, rhs1_type))
3405 {
3406 error ("type mismatch in vector shift expression");
3407 debug_generic_expr (lhs_type);
3408 debug_generic_expr (rhs1_type);
3409 debug_generic_expr (rhs2_type);
3410 return true;
3411 }
3412 /* For shifting a vector of non-integral components we
3413 only allow shifting by a constant multiple of the element size. */
3414 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3415 && (TREE_CODE (rhs2) != INTEGER_CST
3416 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3417 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3418 {
3419 error ("non-element sized vector shift of floating point vector");
3420 return true;
3421 }
3422
3423 return false;
3424 }
3425
3426 case WIDEN_LSHIFT_EXPR:
3427 {
3428 if (!INTEGRAL_TYPE_P (lhs_type)
3429 || !INTEGRAL_TYPE_P (rhs1_type)
3430 || TREE_CODE (rhs2) != INTEGER_CST
3431 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3432 {
3433 error ("type mismatch in widening vector shift expression");
3434 debug_generic_expr (lhs_type);
3435 debug_generic_expr (rhs1_type);
3436 debug_generic_expr (rhs2_type);
3437 return true;
3438 }
3439
3440 return false;
3441 }
3442
3443 case VEC_WIDEN_LSHIFT_HI_EXPR:
3444 case VEC_WIDEN_LSHIFT_LO_EXPR:
3445 {
3446 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3447 || TREE_CODE (lhs_type) != VECTOR_TYPE
3448 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3449 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3450 || TREE_CODE (rhs2) != INTEGER_CST
3451 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3452 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3453 {
3454 error ("type mismatch in widening vector shift expression");
3455 debug_generic_expr (lhs_type);
3456 debug_generic_expr (rhs1_type);
3457 debug_generic_expr (rhs2_type);
3458 return true;
3459 }
3460
3461 return false;
3462 }
3463
3464 case PLUS_EXPR:
3465 case MINUS_EXPR:
3466 {
3467 tree lhs_etype = lhs_type;
3468 tree rhs1_etype = rhs1_type;
3469 tree rhs2_etype = rhs2_type;
3470 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3471 {
3472 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3473 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3474 {
3475 error ("invalid non-vector operands to vector valued plus");
3476 return true;
3477 }
3478 lhs_etype = TREE_TYPE (lhs_type);
3479 rhs1_etype = TREE_TYPE (rhs1_type);
3480 rhs2_etype = TREE_TYPE (rhs2_type);
3481 }
3482 if (POINTER_TYPE_P (lhs_etype)
3483 || POINTER_TYPE_P (rhs1_etype)
3484 || POINTER_TYPE_P (rhs2_etype))
3485 {
3486 error ("invalid (pointer) operands to plus/minus");
3487 return true;
3488 }
3489
3490 /* Continue with generic binary expression handling. */
3491 break;
3492 }
3493
3494 case POINTER_PLUS_EXPR:
3495 {
3496 if (!POINTER_TYPE_P (rhs1_type)
3497 || !useless_type_conversion_p (lhs_type, rhs1_type)
3498 || !ptrofftype_p (rhs2_type))
3499 {
3500 error ("type mismatch in pointer plus expression");
3501 debug_generic_stmt (lhs_type);
3502 debug_generic_stmt (rhs1_type);
3503 debug_generic_stmt (rhs2_type);
3504 return true;
3505 }
3506
3507 return false;
3508 }
3509
3510 case TRUTH_ANDIF_EXPR:
3511 case TRUTH_ORIF_EXPR:
3512 case TRUTH_AND_EXPR:
3513 case TRUTH_OR_EXPR:
3514 case TRUTH_XOR_EXPR:
3515
3516 gcc_unreachable ();
3517
3518 case LT_EXPR:
3519 case LE_EXPR:
3520 case GT_EXPR:
3521 case GE_EXPR:
3522 case EQ_EXPR:
3523 case NE_EXPR:
3524 case UNORDERED_EXPR:
3525 case ORDERED_EXPR:
3526 case UNLT_EXPR:
3527 case UNLE_EXPR:
3528 case UNGT_EXPR:
3529 case UNGE_EXPR:
3530 case UNEQ_EXPR:
3531 case LTGT_EXPR:
3532 /* Comparisons are also binary, but the result type is not
3533 connected to the operand types. */
3534 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3535
3536 case WIDEN_MULT_EXPR:
3537 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3538 return true;
3539 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3540 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3541
3542 case WIDEN_SUM_EXPR:
3543 case VEC_WIDEN_MULT_HI_EXPR:
3544 case VEC_WIDEN_MULT_LO_EXPR:
3545 case VEC_WIDEN_MULT_EVEN_EXPR:
3546 case VEC_WIDEN_MULT_ODD_EXPR:
3547 case VEC_PACK_TRUNC_EXPR:
3548 case VEC_PACK_SAT_EXPR:
3549 case VEC_PACK_FIX_TRUNC_EXPR:
3550 /* FIXME. */
3551 return false;
3552
3553 case MULT_EXPR:
3554 case MULT_HIGHPART_EXPR:
3555 case TRUNC_DIV_EXPR:
3556 case CEIL_DIV_EXPR:
3557 case FLOOR_DIV_EXPR:
3558 case ROUND_DIV_EXPR:
3559 case TRUNC_MOD_EXPR:
3560 case CEIL_MOD_EXPR:
3561 case FLOOR_MOD_EXPR:
3562 case ROUND_MOD_EXPR:
3563 case RDIV_EXPR:
3564 case EXACT_DIV_EXPR:
3565 case MIN_EXPR:
3566 case MAX_EXPR:
3567 case BIT_IOR_EXPR:
3568 case BIT_XOR_EXPR:
3569 case BIT_AND_EXPR:
3570 /* Continue with generic binary expression handling. */
3571 break;
3572
3573 default:
3574 gcc_unreachable ();
3575 }
3576
3577 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3578 || !useless_type_conversion_p (lhs_type, rhs2_type))
3579 {
3580 error ("type mismatch in binary expression");
3581 debug_generic_stmt (lhs_type);
3582 debug_generic_stmt (rhs1_type);
3583 debug_generic_stmt (rhs2_type);
3584 return true;
3585 }
3586
3587 return false;
3588 }
3589
3590 /* Verify a gimple assignment statement STMT with a ternary rhs.
3591 Returns true if anything is wrong. */
3592
3593 static bool
3594 verify_gimple_assign_ternary (gimple stmt)
3595 {
3596 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3597 tree lhs = gimple_assign_lhs (stmt);
3598 tree lhs_type = TREE_TYPE (lhs);
3599 tree rhs1 = gimple_assign_rhs1 (stmt);
3600 tree rhs1_type = TREE_TYPE (rhs1);
3601 tree rhs2 = gimple_assign_rhs2 (stmt);
3602 tree rhs2_type = TREE_TYPE (rhs2);
3603 tree rhs3 = gimple_assign_rhs3 (stmt);
3604 tree rhs3_type = TREE_TYPE (rhs3);
3605
3606 if (!is_gimple_reg (lhs))
3607 {
3608 error ("non-register as LHS of ternary operation");
3609 return true;
3610 }
3611
3612 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3613 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3614 || !is_gimple_val (rhs2)
3615 || !is_gimple_val (rhs3))
3616 {
3617 error ("invalid operands in ternary operation");
3618 return true;
3619 }
3620
3621 /* First handle operations that involve different types. */
3622 switch (rhs_code)
3623 {
3624 case WIDEN_MULT_PLUS_EXPR:
3625 case WIDEN_MULT_MINUS_EXPR:
3626 if ((!INTEGRAL_TYPE_P (rhs1_type)
3627 && !FIXED_POINT_TYPE_P (rhs1_type))
3628 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3629 || !useless_type_conversion_p (lhs_type, rhs3_type)
3630 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3631 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3632 {
3633 error ("type mismatch in widening multiply-accumulate expression");
3634 debug_generic_expr (lhs_type);
3635 debug_generic_expr (rhs1_type);
3636 debug_generic_expr (rhs2_type);
3637 debug_generic_expr (rhs3_type);
3638 return true;
3639 }
3640 break;
3641
3642 case FMA_EXPR:
3643 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3644 || !useless_type_conversion_p (lhs_type, rhs2_type)
3645 || !useless_type_conversion_p (lhs_type, rhs3_type))
3646 {
3647 error ("type mismatch in fused multiply-add expression");
3648 debug_generic_expr (lhs_type);
3649 debug_generic_expr (rhs1_type);
3650 debug_generic_expr (rhs2_type);
3651 debug_generic_expr (rhs3_type);
3652 return true;
3653 }
3654 break;
3655
3656 case COND_EXPR:
3657 case VEC_COND_EXPR:
3658 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3659 || !useless_type_conversion_p (lhs_type, rhs3_type))
3660 {
3661 error ("type mismatch in conditional expression");
3662 debug_generic_expr (lhs_type);
3663 debug_generic_expr (rhs2_type);
3664 debug_generic_expr (rhs3_type);
3665 return true;
3666 }
3667 break;
3668
3669 case VEC_PERM_EXPR:
3670 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3671 || !useless_type_conversion_p (lhs_type, rhs2_type))
3672 {
3673 error ("type mismatch in vector permute expression");
3674 debug_generic_expr (lhs_type);
3675 debug_generic_expr (rhs1_type);
3676 debug_generic_expr (rhs2_type);
3677 debug_generic_expr (rhs3_type);
3678 return true;
3679 }
3680
3681 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3682 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3683 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3684 {
3685 error ("vector types expected in vector permute expression");
3686 debug_generic_expr (lhs_type);
3687 debug_generic_expr (rhs1_type);
3688 debug_generic_expr (rhs2_type);
3689 debug_generic_expr (rhs3_type);
3690 return true;
3691 }
3692
3693 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3694 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3695 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3696 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3697 != TYPE_VECTOR_SUBPARTS (lhs_type))
3698 {
3699 error ("vectors with different element number found "
3700 "in vector permute expression");
3701 debug_generic_expr (lhs_type);
3702 debug_generic_expr (rhs1_type);
3703 debug_generic_expr (rhs2_type);
3704 debug_generic_expr (rhs3_type);
3705 return true;
3706 }
3707
3708 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3709 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3710 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3711 {
3712 error ("invalid mask type in vector permute expression");
3713 debug_generic_expr (lhs_type);
3714 debug_generic_expr (rhs1_type);
3715 debug_generic_expr (rhs2_type);
3716 debug_generic_expr (rhs3_type);
3717 return true;
3718 }
3719
3720 return false;
3721
3722 case DOT_PROD_EXPR:
3723 case REALIGN_LOAD_EXPR:
3724 /* FIXME. */
3725 return false;
3726
3727 default:
3728 gcc_unreachable ();
3729 }
3730 return false;
3731 }
3732
3733 /* Verify a gimple assignment statement STMT with a single rhs.
3734 Returns true if anything is wrong. */
3735
3736 static bool
3737 verify_gimple_assign_single (gimple stmt)
3738 {
3739 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3740 tree lhs = gimple_assign_lhs (stmt);
3741 tree lhs_type = TREE_TYPE (lhs);
3742 tree rhs1 = gimple_assign_rhs1 (stmt);
3743 tree rhs1_type = TREE_TYPE (rhs1);
3744 bool res = false;
3745
3746 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3747 {
3748 error ("non-trivial conversion at assignment");
3749 debug_generic_expr (lhs_type);
3750 debug_generic_expr (rhs1_type);
3751 return true;
3752 }
3753
3754 if (gimple_clobber_p (stmt)
3755 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3756 {
3757 error ("non-decl/MEM_REF LHS in clobber statement");
3758 debug_generic_expr (lhs);
3759 return true;
3760 }
3761
3762 if (handled_component_p (lhs))
3763 res |= verify_types_in_gimple_reference (lhs, true);
3764
3765 /* Special codes we cannot handle via their class. */
3766 switch (rhs_code)
3767 {
3768 case ADDR_EXPR:
3769 {
3770 tree op = TREE_OPERAND (rhs1, 0);
3771 if (!is_gimple_addressable (op))
3772 {
3773 error ("invalid operand in unary expression");
3774 return true;
3775 }
3776
3777 /* Technically there is no longer a need for matching types, but
3778 gimple hygiene asks for this check. In LTO we can end up
3779 combining incompatible units and thus end up with addresses
3780 of globals that change their type to a common one. */
3781 if (!in_lto_p
3782 && !types_compatible_p (TREE_TYPE (op),
3783 TREE_TYPE (TREE_TYPE (rhs1)))
3784 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3785 TREE_TYPE (op)))
3786 {
3787 error ("type mismatch in address expression");
3788 debug_generic_stmt (TREE_TYPE (rhs1));
3789 debug_generic_stmt (TREE_TYPE (op));
3790 return true;
3791 }
3792
3793 return verify_types_in_gimple_reference (op, true);
3794 }
3795
3796 /* tcc_reference */
3797 case INDIRECT_REF:
3798 error ("INDIRECT_REF in gimple IL");
3799 return true;
3800
3801 case COMPONENT_REF:
3802 case BIT_FIELD_REF:
3803 case ARRAY_REF:
3804 case ARRAY_RANGE_REF:
3805 case VIEW_CONVERT_EXPR:
3806 case REALPART_EXPR:
3807 case IMAGPART_EXPR:
3808 case TARGET_MEM_REF:
3809 case MEM_REF:
3810 if (!is_gimple_reg (lhs)
3811 && is_gimple_reg_type (TREE_TYPE (lhs)))
3812 {
3813 error ("invalid rhs for gimple memory store");
3814 debug_generic_stmt (lhs);
3815 debug_generic_stmt (rhs1);
3816 return true;
3817 }
3818 return res || verify_types_in_gimple_reference (rhs1, false);
3819
3820 /* tcc_constant */
3821 case SSA_NAME:
3822 case INTEGER_CST:
3823 case REAL_CST:
3824 case FIXED_CST:
3825 case COMPLEX_CST:
3826 case VECTOR_CST:
3827 case STRING_CST:
3828 return res;
3829
3830 /* tcc_declaration */
3831 case CONST_DECL:
3832 return res;
3833 case VAR_DECL:
3834 case PARM_DECL:
3835 if (!is_gimple_reg (lhs)
3836 && !is_gimple_reg (rhs1)
3837 && is_gimple_reg_type (TREE_TYPE (lhs)))
3838 {
3839 error ("invalid rhs for gimple memory store");
3840 debug_generic_stmt (lhs);
3841 debug_generic_stmt (rhs1);
3842 return true;
3843 }
3844 return res;
3845
3846 case CONSTRUCTOR:
3847 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3848 {
3849 unsigned int i;
3850 tree elt_i, elt_v, elt_t = NULL_TREE;
3851
3852 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3853 return res;
3854 /* For vector CONSTRUCTORs we require that either it is an empty
3855 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3856 (then the element count must be correct to cover the whole
3857 outer vector and the index must be NULL on all elements), or it
3858 is a CONSTRUCTOR of scalar elements, where we as an exception
3859 allow a smaller number of elements (assuming zero filling) and
3860 consecutive indexes as compared to NULL indexes (such
3861 CONSTRUCTORs can appear in the IL from FEs). */
3862 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3863 {
3864 if (elt_t == NULL_TREE)
3865 {
3866 elt_t = TREE_TYPE (elt_v);
3867 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3868 {
3869 tree elt_t = TREE_TYPE (elt_v);
3870 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3871 TREE_TYPE (elt_t)))
3872 {
3873 error ("incorrect type of vector CONSTRUCTOR"
3874 " elements");
3875 debug_generic_stmt (rhs1);
3876 return true;
3877 }
3878 else if (CONSTRUCTOR_NELTS (rhs1)
3879 * TYPE_VECTOR_SUBPARTS (elt_t)
3880 != TYPE_VECTOR_SUBPARTS (rhs1_type))
3881 {
3882 error ("incorrect number of vector CONSTRUCTOR"
3883 " elements");
3884 debug_generic_stmt (rhs1);
3885 return true;
3886 }
3887 }
3888 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3889 elt_t))
3890 {
3891 error ("incorrect type of vector CONSTRUCTOR elements");
3892 debug_generic_stmt (rhs1);
3893 return true;
3894 }
3895 else if (CONSTRUCTOR_NELTS (rhs1)
3896 > TYPE_VECTOR_SUBPARTS (rhs1_type))
3897 {
3898 error ("incorrect number of vector CONSTRUCTOR elements");
3899 debug_generic_stmt (rhs1);
3900 return true;
3901 }
3902 }
3903 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
3904 {
3905 error ("incorrect type of vector CONSTRUCTOR elements");
3906 debug_generic_stmt (rhs1);
3907 return true;
3908 }
3909 if (elt_i != NULL_TREE
3910 && (TREE_CODE (elt_t) == VECTOR_TYPE
3911 || TREE_CODE (elt_i) != INTEGER_CST
3912 || compare_tree_int (elt_i, i) != 0))
3913 {
3914 error ("vector CONSTRUCTOR with non-NULL element index");
3915 debug_generic_stmt (rhs1);
3916 return true;
3917 }
3918 }
3919 }
3920 return res;
3921 case OBJ_TYPE_REF:
3922 case ASSERT_EXPR:
3923 case WITH_SIZE_EXPR:
3924 /* FIXME. */
3925 return res;
3926
3927 default:;
3928 }
3929
3930 return res;
3931 }
3932
3933 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3934 is a problem, otherwise false. */
3935
3936 static bool
3937 verify_gimple_assign (gimple stmt)
3938 {
3939 switch (gimple_assign_rhs_class (stmt))
3940 {
3941 case GIMPLE_SINGLE_RHS:
3942 return verify_gimple_assign_single (stmt);
3943
3944 case GIMPLE_UNARY_RHS:
3945 return verify_gimple_assign_unary (stmt);
3946
3947 case GIMPLE_BINARY_RHS:
3948 return verify_gimple_assign_binary (stmt);
3949
3950 case GIMPLE_TERNARY_RHS:
3951 return verify_gimple_assign_ternary (stmt);
3952
3953 default:
3954 gcc_unreachable ();
3955 }
3956 }
3957
3958 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3959 is a problem, otherwise false. */
3960
3961 static bool
3962 verify_gimple_return (gimple stmt)
3963 {
3964 tree op = gimple_return_retval (stmt);
3965 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3966
3967 /* We cannot test for present return values as we do not fix up missing
3968 return values from the original source. */
3969 if (op == NULL)
3970 return false;
3971
3972 if (!is_gimple_val (op)
3973 && TREE_CODE (op) != RESULT_DECL)
3974 {
3975 error ("invalid operand in return statement");
3976 debug_generic_stmt (op);
3977 return true;
3978 }
3979
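/* For a result passed by reference, the RESULT_DECL (or the SSA name
   based on it) has pointer type; replacing OP by its type here makes
   the TREE_TYPE (op) in the check below yield the pointed-to type,
   which is what must match RESTYPE.  */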
3980 if ((TREE_CODE (op) == RESULT_DECL
3981 && DECL_BY_REFERENCE (op))
3982 || (TREE_CODE (op) == SSA_NAME
3983 && SSA_NAME_VAR (op)
3984 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
3985 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
3986 op = TREE_TYPE (op);
3987
3988 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
3989 {
3990 error ("invalid conversion in return statement");
3991 debug_generic_stmt (restype);
3992 debug_generic_stmt (TREE_TYPE (op));
3993 return true;
3994 }
3995
3996 return false;
3997 }
3998
3999
4000 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4001 is a problem, otherwise false. */
4002
4003 static bool
4004 verify_gimple_goto (gimple stmt)
4005 {
4006 tree dest = gimple_goto_dest (stmt);
4007
4008 /* ??? We have two canonical forms of direct goto destinations, a
4009 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4010 if (TREE_CODE (dest) != LABEL_DECL
4011 && (!is_gimple_val (dest)
4012 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4013 {
4014 error ("goto destination is neither a label nor a pointer");
4015 return true;
4016 }
4017
4018 return false;
4019 }
4020
4021 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4022 is a problem, otherwise false. */
4023
4024 static bool
4025 verify_gimple_switch (gimple stmt)
4026 {
4027 unsigned int i, n;
4028 tree elt, prev_upper_bound = NULL_TREE;
4029 tree index_type, elt_type = NULL_TREE;
4030
4031 if (!is_gimple_val (gimple_switch_index (stmt)))
4032 {
4033 error ("invalid operand to switch statement");
4034 debug_generic_stmt (gimple_switch_index (stmt));
4035 return true;
4036 }
4037
4038 index_type = TREE_TYPE (gimple_switch_index (stmt));
4039 if (! INTEGRAL_TYPE_P (index_type))
4040 {
4041 error ("non-integral type switch statement");
4042 debug_generic_expr (index_type);
4043 return true;
4044 }
4045
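/* E.g. a well-formed case vector looks like
     switch (i_1) <default: L0, case 1: L1, case 3 ... 5: L2>
   (illustrative): the default label comes first, followed by the
   remaining labels with ascending, non-overlapping ranges, all of a
   single type.  */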
4046 elt = gimple_switch_label (stmt, 0);
4047 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4048 {
4049 error ("invalid default case label in switch statement");
4050 debug_generic_expr (elt);
4051 return true;
4052 }
4053
4054 n = gimple_switch_num_labels (stmt);
4055 for (i = 1; i < n; i++)
4056 {
4057 elt = gimple_switch_label (stmt, i);
4058
4059 if (! CASE_LOW (elt))
4060 {
4061 error ("invalid case label in switch statement");
4062 debug_generic_expr (elt);
4063 return true;
4064 }
4065 if (CASE_HIGH (elt)
4066 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4067 {
4068 error ("invalid case range in switch statement");
4069 debug_generic_expr (elt);
4070 return true;
4071 }
4072
4073 if (elt_type)
4074 {
4075 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4076 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4077 {
4078 error ("type mismatch for case label in switch statement");
4079 debug_generic_expr (elt);
4080 return true;
4081 }
4082 }
4083 else
4084 {
4085 elt_type = TREE_TYPE (CASE_LOW (elt));
4086 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4087 {
4088 error ("type precision mismatch in switch statement");
4089 return true;
4090 }
4091 }
4092
4093 if (prev_upper_bound)
4094 {
4095 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4096 {
4097 error ("case labels not sorted in switch statement");
4098 return true;
4099 }
4100 }
4101
4102 prev_upper_bound = CASE_HIGH (elt);
4103 if (! prev_upper_bound)
4104 prev_upper_bound = CASE_LOW (elt);
4105 }
4106
4107 return false;
4108 }
4109
4110 /* Verify a gimple debug statement STMT.
4111 Returns true if anything is wrong. */
4112
4113 static bool
4114 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4115 {
4116 /* There isn't much that could be wrong in a gimple debug stmt. A
4117 gimple debug bind stmt, for example, maps a tree that is usually
4118 a VAR_DECL or a PARM_DECL, but could also be some scalarized
4119 component or member of an aggregate type, to another tree that
4120 can be an arbitrary expression. These stmts expand into debug
4121 insns, and are converted to debug notes by var-tracking.c. */
4122 return false;
4123 }
4124
4125 /* Verify a gimple label statement STMT.
4126 Returns true if anything is wrong. */
4127
4128 static bool
4129 verify_gimple_label (gimple stmt)
4130 {
4131 tree decl = gimple_label_label (stmt);
4132 int uid;
4133 bool err = false;
4134
4135 if (TREE_CODE (decl) != LABEL_DECL)
4136 return true;
4137 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4138 && DECL_CONTEXT (decl) != current_function_decl)
4139 {
4140 error ("label's context is not the current function decl");
4141 err |= true;
4142 }
4143
4144 uid = LABEL_DECL_UID (decl);
4145 if (cfun->cfg
4146 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4147 {
4148 error ("incorrect entry in label_to_block_map");
4149 err |= true;
4150 }
4151
4152 uid = EH_LANDING_PAD_NR (decl);
4153 if (uid)
4154 {
4155 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4156 if (decl != lp->post_landing_pad)
4157 {
4158 error ("incorrect setting of landing pad number");
4159 err |= true;
4160 }
4161 }
4162
4163 return err;
4164 }
4165
4166 /* Verify the GIMPLE statement STMT. Returns true if there is an
4167 error, otherwise false. */
4168
4169 static bool
4170 verify_gimple_stmt (gimple stmt)
4171 {
4172 switch (gimple_code (stmt))
4173 {
4174 case GIMPLE_ASSIGN:
4175 return verify_gimple_assign (stmt);
4176
4177 case GIMPLE_LABEL:
4178 return verify_gimple_label (stmt);
4179
4180 case GIMPLE_CALL:
4181 return verify_gimple_call (stmt);
4182
4183 case GIMPLE_COND:
4184 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4185 {
4186 error ("invalid comparison code in gimple cond");
4187 return true;
4188 }
4189 if (!(!gimple_cond_true_label (stmt)
4190 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4191 || !(!gimple_cond_false_label (stmt)
4192 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4193 {
4194 error ("invalid labels in gimple cond");
4195 return true;
4196 }
4197
4198 return verify_gimple_comparison (boolean_type_node,
4199 gimple_cond_lhs (stmt),
4200 gimple_cond_rhs (stmt));
4201
4202 case GIMPLE_GOTO:
4203 return verify_gimple_goto (stmt);
4204
4205 case GIMPLE_SWITCH:
4206 return verify_gimple_switch (stmt);
4207
4208 case GIMPLE_RETURN:
4209 return verify_gimple_return (stmt);
4210
4211 case GIMPLE_ASM:
4212 return false;
4213
4214 case GIMPLE_TRANSACTION:
4215 return verify_gimple_transaction (stmt);
4216
4217 /* Tuples that do not have tree operands. */
4218 case GIMPLE_NOP:
4219 case GIMPLE_PREDICT:
4220 case GIMPLE_RESX:
4221 case GIMPLE_EH_DISPATCH:
4222 case GIMPLE_EH_MUST_NOT_THROW:
4223 return false;
4224
4225 CASE_GIMPLE_OMP:
4226 /* OpenMP directives are validated by the FE and never operated
4227 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4228 non-gimple expressions when the main index variable has had
4229 its address taken. This does not affect the loop itself
4230 because the header of a GIMPLE_OMP_FOR is merely used to determine
4231 how to set up the parallel iteration. */
4232 return false;
4233
4234 case GIMPLE_DEBUG:
4235 return verify_gimple_debug (stmt);
4236
4237 default:
4238 gcc_unreachable ();
4239 }
4240 }
4241
4242 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4243 and false otherwise. */
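/* For reference, a well-formed PHI looks like, e.g.,
     x_3 = PHI <x_1(2), x_2(4)>
   (invented names): an SSA name result with one argument per
   incoming edge, all of compatible type; a virtual PHI instead uses
   the single virtual operand for its result and every argument.  */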
4244
4245 static bool
4246 verify_gimple_phi (gimple phi)
4247 {
4248 bool err = false;
4249 unsigned i;
4250 tree phi_result = gimple_phi_result (phi);
4251 bool virtual_p;
4252
4253 if (!phi_result)
4254 {
4255 error ("invalid PHI result");
4256 return true;
4257 }
4258
4259 virtual_p = virtual_operand_p (phi_result);
4260 if (TREE_CODE (phi_result) != SSA_NAME
4261 || (virtual_p
4262 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4263 {
4264 error ("invalid PHI result");
4265 err = true;
4266 }
4267
4268 for (i = 0; i < gimple_phi_num_args (phi); i++)
4269 {
4270 tree t = gimple_phi_arg_def (phi, i);
4271
4272 if (!t)
4273 {
4274 error ("missing PHI def");
4275 err |= true;
4276 continue;
4277 }
4278 /* Addressable variables do have SSA_NAMEs but they
4279 are not considered gimple values. */
4280 else if ((TREE_CODE (t) == SSA_NAME
4281 && virtual_p != virtual_operand_p (t))
4282 || (virtual_p
4283 && (TREE_CODE (t) != SSA_NAME
4284 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4285 || (!virtual_p
4286 && !is_gimple_val (t)))
4287 {
4288 error ("invalid PHI argument");
4289 debug_generic_expr (t);
4290 err |= true;
4291 }
4292 #ifdef ENABLE_TYPES_CHECKING
4293 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4294 {
4295 error ("incompatible types in PHI argument %u", i);
4296 debug_generic_stmt (TREE_TYPE (phi_result));
4297 debug_generic_stmt (TREE_TYPE (t));
4298 err |= true;
4299 }
4300 #endif
4301 }
4302
4303 return err;
4304 }
4305
4306 /* Verify the GIMPLE statements inside the sequence STMTS. */
4307
4308 static bool
4309 verify_gimple_in_seq_2 (gimple_seq stmts)
4310 {
4311 gimple_stmt_iterator itr;
4312 bool err = false;
4313
4314 for (itr = gsi_start (stmts); !gsi_end_p (itr); gsi_next (&itr))
4315 {
4316 gimple stmt = gsi_stmt (itr);
4317
4318 switch (gimple_code (stmt))
4319 {
4320 case GIMPLE_BIND:
4321 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4322 break;
4323
4324 case GIMPLE_TRY:
4325 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4326 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4327 break;
4328
4329 case GIMPLE_EH_FILTER:
4330 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4331 break;
4332
4333 case GIMPLE_EH_ELSE:
4334 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4335 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4336 break;
4337
4338 case GIMPLE_CATCH:
4339 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4340 break;
4341
4342 case GIMPLE_TRANSACTION:
4343 err |= verify_gimple_transaction (stmt);
4344 break;
4345
4346 default:
4347 {
4348 bool err2 = verify_gimple_stmt (stmt);
4349 if (err2)
4350 debug_gimple_stmt (stmt);
4351 err |= err2;
4352 }
4353 }
4354 }
4355
4356 return err;
4357 }
4358
4359 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4360 is a problem, otherwise false. */
4361
4362 static bool
4363 verify_gimple_transaction (gimple stmt)
4364 {
4365 tree lab = gimple_transaction_label (stmt);
4366 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4367 return true;
4368 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4369 }
4370
4371
4372 /* Verify the GIMPLE statements inside the statement list STMTS. */
4373
4374 DEBUG_FUNCTION void
4375 verify_gimple_in_seq (gimple_seq stmts)
4376 {
4377 timevar_push (TV_TREE_STMT_VERIFY);
4378 if (verify_gimple_in_seq_2 (stmts))
4379 internal_error ("verify_gimple failed");
4380 timevar_pop (TV_TREE_STMT_VERIFY);
4381 }
4382
4383 /* Return true when T can be shared. */
4384
4385 static bool
4386 tree_node_can_be_shared (tree t)
4387 {
4388 if (IS_TYPE_OR_DECL_P (t)
4389 || is_gimple_min_invariant (t)
4390 || TREE_CODE (t) == SSA_NAME
4391 || t == error_mark_node
4392 || TREE_CODE (t) == IDENTIFIER_NODE)
4393 return true;
4394
4395 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4396 return true;
4397
4398 if (DECL_P (t))
4399 return true;
4400
4401 return false;
4402 }
4403
4404 /* Called via walk_tree. Verify tree sharing. */
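/* A node that cannot legitimately be shared must have exactly one
   parent in the IL.  When pointer_set_insert reports the node as
   already visited, the walk is therefore aborted by returning the
   offending node so the caller can report the invalid sharing.  */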
4405
4406 static tree
4407 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4408 {
4409 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4410
4411 if (tree_node_can_be_shared (*tp))
4412 {
4413 *walk_subtrees = false;
4414 return NULL;
4415 }
4416
4417 if (pointer_set_insert (visited, *tp))
4418 return *tp;
4419
4420 return NULL;
4421 }
4422
4423 /* Called via walk_gimple_stmt. Verify tree sharing. */
4424
4425 static tree
4426 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4427 {
4428 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4429 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4430 }
4431
4432 static bool eh_error_found;
4433 static int
4434 verify_eh_throw_stmt_node (void **slot, void *data)
4435 {
4436 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4437 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4438
4439 if (!pointer_set_contains (visited, node->stmt))
4440 {
4441 error ("dead STMT in EH table");
4442 debug_gimple_stmt (node->stmt);
4443 eh_error_found = true;
4444 }
4445 return 1;
4446 }
4447
4448 /* Verify that LOC's block is in the set BLOCKS. */
4449
4450 static bool
4451 verify_location (pointer_set_t *blocks, location_t loc)
4452 {
4453 tree block = LOCATION_BLOCK (loc);
4454 if (block != NULL_TREE
4455 && !pointer_set_contains (blocks, block))
4456 {
4457 error ("location references block not in block tree");
4458 return true;
4459 }
4460 if (block != NULL_TREE)
4461 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4462 return false;
4463 }
4464
4465 /* Called via walk_tree. Verify that expressions have no blocks. */
4466
4467 static tree
4468 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4469 {
4470 if (!EXPR_P (*tp))
4471 {
4472 *walk_subtrees = false;
4473 return NULL;
4474 }
4475
4476 location_t loc = EXPR_LOCATION (*tp);
4477 if (LOCATION_BLOCK (loc) != NULL)
4478 return *tp;
4479
4480 return NULL;
4481 }
4482
4483 /* Called via walk_tree. Verify locations of expressions. */
4484
4485 static tree
4486 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4487 {
4488 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4489
4490 if (TREE_CODE (*tp) == VAR_DECL
4491 && DECL_HAS_DEBUG_EXPR_P (*tp))
4492 {
4493 tree t = DECL_DEBUG_EXPR (*tp);
4494 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4495 if (addr)
4496 return addr;
4497 }
4498 if ((TREE_CODE (*tp) == VAR_DECL
4499 || TREE_CODE (*tp) == PARM_DECL
4500 || TREE_CODE (*tp) == RESULT_DECL)
4501 && DECL_HAS_VALUE_EXPR_P (*tp))
4502 {
4503 tree t = DECL_VALUE_EXPR (*tp);
4504 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4505 if (addr)
4506 return addr;
4507 }
4508
4509 if (!EXPR_P (*tp))
4510 {
4511 *walk_subtrees = false;
4512 return NULL;
4513 }
4514
4515 location_t loc = EXPR_LOCATION (*tp);
4516 if (verify_location (blocks, loc))
4517 return *tp;
4518
4519 return NULL;
4520 }
4521
4522 /* Called via walk_gimple_op. Verify locations of expressions. */
4523
4524 static tree
4525 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4526 {
4527 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4528 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4529 }
4530
4531 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4532
4533 static void
4534 collect_subblocks (pointer_set_t *blocks, tree block)
4535 {
4536 tree t;
4537 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4538 {
4539 pointer_set_insert (blocks, t);
4540 collect_subblocks (blocks, t);
4541 }
4542 }
4543
4544 /* Verify the GIMPLE statements in the CFG of FN. */
4545
4546 DEBUG_FUNCTION void
4547 verify_gimple_in_cfg (struct function *fn)
4548 {
4549 basic_block bb;
4550 bool err = false;
4551 struct pointer_set_t *visited, *visited_stmts, *blocks;
4552
4553 timevar_push (TV_TREE_STMT_VERIFY);
4554 visited = pointer_set_create ();
4555 visited_stmts = pointer_set_create ();
4556
4557 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4558 blocks = pointer_set_create ();
4559 if (DECL_INITIAL (fn->decl))
4560 {
4561 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4562 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4563 }
4564
4565 FOR_EACH_BB_FN (bb, fn)
4566 {
4567 gimple_stmt_iterator gsi;
4568
4569 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4570 {
4571 gimple phi = gsi_stmt (gsi);
4572 bool err2 = false;
4573 unsigned i;
4574
4575 pointer_set_insert (visited_stmts, phi);
4576
4577 if (gimple_bb (phi) != bb)
4578 {
4579 error ("gimple_bb (phi) is set to a wrong basic block");
4580 err2 = true;
4581 }
4582
4583 err2 |= verify_gimple_phi (phi);
4584
4585 /* Only PHI arguments have locations. */
4586 if (gimple_location (phi) != UNKNOWN_LOCATION)
4587 {
4588 error ("PHI node with location");
4589 err2 = true;
4590 }
4591
4592 for (i = 0; i < gimple_phi_num_args (phi); i++)
4593 {
4594 tree arg = gimple_phi_arg_def (phi, i);
4595 tree addr = walk_tree (&arg, verify_node_sharing_1,
4596 visited, NULL);
4597 if (addr)
4598 {
4599 error ("incorrect sharing of tree nodes");
4600 debug_generic_expr (addr);
4601 err2 |= true;
4602 }
4603 location_t loc = gimple_phi_arg_location (phi, i);
4604 if (virtual_operand_p (gimple_phi_result (phi))
4605 && loc != UNKNOWN_LOCATION)
4606 {
4607 error ("virtual PHI with argument locations");
4608 err2 = true;
4609 }
4610 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4611 if (addr)
4612 {
4613 debug_generic_expr (addr);
4614 err2 = true;
4615 }
4616 err2 |= verify_location (blocks, loc);
4617 }
4618
4619 if (err2)
4620 debug_gimple_stmt (phi);
4621 err |= err2;
4622 }
4623
4624 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4625 {
4626 gimple stmt = gsi_stmt (gsi);
4627 bool err2 = false;
4628 struct walk_stmt_info wi;
4629 tree addr;
4630 int lp_nr;
4631
4632 pointer_set_insert (visited_stmts, stmt);
4633
4634 if (gimple_bb (stmt) != bb)
4635 {
4636 error ("gimple_bb (stmt) is set to a wrong basic block");
4637 err2 = true;
4638 }
4639
4640 err2 |= verify_gimple_stmt (stmt);
4641 err2 |= verify_location (blocks, gimple_location (stmt));
4642
4643 memset (&wi, 0, sizeof (wi));
4644 wi.info = (void *) visited;
4645 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4646 if (addr)
4647 {
4648 error ("incorrect sharing of tree nodes");
4649 debug_generic_expr (addr);
4650 err2 |= true;
4651 }
4652
4653 memset (&wi, 0, sizeof (wi));
4654 wi.info = (void *) blocks;
4655 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4656 if (addr)
4657 {
4658 debug_generic_expr (addr);
4659 err2 |= true;
4660 }
4661
4662 /* ??? Instead of not checking these stmts at all the walker
4663 should know its context via wi. */
4664 if (!is_gimple_debug (stmt)
4665 && !is_gimple_omp (stmt))
4666 {
4667 memset (&wi, 0, sizeof (wi));
4668 addr = walk_gimple_op (stmt, verify_expr, &wi);
4669 if (addr)
4670 {
4671 debug_generic_expr (addr);
4672 inform (gimple_location (stmt), "in statement");
4673 err2 |= true;
4674 }
4675 }
4676
4677 /* If the statement is marked as part of an EH region, then it is
4678 expected that the statement could throw. Verify that when we
4679 have optimizations that simplify statements such that we prove
4680 that they cannot throw, that we update other data structures
4681 to match. */
4682 lp_nr = lookup_stmt_eh_lp (stmt);
4683 if (lp_nr != 0)
4684 {
4685 if (!stmt_could_throw_p (stmt))
4686 {
4687 error ("statement marked for throw, but doesn%'t");
4688 err2 |= true;
4689 }
4690 else if (lp_nr > 0
4691 && !gsi_one_before_end_p (gsi)
4692 && stmt_can_throw_internal (stmt))
4693 {
4694 error ("statement marked for throw in middle of block");
4695 err2 |= true;
4696 }
4697 }
4698
4699 if (err2)
4700 debug_gimple_stmt (stmt);
4701 err |= err2;
4702 }
4703 }
4704
4705 eh_error_found = false;
4706 if (get_eh_throw_stmt_table (cfun))
4707 htab_traverse (get_eh_throw_stmt_table (cfun),
4708 verify_eh_throw_stmt_node,
4709 visited_stmts);
4710
4711 if (err || eh_error_found)
4712 internal_error ("verify_gimple failed");
4713
4714 pointer_set_destroy (visited);
4715 pointer_set_destroy (visited_stmts);
4716 pointer_set_destroy (blocks);
4717 verify_histograms ();
4718 timevar_pop (TV_TREE_STMT_VERIFY);
4719 }
4720
4721
4722 /* Verifies that the flow information is OK. */
4723
4724 static int
4725 gimple_verify_flow_info (void)
4726 {
4727 int err = 0;
4728 basic_block bb;
4729 gimple_stmt_iterator gsi;
4730 gimple stmt;
4731 edge e;
4732 edge_iterator ei;
4733
4734 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4735 {
4736 error ("ENTRY_BLOCK has IL associated with it");
4737 err = 1;
4738 }
4739
4740 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4741 {
4742 error ("EXIT_BLOCK has IL associated with it");
4743 err = 1;
4744 }
4745
4746 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4747 if (e->flags & EDGE_FALLTHRU)
4748 {
4749 error ("fallthru to exit from bb %d", e->src->index);
4750 err = 1;
4751 }
4752
4753 FOR_EACH_BB (bb)
4754 {
4755 bool found_ctrl_stmt = false;
4756
4757 stmt = NULL;
4758
4759 /* Skip labels at the start of the basic block. */
4760 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4761 {
4762 tree label;
4763 gimple prev_stmt = stmt;
4764
4765 stmt = gsi_stmt (gsi);
4766
4767 if (gimple_code (stmt) != GIMPLE_LABEL)
4768 break;
4769
4770 label = gimple_label_label (stmt);
4771 if (prev_stmt && DECL_NONLOCAL (label))
4772 {
4773 error ("nonlocal label ");
4774 print_generic_expr (stderr, label, 0);
4775 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4776 bb->index);
4777 err = 1;
4778 }
4779
4780 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4781 {
4782 error ("EH landing pad label ");
4783 print_generic_expr (stderr, label, 0);
4784 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4785 bb->index);
4786 err = 1;
4787 }
4788
4789 if (label_to_block (label) != bb)
4790 {
4791 error ("label ");
4792 print_generic_expr (stderr, label, 0);
4793 fprintf (stderr, " to block does not match in bb %d",
4794 bb->index);
4795 err = 1;
4796 }
4797
4798 if (decl_function_context (label) != current_function_decl)
4799 {
4800 error ("label ");
4801 print_generic_expr (stderr, label, 0);
4802 fprintf (stderr, " has incorrect context in bb %d",
4803 bb->index);
4804 err = 1;
4805 }
4806 }
4807
4808 /* Verify that body of basic block BB is free of control flow. */
4809 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4810 {
4811 gimple stmt = gsi_stmt (gsi);
4812
4813 if (found_ctrl_stmt)
4814 {
4815 error ("control flow in the middle of basic block %d",
4816 bb->index);
4817 err = 1;
4818 }
4819
4820 if (stmt_ends_bb_p (stmt))
4821 found_ctrl_stmt = true;
4822
4823 if (gimple_code (stmt) == GIMPLE_LABEL)
4824 {
4825 error ("label ");
4826 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4827 fprintf (stderr, " in the middle of basic block %d", bb->index);
4828 err = 1;
4829 }
4830 }
4831
4832 gsi = gsi_last_bb (bb);
4833 if (gsi_end_p (gsi))
4834 continue;
4835
4836 stmt = gsi_stmt (gsi);
4837
4838 if (gimple_code (stmt) == GIMPLE_LABEL)
4839 continue;
4840
4841 err |= verify_eh_edges (stmt);
4842
4843 if (is_ctrl_stmt (stmt))
4844 {
4845 FOR_EACH_EDGE (e, ei, bb->succs)
4846 if (e->flags & EDGE_FALLTHRU)
4847 {
4848 error ("fallthru edge after a control statement in bb %d",
4849 bb->index);
4850 err = 1;
4851 }
4852 }
4853
4854 if (gimple_code (stmt) != GIMPLE_COND)
4855 {
4856 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4857 after anything other than a GIMPLE_COND statement. */
4858 FOR_EACH_EDGE (e, ei, bb->succs)
4859 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4860 {
4861 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4862 bb->index);
4863 err = 1;
4864 }
4865 }
4866
4867 switch (gimple_code (stmt))
4868 {
4869 case GIMPLE_COND:
4870 {
4871 edge true_edge;
4872 edge false_edge;
4873
4874 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4875
4876 if (!true_edge
4877 || !false_edge
4878 || !(true_edge->flags & EDGE_TRUE_VALUE)
4879 || !(false_edge->flags & EDGE_FALSE_VALUE)
4880 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4881 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4882 || EDGE_COUNT (bb->succs) >= 3)
4883 {
4884 error ("wrong outgoing edge flags at end of bb %d",
4885 bb->index);
4886 err = 1;
4887 }
4888 }
4889 break;
4890
4891 case GIMPLE_GOTO:
4892 if (simple_goto_p (stmt))
4893 {
4894 error ("explicit goto at end of bb %d", bb->index);
4895 err = 1;
4896 }
4897 else
4898 {
4899 /* FIXME. We should double check that the labels in the
4900 destination blocks have their address taken. */
4901 FOR_EACH_EDGE (e, ei, bb->succs)
4902 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4903 | EDGE_FALSE_VALUE))
4904 || !(e->flags & EDGE_ABNORMAL))
4905 {
4906 error ("wrong outgoing edge flags at end of bb %d",
4907 bb->index);
4908 err = 1;
4909 }
4910 }
4911 break;
4912
4913 case GIMPLE_CALL:
4914 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4915 break;
4916 /* ... fallthru ... */
4917 case GIMPLE_RETURN:
4918 if (!single_succ_p (bb)
4919 || (single_succ_edge (bb)->flags
4920 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4921 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4922 {
4923 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4924 err = 1;
4925 }
4926 if (single_succ (bb) != EXIT_BLOCK_PTR)
4927 {
4928 error ("return edge does not point to exit in bb %d",
4929 bb->index);
4930 err = 1;
4931 }
4932 break;
4933
4934 case GIMPLE_SWITCH:
4935 {
4936 tree prev;
4937 edge e;
4938 size_t i, n;
4939
4940 n = gimple_switch_num_labels (stmt);
4941
4942 /* Mark all the destination basic blocks. */
4943 for (i = 0; i < n; ++i)
4944 {
4945 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4946 basic_block label_bb = label_to_block (lab);
4947 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4948 label_bb->aux = (void *)1;
4949 }
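/* AUX implements a small two-phase marking protocol here:
   (void *)1 marks a block as a case destination, and it is bumped
   to (void *)2 once an outgoing edge to it has been seen.  An edge
   to an unmarked block is thus extra, and a block still at 1
   afterwards is missing its edge.  */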
4950
4951 /* Verify that the case labels are sorted. */
4952 prev = gimple_switch_label (stmt, 0);
4953 for (i = 1; i < n; ++i)
4954 {
4955 tree c = gimple_switch_label (stmt, i);
4956 if (!CASE_LOW (c))
4957 {
4958 error ("found default case not at the start of "
4959 "case vector");
4960 err = 1;
4961 continue;
4962 }
4963 if (CASE_LOW (prev)
4964 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4965 {
4966 error ("case labels not sorted: ");
4967 print_generic_expr (stderr, prev, 0);
4968 fprintf (stderr," is greater than ");
4969 print_generic_expr (stderr, c, 0);
4970 fprintf (stderr," but comes before it.\n");
4971 err = 1;
4972 }
4973 prev = c;
4974 }
4975 /* VRP will remove the default case if it can prove it will
4976 never be executed. So do not verify there always exists
4977 a default case here. */
4978
4979 FOR_EACH_EDGE (e, ei, bb->succs)
4980 {
4981 if (!e->dest->aux)
4982 {
4983 error ("extra outgoing edge %d->%d",
4984 bb->index, e->dest->index);
4985 err = 1;
4986 }
4987
4988 e->dest->aux = (void *)2;
4989 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4990 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4991 {
4992 error ("wrong outgoing edge flags at end of bb %d",
4993 bb->index);
4994 err = 1;
4995 }
4996 }
4997
4998 /* Check that we have all of them. */
4999 for (i = 0; i < n; ++i)
5000 {
5001 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5002 basic_block label_bb = label_to_block (lab);
5003
5004 if (label_bb->aux != (void *)2)
5005 {
5006 error ("missing edge %i->%i", bb->index, label_bb->index);
5007 err = 1;
5008 }
5009 }
5010
5011 FOR_EACH_EDGE (e, ei, bb->succs)
5012 e->dest->aux = (void *)0;
5013 }
5014 break;
5015
5016 case GIMPLE_EH_DISPATCH:
5017 err |= verify_eh_dispatch_edge (stmt);
5018 break;
5019
5020 default:
5021 break;
5022 }
5023 }
5024
5025 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5026 verify_dominators (CDI_DOMINATORS);
5027
5028 return err;
5029 }
5030
5031
5032 /* Updates phi nodes after creating a forwarder block joined
5033 by edge FALLTHRU. */
5034
5035 static void
5036 gimple_make_forwarder_block (edge fallthru)
5037 {
5038 edge e;
5039 edge_iterator ei;
5040 basic_block dummy, bb;
5041 tree var;
5042 gimple_stmt_iterator gsi;
5043
5044 dummy = fallthru->src;
5045 bb = fallthru->dest;
5046
5047 if (single_pred_p (bb))
5048 return;
5049
5050 /* If we redirected a branch we must create new PHI nodes at the
5051 start of BB. */
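/* E.g. a PHI  x_2 = PHI <a_0(E1), b_1(E2)>  that ended up in DUMMY
   (invented names) is renamed there to  x_3 = PHI <a_0(E1), b_1(E2)>,
   while BB receives the new  x_2 = PHI <x_3(FALLTHRU)>; arguments
   for BB's other predecessors are flushed from the edges below.  */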
5052 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5053 {
5054 gimple phi, new_phi;
5055
5056 phi = gsi_stmt (gsi);
5057 var = gimple_phi_result (phi);
5058 new_phi = create_phi_node (var, bb);
5059 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5060 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5061 UNKNOWN_LOCATION);
5062 }
5063
5064 /* Add the arguments we have stored on edges. */
5065 FOR_EACH_EDGE (e, ei, bb->preds)
5066 {
5067 if (e == fallthru)
5068 continue;
5069
5070 flush_pending_stmts (e);
5071 }
5072 }
5073
5074
5075 /* Return a non-special label at the head of basic block BB.
5076 Create one if it doesn't exist. */
5077
5078 tree
5079 gimple_block_label (basic_block bb)
5080 {
5081 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5082 bool first = true;
5083 tree label;
5084 gimple stmt;
5085
5086 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5087 {
5088 stmt = gsi_stmt (i);
5089 if (gimple_code (stmt) != GIMPLE_LABEL)
5090 break;
5091 label = gimple_label_label (stmt);
5092 if (!DECL_NONLOCAL (label))
5093 {
5094 if (!first)
5095 gsi_move_before (&i, &s);
5096 return label;
5097 }
5098 }
5099
5100 label = create_artificial_label (UNKNOWN_LOCATION);
5101 stmt = gimple_build_label (label);
5102 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5103 return label;
5104 }
5105
5106
5107 /* Attempt to perform edge redirection by replacing a possibly complex
5108 jump instruction by a goto or by removing the jump completely.
5109 This can apply only if all edges now point to the same block. The
5110 parameters and return values are equivalent to
5111 redirect_edge_and_branch. */
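/* E.g. when both outgoing edges of a block ending in a GIMPLE_COND
   would end up in TARGET, the condition is useless: the statement is
   removed and E becomes a plain fallthru edge.  */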
5112
5113 static edge
5114 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5115 {
5116 basic_block src = e->src;
5117 gimple_stmt_iterator i;
5118 gimple stmt;
5119
5120 /* We can replace or remove a complex jump only when we have exactly
5121 two edges. */
5122 if (EDGE_COUNT (src->succs) != 2
5123 /* Verify that all targets will be TARGET. Specifically, the
5124 edge that is not E must also go to TARGET. */
5125 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5126 return NULL;
5127
5128 i = gsi_last_bb (src);
5129 if (gsi_end_p (i))
5130 return NULL;
5131
5132 stmt = gsi_stmt (i);
5133
5134 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5135 {
5136 gsi_remove (&i, true);
5137 e = ssa_redirect_edge (e, target);
5138 e->flags = EDGE_FALLTHRU;
5139 return e;
5140 }
5141
5142 return NULL;
5143 }
5144
5145
5146 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5147 edge representing the redirected branch. */
5148
5149 static edge
5150 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5151 {
5152 basic_block bb = e->src;
5153 gimple_stmt_iterator gsi;
5154 edge ret;
5155 gimple stmt;
5156
5157 if (e->flags & EDGE_ABNORMAL)
5158 return NULL;
5159
5160 if (e->dest == dest)
5161 return NULL;
5162
5163 if (e->flags & EDGE_EH)
5164 return redirect_eh_edge (e, dest);
5165
5166 if (e->src != ENTRY_BLOCK_PTR)
5167 {
5168 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5169 if (ret)
5170 return ret;
5171 }
5172
5173 gsi = gsi_last_bb (bb);
5174 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5175
5176 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5177 {
5178 case GIMPLE_COND:
5179 /* For COND_EXPR, we only need to redirect the edge. */
5180 break;
5181
5182 case GIMPLE_GOTO:
5183 /* No non-abnormal edges should lead from a non-simple goto, and
5184 simple ones should be represented implicitly. */
5185 gcc_unreachable ();
5186
5187 case GIMPLE_SWITCH:
5188 {
5189 tree label = gimple_block_label (dest);
5190 tree cases = get_cases_for_edge (e, stmt);
5191
5192 /* If we have a list of cases associated with E, then use it
5193 as it's a lot faster than walking the entire case vector. */
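/* Every case on the chain is retargeted to LABEL; if an edge to
   DEST already existed, the whole chain is afterwards spliced into
   the case list of that edge (E2) so the edge-to-cases mapping
   stays consistent.  */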
5194 if (cases)
5195 {
5196 edge e2 = find_edge (e->src, dest);
5197 tree last, first;
5198
5199 first = cases;
5200 while (cases)
5201 {
5202 last = cases;
5203 CASE_LABEL (cases) = label;
5204 cases = CASE_CHAIN (cases);
5205 }
5206
5207 /* If there was already an edge in the CFG, then we need
5208 to move all the cases associated with E to E2. */
5209 if (e2)
5210 {
5211 tree cases2 = get_cases_for_edge (e2, stmt);
5212
5213 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5214 CASE_CHAIN (cases2) = first;
5215 }
5216 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5217 }
5218 else
5219 {
5220 size_t i, n = gimple_switch_num_labels (stmt);
5221
5222 for (i = 0; i < n; i++)
5223 {
5224 tree elt = gimple_switch_label (stmt, i);
5225 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5226 CASE_LABEL (elt) = label;
5227 }
5228 }
5229 }
5230 break;
5231
5232 case GIMPLE_ASM:
5233 {
5234 int i, n = gimple_asm_nlabels (stmt);
5235 tree label = NULL;
5236
5237 for (i = 0; i < n; ++i)
5238 {
5239 tree cons = gimple_asm_label_op (stmt, i);
5240 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5241 {
5242 if (!label)
5243 label = gimple_block_label (dest);
5244 TREE_VALUE (cons) = label;
5245 }
5246 }
5247
5248 /* If we didn't find any label matching the former edge in the
5249 asm labels, we must be redirecting the fallthrough
5250 edge. */
5251 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5252 }
5253 break;
5254
5255 case GIMPLE_RETURN:
5256 gsi_remove (&gsi, true);
5257 e->flags |= EDGE_FALLTHRU;
5258 break;
5259
5260 case GIMPLE_OMP_RETURN:
5261 case GIMPLE_OMP_CONTINUE:
5262 case GIMPLE_OMP_SECTIONS_SWITCH:
5263 case GIMPLE_OMP_FOR:
5264 /* The edges from OMP constructs can be simply redirected. */
5265 break;
5266
5267 case GIMPLE_EH_DISPATCH:
5268 if (!(e->flags & EDGE_FALLTHRU))
5269 redirect_eh_dispatch_edge (stmt, e, dest);
5270 break;
5271
5272 case GIMPLE_TRANSACTION:
5273 /* The ABORT edge has a stored label associated with it, otherwise
5274 the edges are simply redirectable. */
5275 if (e->flags == 0)
5276 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5277 break;
5278
5279 default:
5280 /* Otherwise it must be a fallthru edge, and we don't need to
5281 do anything besides redirecting it. */
5282 gcc_assert (e->flags & EDGE_FALLTHRU);
5283 break;
5284 }
5285
5286 /* Update/insert PHI nodes as necessary. */
5287
5288 /* Now update the edges in the CFG. */
5289 e = ssa_redirect_edge (e, dest);
5290
5291 return e;
5292 }
5293
5294 /* Returns true if it is possible to remove edge E by redirecting
5295 it to the destination of the other edge from E->src. */
5296
5297 static bool
5298 gimple_can_remove_branch_p (const_edge e)
5299 {
5300 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5301 return false;
5302
5303 return true;
5304 }
5305
5306 /* Simple wrapper, as we can always redirect fallthru edges. */
5307
5308 static basic_block
5309 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5310 {
5311 e = gimple_redirect_edge_and_branch (e, dest);
5312 gcc_assert (e);
5313
5314 return NULL;
5315 }
5316
5317
5318 /* Splits basic block BB after statement STMT (but at least after the
5319 labels). If STMT is NULL, BB is split just after the labels. */
5320
5321 static basic_block
5322 gimple_split_block (basic_block bb, void *stmt)
5323 {
5324 gimple_stmt_iterator gsi;
5325 gimple_stmt_iterator gsi_tgt;
5326 gimple act;
5327 gimple_seq list;
5328 basic_block new_bb;
5329 edge e;
5330 edge_iterator ei;
5331
5332 new_bb = create_empty_bb (bb);
5333
5334 /* Redirect the outgoing edges. */
5335 new_bb->succs = bb->succs;
5336 bb->succs = NULL;
5337 FOR_EACH_EDGE (e, ei, new_bb->succs)
5338 e->src = new_bb;
5339
5340 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5341 stmt = NULL;
5342
5343 /* Move everything from GSI to the new basic block. */
5344 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5345 {
5346 act = gsi_stmt (gsi);
5347 if (gimple_code (act) == GIMPLE_LABEL)
5348 continue;
5349
5350 if (!stmt)
5351 break;
5352
5353 if (stmt == act)
5354 {
5355 gsi_next (&gsi);
5356 break;
5357 }
5358 }
5359
5360 if (gsi_end_p (gsi))
5361 return new_bb;
5362
5363 /* Split the statement list - avoid re-creating new containers as this
5364 brings ugly quadratic memory consumption in the inliner.
5365 (We are still quadratic since we need to update stmt BB pointers,
5366 sadly.) */
5367 gsi_split_seq_before (&gsi, &list);
5368 set_bb_seq (new_bb, list);
5369 for (gsi_tgt = gsi_start (list);
5370 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5371 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5372
5373 return new_bb;
5374 }
5375
5376
5377 /* Moves basic block BB after block AFTER. */
5378
5379 static bool
5380 gimple_move_block_after (basic_block bb, basic_block after)
5381 {
5382 if (bb->prev_bb == after)
5383 return true;
5384
5385 unlink_block (bb);
5386 link_block (bb, after);
5387
5388 return true;
5389 }
5390
5391
5392 /* Return TRUE if block BB has no executable statements, otherwise return
5393 FALSE. */
5394
5395 static bool
5396 gimple_empty_block_p (basic_block bb)
5397 {
5398 /* BB must have no executable statements. */
5399 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5400 if (phi_nodes (bb))
5401 return false;
5402 if (gsi_end_p (gsi))
5403 return true;
5404 if (is_gimple_debug (gsi_stmt (gsi)))
5405 gsi_next_nondebug (&gsi);
5406 return gsi_end_p (gsi);
5407 }
5408
5409
5410 /* Split a basic block if it ends with a conditional branch and if the
5411 other part of the block is not empty. */
5412
5413 static basic_block
5414 gimple_split_block_before_cond_jump (basic_block bb)
5415 {
5416 gimple last, split_point;
5417 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5418 if (gsi_end_p (gsi))
5419 return NULL;
5420 last = gsi_stmt (gsi);
5421 if (gimple_code (last) != GIMPLE_COND
5422 && gimple_code (last) != GIMPLE_SWITCH)
5423 return NULL;
5424 gsi_prev_nondebug (&gsi);
5425 split_point = gsi_stmt (gsi);
5426 return split_block (bb, split_point)->dest;
5427 }
5428
5429
5430 /* Return true if basic_block can be duplicated. */
5431
5432 static bool
5433 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5434 {
5435 return true;
5436 }
5437
5438 /* Create a duplicate of the basic block BB. NOTE: This does not
5439 preserve SSA form. */
5440
5441 static basic_block
5442 gimple_duplicate_bb (basic_block bb)
5443 {
5444 basic_block new_bb;
5445 gimple_stmt_iterator gsi, gsi_tgt;
5446 gimple_seq phis = phi_nodes (bb);
5447 gimple phi, stmt, copy;
5448
5449 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5450
5451 /* Copy the PHI nodes. We ignore PHI node arguments here because
5452 the incoming edges have not been setup yet. */
5453 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5454 {
5455 phi = gsi_stmt (gsi);
5456 copy = create_phi_node (NULL_TREE, new_bb);
5457 create_new_def_for (gimple_phi_result (phi), copy,
5458 gimple_phi_result_ptr (copy));
5459 gimple_set_uid (copy, gimple_uid (phi));
5460 }
5461
5462 gsi_tgt = gsi_start_bb (new_bb);
5463 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5464 {
5465 def_operand_p def_p;
5466 ssa_op_iter op_iter;
5467 tree lhs;
5468
5469 stmt = gsi_stmt (gsi);
5470 if (gimple_code (stmt) == GIMPLE_LABEL)
5471 continue;
5472
5473 /* Don't duplicate label debug stmts. */
5474 if (gimple_debug_bind_p (stmt)
5475 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5476 == LABEL_DECL)
5477 continue;
5478
5479 /* Create a new copy of STMT and duplicate STMT's virtual
5480 operands. */
5481 copy = gimple_copy (stmt);
5482 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5483
5484 maybe_duplicate_eh_stmt (copy, stmt);
5485 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5486
5487 /* When copying around a stmt writing into a local non-user
5488 aggregate, make sure it won't share stack slot with other
5489 vars. */
5490 lhs = gimple_get_lhs (stmt);
5491 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5492 {
5493 tree base = get_base_address (lhs);
5494 if (base
5495 && (TREE_CODE (base) == VAR_DECL
5496 || TREE_CODE (base) == RESULT_DECL)
5497 && DECL_IGNORED_P (base)
5498 && !TREE_STATIC (base)
5499 && !DECL_EXTERNAL (base)
5500 && (TREE_CODE (base) != VAR_DECL
5501 || !DECL_HAS_VALUE_EXPR_P (base)))
5502 DECL_NONSHAREABLE (base) = 1;
5503 }
5504
5505 /* Create new names for all the definitions created by COPY and
5506 add replacement mappings for each new name. */
5507 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5508 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5509 }
5510
5511 return new_bb;
5512 }
5513
5514 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5515
5516 static void
5517 add_phi_args_after_copy_edge (edge e_copy)
5518 {
5519 basic_block bb, bb_copy = e_copy->src, dest;
5520 edge e;
5521 edge_iterator ei;
5522 gimple phi, phi_copy;
5523 tree def;
5524 gimple_stmt_iterator psi, psi_copy;
5525
5526 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5527 return;
5528
5529 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5530
5531 if (e_copy->dest->flags & BB_DUPLICATED)
5532 dest = get_bb_original (e_copy->dest);
5533 else
5534 dest = e_copy->dest;
5535
5536 e = find_edge (bb, dest);
5537 if (!e)
5538 {
5539 /* During loop unrolling the target of the latch edge is copied.
5540 In this case we are not looking for the edge to DEST, but for
5541 the edge to the duplicated block whose original was DEST. */
5542 FOR_EACH_EDGE (e, ei, bb->succs)
5543 {
5544 if ((e->dest->flags & BB_DUPLICATED)
5545 && get_bb_original (e->dest) == dest)
5546 break;
5547 }
5548
5549 gcc_assert (e != NULL);
5550 }
5551
5552 for (psi = gsi_start_phis (e->dest),
5553 psi_copy = gsi_start_phis (e_copy->dest);
5554 !gsi_end_p (psi);
5555 gsi_next (&psi), gsi_next (&psi_copy))
5556 {
5557 phi = gsi_stmt (psi);
5558 phi_copy = gsi_stmt (psi_copy);
5559 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5560 add_phi_arg (phi_copy, def, e_copy,
5561 gimple_phi_arg_location_from_edge (phi, e));
5562 }
5563 }
5564
5565
5566 /* Basic block BB_COPY was created by code duplication. Add phi node
5567 arguments for edges going out of BB_COPY. The blocks that were
5568 duplicated have BB_DUPLICATED set. */
5569
5570 void
5571 add_phi_args_after_copy_bb (basic_block bb_copy)
5572 {
5573 edge e_copy;
5574 edge_iterator ei;
5575
5576 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5577 {
5578 add_phi_args_after_copy_edge (e_copy);
5579 }
5580 }
5581
5582 /* Blocks in REGION_COPY array of length N_REGION were created by
5583 duplication of basic blocks. Add phi node arguments for edges
5584 going from these blocks. If E_COPY is not NULL, also add
5585 phi node arguments for its destination. */
5586
5587 void
5588 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5589 edge e_copy)
5590 {
5591 unsigned i;
5592
5593 for (i = 0; i < n_region; i++)
5594 region_copy[i]->flags |= BB_DUPLICATED;
5595
5596 for (i = 0; i < n_region; i++)
5597 add_phi_args_after_copy_bb (region_copy[i]);
5598 if (e_copy)
5599 add_phi_args_after_copy_edge (e_copy);
5600
5601 for (i = 0; i < n_region; i++)
5602 region_copy[i]->flags &= ~BB_DUPLICATED;
5603 }
5604
5605 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5606 important exit edge EXIT. By important we mean that no SSA name defined
5607 inside region is live over the other exit edges of the region. All entry
5608 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5609 to the duplicate of the region. Dominance and loop information is
5610 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5611 UPDATE_DOMINANCE is false then we assume that the caller will update the
5612 dominance information after calling this function. The new basic
5613 blocks are stored to REGION_COPY in the same order as they had in REGION,
5614 provided that REGION_COPY is not NULL.
5615 The function returns false if it is unable to copy the region,
5616 true otherwise. */
5617
5618 bool
5619 gimple_duplicate_sese_region (edge entry, edge exit,
5620 basic_block *region, unsigned n_region,
5621 basic_block *region_copy,
5622 bool update_dominance)
5623 {
5624 unsigned i;
5625 bool free_region_copy = false, copying_header = false;
5626 struct loop *loop = entry->dest->loop_father;
5627 edge exit_copy;
5628 vec<basic_block> doms;
5629 edge redirected;
5630 int total_freq = 0, entry_freq = 0;
5631 gcov_type total_count = 0, entry_count = 0;
5632
5633 if (!can_copy_bbs_p (region, n_region))
5634 return false;
5635
5636 /* Some sanity checking. Note that we do not check for all possible
5637 misuses of the function. I.e. if you ask to copy something weird,
5638 it will work, but the state of the structures probably will not be
5639 correct. */
5640 for (i = 0; i < n_region; i++)
5641 {
5642 /* We do not handle subloops, i.e. all the blocks must belong to the
5643 same loop. */
5644 if (region[i]->loop_father != loop)
5645 return false;
5646
5647 if (region[i] != entry->dest
5648 && region[i] == loop->header)
5649 return false;
5650 }
5651
5652 set_loop_copy (loop, loop);
5653
5654 /* In case the function is used for loop header copying (which is the primary
5655 use), ensure that EXIT and its copy will be new latch and entry edges. */
5656 if (loop->header == entry->dest)
5657 {
5658 copying_header = true;
5659 set_loop_copy (loop, loop_outer (loop));
5660
5661 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5662 return false;
5663
5664 for (i = 0; i < n_region; i++)
5665 if (region[i] != exit->src
5666 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5667 return false;
5668 }
5669
5670 if (!region_copy)
5671 {
5672 region_copy = XNEWVEC (basic_block, n_region);
5673 free_region_copy = true;
5674 }
5675
5676 initialize_original_copy_tables ();
5677
5678 /* Record blocks outside the region that are dominated by something
5679 inside. */
5680 if (update_dominance)
5681 {
5682 doms.create (0);
5683 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5684 }
5685
5686 if (entry->dest->count)
5687 {
5688 total_count = entry->dest->count;
5689 entry_count = entry->count;
5690 /* Fix up corner cases, to avoid division by zero or creation of negative
5691 frequencies. */
5692 if (entry_count > total_count)
5693 entry_count = total_count;
5694 }
5695 else
5696 {
5697 total_freq = entry->dest->frequency;
5698 entry_freq = EDGE_FREQUENCY (entry);
5699 /* Fix up corner cases, to avoid division by zero or creation of negative
5700 frequencies. */
5701 if (total_freq == 0)
5702 total_freq = 1;
5703 else if (entry_freq > total_freq)
5704 entry_freq = total_freq;
5705 }
5706
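/* E.g. if ENTRY carries 30% of the count of ENTRY->dest, the region
   copy is scaled to that 30% and the original region keeps the
   remaining 70%, so the combined profile stays unchanged.  */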
5707 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5708 split_edge_bb_loc (entry), update_dominance);
5709 if (total_count)
5710 {
5711 scale_bbs_frequencies_gcov_type (region, n_region,
5712 total_count - entry_count,
5713 total_count);
5714 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5715 total_count);
5716 }
5717 else
5718 {
5719 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5720 total_freq);
5721 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5722 }
5723
5724 if (copying_header)
5725 {
5726 loop->header = exit->dest;
5727 loop->latch = exit->src;
5728 }
5729
5730 /* Redirect the entry and add the phi node arguments. */
5731 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5732 gcc_assert (redirected != NULL);
5733 flush_pending_stmts (entry);
5734
5735 /* Concerning updating of dominators: We must recount dominators
5736 for entry block and its copy. Anything that is outside of the
5737 region, but was dominated by something inside needs recounting as
5738 well. */
5739 if (update_dominance)
5740 {
5741 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5742 doms.safe_push (get_bb_original (entry->dest));
5743 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5744 doms.release ();
5745 }
5746
5747 /* Add the other PHI node arguments. */
5748 add_phi_args_after_copy (region_copy, n_region, NULL);
5749
5750 if (free_region_copy)
5751 free (region_copy);
5752
5753 free_original_copy_tables ();
5754 return true;
5755 }
5756
5757 /* Checks if BB is part of the region defined by N_REGION BBS. */
5758 static bool
5759 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5760 {
5761 unsigned int n;
5762
5763 for (n = 0; n < n_region; n++)
5764 {
5765 if (bb == bbs[n])
5766 return true;
5767 }
5768 return false;
5769 }
5770
5771 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5772 are stored to REGION_COPY in the same order in which they appear
5773 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5774 the region, EXIT an exit from it. The condition guarding EXIT
5775 is moved to ENTRY. Returns true if duplication succeeds, false
5776 otherwise.
5777
5778 For example,
5779
5780 some_code;
5781 if (cond)
5782 A;
5783 else
5784 B;
5785
5786 is transformed to
5787
5788 if (cond)
5789 {
5790 some_code;
5791 A;
5792 }
5793 else
5794 {
5795 some_code;
5796 B;
5797 }
5798 */
5799
5800 bool
5801 gimple_duplicate_sese_tail (edge entry, edge exit,
5802 basic_block *region, unsigned n_region,
5803 basic_block *region_copy)
5804 {
5805 unsigned i;
5806 bool free_region_copy = false;
5807 struct loop *loop = exit->dest->loop_father;
5808 struct loop *orig_loop = entry->dest->loop_father;
5809 basic_block switch_bb, entry_bb, nentry_bb;
5810 vec<basic_block> doms;
5811 int total_freq = 0, exit_freq = 0;
5812 gcov_type total_count = 0, exit_count = 0;
5813 edge exits[2], nexits[2], e;
5814 gimple_stmt_iterator gsi;
5815 gimple cond_stmt;
5816 edge sorig, snew;
5817 basic_block exit_bb;
5818 gimple_stmt_iterator psi;
5819 gimple phi;
5820 tree def;
5821 struct loop *target, *aloop, *cloop;
5822
5823 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5824 exits[0] = exit;
5825 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5826
5827 if (!can_copy_bbs_p (region, n_region))
5828 return false;
5829
5830 initialize_original_copy_tables ();
5831 set_loop_copy (orig_loop, loop);
5832
5833 target = loop;
5834 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5835 {
5836 if (bb_part_of_region_p (aloop->header, region, n_region))
5837 {
5838 cloop = duplicate_loop (aloop, target);
5839 duplicate_subloops (aloop, cloop);
5840 }
5841 }
5842
5843 if (!region_copy)
5844 {
5845 region_copy = XNEWVEC (basic_block, n_region);
5846 free_region_copy = true;
5847 }
5848
5849 gcc_assert (!need_ssa_update_p (cfun));
5850
5851 /* Record blocks outside the region that are dominated by something
5852 inside. */
5853 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5854
5855 if (exit->src->count)
5856 {
5857 total_count = exit->src->count;
5858 exit_count = exit->count;
5859 /* Fix up corner cases, to avoid division by zero or creation of negative
5860 frequencies. */
5861 if (exit_count > total_count)
5862 exit_count = total_count;
5863 }
5864 else
5865 {
5866 total_freq = exit->src->frequency;
5867 exit_freq = EDGE_FREQUENCY (exit);
5868 /* Fix up corner cases, to avoid division by zero or creation of negative
5869 frequencies. */
5870 if (total_freq == 0)
5871 total_freq = 1;
5872 if (exit_freq > total_freq)
5873 exit_freq = total_freq;
5874 }
5875
5876 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5877 split_edge_bb_loc (exit), true);
5878 if (total_count)
5879 {
5880 scale_bbs_frequencies_gcov_type (region, n_region,
5881 total_count - exit_count,
5882 total_count);
5883 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5884 total_count);
5885 }
5886 else
5887 {
5888 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5889 total_freq);
5890 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5891 }
5892
5893 /* Create the switch block, and put the exit condition to it. */
5894 entry_bb = entry->dest;
5895 nentry_bb = get_bb_copy (entry_bb);
5896 if (!last_stmt (entry->src)
5897 || !stmt_ends_bb_p (last_stmt (entry->src)))
5898 switch_bb = entry->src;
5899 else
5900 switch_bb = split_edge (entry);
5901 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5902
5903 gsi = gsi_last_bb (switch_bb);
5904 cond_stmt = last_stmt (exit->src);
5905 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5906 cond_stmt = gimple_copy (cond_stmt);
5907
5908 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5909
5910 sorig = single_succ_edge (switch_bb);
5911 sorig->flags = exits[1]->flags;
5912 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5913
5914 /* Register the new edge from SWITCH_BB in loop exit lists. */
5915 rescan_loop_exit (snew, true, false);
5916
5917 /* Add the PHI node arguments. */
5918 add_phi_args_after_copy (region_copy, n_region, snew);
5919
5920 /* Get rid of now superfluous conditions and associated edges (and phi node
5921 arguments). */
5922 exit_bb = exit->dest;
5923
5924 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5925 PENDING_STMT (e) = NULL;
5926
5927 /* The latch of ORIG_LOOP was copied, and so was the backedge
5928 to the original header. We redirect this backedge to EXIT_BB. */
5929 for (i = 0; i < n_region; i++)
5930 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5931 {
5932 gcc_assert (single_succ_edge (region_copy[i]));
5933 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5934 PENDING_STMT (e) = NULL;
5935 for (psi = gsi_start_phis (exit_bb);
5936 !gsi_end_p (psi);
5937 gsi_next (&psi))
5938 {
5939 phi = gsi_stmt (psi);
5940 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5941 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5942 }
5943 }
5944 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5945 PENDING_STMT (e) = NULL;
5946
5947 /* Anything that is outside of the region, but was dominated by something
5948 inside needs to update dominance info. */
5949 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5950 doms.release ();
5951 /* Update the SSA web. */
5952 update_ssa (TODO_update_ssa);
5953
5954 if (free_region_copy)
5955 free (region_copy);
5956
5957 free_original_copy_tables ();
5958 return true;
5959 }
5960
5961 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5962 adding blocks when the dominator traversal reaches EXIT. This
5963 function silently assumes that ENTRY strictly dominates EXIT. */
5964
5965 void
5966 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5967 vec<basic_block> *bbs_p)
5968 {
5969 basic_block son;
5970
5971 for (son = first_dom_son (CDI_DOMINATORS, entry);
5972 son;
5973 son = next_dom_son (CDI_DOMINATORS, son))
5974 {
5975 bbs_p->safe_push (son);
5976 if (son != exit)
5977 gather_blocks_in_sese_region (son, exit, bbs_p);
5978 }
5979 }
5980
5981 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5982 The duplicates are recorded in VARS_MAP. */
5983
5984 static void
5985 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5986 tree to_context)
5987 {
5988 tree t = *tp, new_t;
5989 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5990 void **loc;
5991
5992 if (DECL_CONTEXT (t) == to_context)
5993 return;
5994
5995 loc = pointer_map_contains (vars_map, t);
5996
5997 if (!loc)
5998 {
5999 loc = pointer_map_insert (vars_map, t);
6000
6001 if (SSA_VAR_P (t))
6002 {
6003 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6004 add_local_decl (f, new_t);
6005 }
6006 else
6007 {
6008 gcc_assert (TREE_CODE (t) == CONST_DECL);
6009 new_t = copy_node (t);
6010 }
6011 DECL_CONTEXT (new_t) = to_context;
6012
6013 *loc = new_t;
6014 }
6015 else
6016 new_t = (tree) *loc;
6017
6018 *tp = new_t;
6019 }
6020
6021
6022 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6023 VARS_MAP maps old ssa names and var_decls to the new ones. */
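/* E.g. for a use of i_3 with underlying decl i (invented names), the
   decl is first duplicated into TO_CONTEXT via
   replace_by_duplicate_decl, then a fresh SSA name is created in
   TO_CONTEXT's struct function; VARS_MAP memoizes the result so every
   use of i_3 maps to the same new name.  */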
6024
6025 static tree
6026 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6027 tree to_context)
6028 {
6029 void **loc;
6030 tree new_name;
6031
6032 gcc_assert (!virtual_operand_p (name));
6033
6034 loc = pointer_map_contains (vars_map, name);
6035
6036 if (!loc)
6037 {
6038 tree decl = SSA_NAME_VAR (name);
6039 if (decl)
6040 {
6041 replace_by_duplicate_decl (&decl, vars_map, to_context);
6042 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6043 decl, SSA_NAME_DEF_STMT (name));
6044 if (SSA_NAME_IS_DEFAULT_DEF (name))
6045 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6046 decl, new_name);
6047 }
6048 else
6049 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6050 name, SSA_NAME_DEF_STMT (name));
6051
6052 loc = pointer_map_insert (vars_map, name);
6053 *loc = new_name;
6054 }
6055 else
6056 new_name = (tree) *loc;
6057
6058 return new_name;
6059 }
6060
/* Data used when moving statements between functions; see
move_block_to_fn below. */

struct move_stmt_d
{
tree orig_block;	/* Scope BLOCK being moved, or NULL_TREE. */
tree new_block;		/* BLOCK that replaces ORIG_BLOCK in moved stmts. */
tree from_context;	/* FUNCTION_DECL of the source function. */
tree to_context;	/* FUNCTION_DECL of the destination function. */
struct pointer_map_t *vars_map;	/* Maps old decls and SSA names to new. */
htab_t new_label_map;	/* Maps old labels to their duplicates. */
struct pointer_map_t *eh_map;	/* Maps old EH regions to their duplicates. */
bool remap_decls_p;	/* Whether to remap local variable decls. */
};
6072
/* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
contained in *TP if its block was ORIG_BLOCK previously, and change
the DECL_CONTEXT of every local variable referenced in *TP. */
6076
6077 static tree
6078 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6079 {
6080 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6081 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6082 tree t = *tp;
6083
6084 if (EXPR_P (t))
6085 {
6086 tree block = TREE_BLOCK (t);
6087 if (block == p->orig_block
6088 || (p->orig_block == NULL_TREE
6089 && block != NULL_TREE))
6090 TREE_SET_BLOCK (t, p->new_block);
6091 #ifdef ENABLE_CHECKING
6092 else if (block != NULL_TREE)
6093 {
6094 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6095 block = BLOCK_SUPERCONTEXT (block);
6096 gcc_assert (block == p->orig_block);
6097 }
6098 #endif
6099 }
6100 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6101 {
6102 if (TREE_CODE (t) == SSA_NAME)
6103 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6104 else if (TREE_CODE (t) == LABEL_DECL)
6105 {
6106 if (p->new_label_map)
6107 {
6108 struct tree_map in, *out;
6109 in.base.from = t;
6110 out = (struct tree_map *)
6111 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6112 if (out)
6113 *tp = t = out->to;
6114 }
6115
6116 DECL_CONTEXT (t) = p->to_context;
6117 }
6118 else if (p->remap_decls_p)
6119 {
6120 /* Replace T with its duplicate. T should no longer appear in the
6121 parent function, so this looks wasteful; however, it may appear
6122 in referenced_vars, and more importantly, as virtual operands of
6123 statements, and in alias lists of other variables. It would be
6124 quite difficult to expunge it from all those places. ??? It might
6125 suffice to do this for addressable variables. */
6126 if ((TREE_CODE (t) == VAR_DECL
6127 && !is_global_var (t))
6128 || TREE_CODE (t) == CONST_DECL)
6129 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6130 }
6131 *walk_subtrees = 0;
6132 }
6133 else if (TYPE_P (t))
6134 *walk_subtrees = 0;
6135
6136 return NULL_TREE;
6137 }
6138
/* Helper for move_stmt_r. Given an EH region number for the source
function, map that to the duplicate EH region number in the dest. */
6141
6142 static int
6143 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6144 {
6145 eh_region old_r, new_r;
6146 void **slot;
6147
6148 old_r = get_eh_region_from_number (old_nr);
6149 slot = pointer_map_contains (p->eh_map, old_r);
6150 new_r = (eh_region) *slot;
6151
6152 return new_r->index;
6153 }
6154
6155 /* Similar, but operate on INTEGER_CSTs. */
6156
6157 static tree
6158 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6159 {
6160 int old_nr, new_nr;
6161
6162 old_nr = tree_low_cst (old_t_nr, 0);
6163 new_nr = move_stmt_eh_region_nr (old_nr, p);
6164
6165 return build_int_cst (integer_type_node, new_nr);
6166 }
6167
6168 /* Like move_stmt_op, but for gimple statements.
6169
6170 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6171 contained in the current statement in *GSI_P and change the
6172 DECL_CONTEXT of every local variable referenced in the current
6173 statement. */
6174
6175 static tree
6176 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6177 struct walk_stmt_info *wi)
6178 {
6179 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6180 gimple stmt = gsi_stmt (*gsi_p);
6181 tree block = gimple_block (stmt);
6182
6183 if (block == p->orig_block
6184 || (p->orig_block == NULL_TREE
6185 && block != NULL_TREE))
6186 gimple_set_block (stmt, p->new_block);
6187
6188 switch (gimple_code (stmt))
6189 {
6190 case GIMPLE_CALL:
6191 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6192 {
6193 tree r, fndecl = gimple_call_fndecl (stmt);
6194 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6195 switch (DECL_FUNCTION_CODE (fndecl))
6196 {
6197 case BUILT_IN_EH_COPY_VALUES:
6198 r = gimple_call_arg (stmt, 1);
6199 r = move_stmt_eh_region_tree_nr (r, p);
6200 gimple_call_set_arg (stmt, 1, r);
6201 /* FALLTHRU */
6202
6203 case BUILT_IN_EH_POINTER:
6204 case BUILT_IN_EH_FILTER:
6205 r = gimple_call_arg (stmt, 0);
6206 r = move_stmt_eh_region_tree_nr (r, p);
6207 gimple_call_set_arg (stmt, 0, r);
6208 break;
6209
6210 default:
6211 break;
6212 }
6213 }
6214 break;
6215
6216 case GIMPLE_RESX:
6217 {
6218 int r = gimple_resx_region (stmt);
6219 r = move_stmt_eh_region_nr (r, p);
6220 gimple_resx_set_region (stmt, r);
6221 }
6222 break;
6223
6224 case GIMPLE_EH_DISPATCH:
6225 {
6226 int r = gimple_eh_dispatch_region (stmt);
6227 r = move_stmt_eh_region_nr (r, p);
6228 gimple_eh_dispatch_set_region (stmt, r);
6229 }
6230 break;
6231
6232 case GIMPLE_OMP_RETURN:
6233 case GIMPLE_OMP_CONTINUE:
6234 break;
6235 default:
6236 if (is_gimple_omp (stmt))
6237 {
6238 /* Do not remap variables inside OMP directives. Variables
6239 referenced in clauses and directive header belong to the
6240 parent function and should not be moved into the child
6241 function. */
6242 bool save_remap_decls_p = p->remap_decls_p;
6243 p->remap_decls_p = false;
6244 *handled_ops_p = true;
6245
6246 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6247 move_stmt_op, wi);
6248
6249 p->remap_decls_p = save_remap_decls_p;
6250 }
6251 break;
6252 }
6253
6254 return NULL_TREE;
6255 }
6256
6257 /* Move basic block BB from function CFUN to function DEST_FN. The
6258 block is moved out of the original linked list and placed after
6259 block AFTER in the new list. Also, the block is removed from the
6260 original array of blocks and placed in DEST_FN's array of blocks.
If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
updated to reflect the moved edges.
6263
6264 The local variables are remapped to new instances, VARS_MAP is used
6265 to record the mapping. */
6266
6267 static void
6268 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6269 basic_block after, bool update_edge_count_p,
6270 struct move_stmt_d *d)
6271 {
6272 struct control_flow_graph *cfg;
6273 edge_iterator ei;
6274 edge e;
6275 gimple_stmt_iterator si;
6276 unsigned old_len, new_len;
6277
6278 /* Remove BB from dominance structures. */
6279 delete_from_dominance_info (CDI_DOMINATORS, bb);
6280
6281 /* Move BB from its current loop to the copy in the new function. */
6282 if (current_loops)
6283 {
6284 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6285 if (new_loop)
6286 bb->loop_father = new_loop;
6287 }
6288
6289 /* Link BB to the new linked list. */
6290 move_block_after (bb, after);
6291
6292 /* Update the edge count in the corresponding flowgraphs. */
6293 if (update_edge_count_p)
6294 FOR_EACH_EDGE (e, ei, bb->succs)
6295 {
6296 cfun->cfg->x_n_edges--;
6297 dest_cfun->cfg->x_n_edges++;
6298 }
6299
6300 /* Remove BB from the original basic block array. */
6301 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6302 cfun->cfg->x_n_basic_blocks--;
6303
6304 /* Grow DEST_CFUN's basic block array if needed. */
6305 cfg = dest_cfun->cfg;
6306 cfg->x_n_basic_blocks++;
6307 if (bb->index >= cfg->x_last_basic_block)
6308 cfg->x_last_basic_block = bb->index + 1;
6309
6310 old_len = vec_safe_length (cfg->x_basic_block_info);
6311 if ((unsigned) cfg->x_last_basic_block >= old_len)
6312 {
6313 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6314 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6315 }
6316
6317 (*cfg->x_basic_block_info)[bb->index] = bb;
6318
6319 /* Remap the variables in phi nodes. */
6320 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6321 {
6322 gimple phi = gsi_stmt (si);
6323 use_operand_p use;
6324 tree op = PHI_RESULT (phi);
6325 ssa_op_iter oi;
6326 unsigned i;
6327
6328 if (virtual_operand_p (op))
6329 {
6330 /* Remove the phi nodes for virtual operands (alias analysis will be
6331 run for the new function, anyway). */
6332 remove_phi_node (&si, true);
6333 continue;
6334 }
6335
6336 SET_PHI_RESULT (phi,
6337 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6338 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6339 {
6340 op = USE_FROM_PTR (use);
6341 if (TREE_CODE (op) == SSA_NAME)
6342 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6343 }
6344
6345 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6346 {
6347 location_t locus = gimple_phi_arg_location (phi, i);
6348 tree block = LOCATION_BLOCK (locus);
6349
6350 if (locus == UNKNOWN_LOCATION)
6351 continue;
6352 if (d->orig_block == NULL_TREE || block == d->orig_block)
6353 {
6354 if (d->new_block == NULL_TREE)
6355 locus = LOCATION_LOCUS (locus);
6356 else
6357 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6358 gimple_phi_arg_set_location (phi, i, locus);
6359 }
6360 }
6361
6362 gsi_next (&si);
6363 }
6364
6365 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6366 {
6367 gimple stmt = gsi_stmt (si);
6368 struct walk_stmt_info wi;
6369
6370 memset (&wi, 0, sizeof (wi));
6371 wi.info = d;
6372 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6373
6374 if (gimple_code (stmt) == GIMPLE_LABEL)
6375 {
6376 tree label = gimple_label_label (stmt);
6377 int uid = LABEL_DECL_UID (label);
6378
6379 gcc_assert (uid > -1);
6380
6381 old_len = vec_safe_length (cfg->x_label_to_block_map);
6382 if (old_len <= (unsigned) uid)
6383 {
6384 new_len = 3 * uid / 2 + 1;
6385 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6386 }
6387
6388 (*cfg->x_label_to_block_map)[uid] = bb;
6389 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6390
6391 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6392
6393 if (uid >= dest_cfun->cfg->last_label_uid)
6394 dest_cfun->cfg->last_label_uid = uid + 1;
6395 }
6396
6397 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6398 remove_stmt_from_eh_lp_fn (cfun, stmt);
6399
6400 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6401 gimple_remove_stmt_histograms (cfun, stmt);
6402
6403 /* We cannot leave any operands allocated from the operand caches of
6404 the current function. */
6405 free_stmt_operands (stmt);
6406 push_cfun (dest_cfun);
6407 update_stmt (stmt);
6408 pop_cfun ();
6409 }
6410
6411 FOR_EACH_EDGE (e, ei, bb->succs)
6412 if (e->goto_locus != UNKNOWN_LOCATION)
6413 {
6414 tree block = LOCATION_BLOCK (e->goto_locus);
6415 if (d->orig_block == NULL_TREE
6416 || block == d->orig_block)
6417 e->goto_locus = d->new_block ?
6418 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6419 LOCATION_LOCUS (e->goto_locus);
6420 }
6421 }
6422
6423 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6424 the outermost EH region. Use REGION as the incoming base EH region. */
6425
6426 static eh_region
6427 find_outermost_region_in_block (struct function *src_cfun,
6428 basic_block bb, eh_region region)
6429 {
6430 gimple_stmt_iterator si;
6431
6432 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6433 {
6434 gimple stmt = gsi_stmt (si);
6435 eh_region stmt_region;
6436 int lp_nr;
6437
6438 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6439 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6440 if (stmt_region)
6441 {
6442 if (region == NULL)
6443 region = stmt_region;
6444 else if (stmt_region != region)
6445 {
6446 region = eh_region_outermost (src_cfun, stmt_region, region);
6447 gcc_assert (region != NULL);
6448 }
6449 }
6450 }
6451
6452 return region;
6453 }
6454
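/* Callback for duplicate_eh_regions. Map label DECL to a fresh
artificial label in the new function, recording the mapping in the
hash table DATA, and return the new label. */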
6455 static tree
6456 new_label_mapper (tree decl, void *data)
6457 {
6458 htab_t hash = (htab_t) data;
6459 struct tree_map *m;
6460 void **slot;
6461
6462 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6463
6464 m = XNEW (struct tree_map);
6465 m->hash = DECL_UID (decl);
6466 m->base.from = decl;
6467 m->to = create_artificial_label (UNKNOWN_LOCATION);
6468 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6469 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6470 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6471
6472 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6473 gcc_assert (*slot == NULL);
6474
6475 *slot = m;
6476
6477 return m->to;
6478 }
6479
/* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
subblocks, replacing them with duplicates recorded in VARS_MAP. */
6482
6483 static void
6484 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6485 tree to_context)
6486 {
6487 tree *tp, t;
6488
6489 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6490 {
6491 t = *tp;
6492 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6493 continue;
6494 replace_by_duplicate_decl (&t, vars_map, to_context);
6495 if (t != *tp)
6496 {
6497 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6498 {
6499 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6500 DECL_HAS_VALUE_EXPR_P (t) = 1;
6501 }
6502 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6503 *tp = t;
6504 }
6505 }
6506
6507 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6508 replace_block_vars_by_duplicates (block, vars_map, to_context);
6509 }
6510
6511 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6512 from FN1 to FN2. */
6513
6514 static void
6515 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6516 struct loop *loop)
6517 {
6518 /* Discard it from the old loop array. */
6519 (*get_loops (fn1))[loop->num] = NULL;
6520
6521 /* Place it in the new loop array, assigning it a new number. */
6522 loop->num = number_of_loops (fn2);
6523 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6524
6525 /* Recurse to children. */
6526 for (loop = loop->inner; loop; loop = loop->next)
6527 fixup_loop_arrays_after_move (fn1, fn2, loop);
6528 }
6529
6530 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6531 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6532 single basic block in the original CFG and the new basic block is
6533 returned. DEST_CFUN must not have a CFG yet.
6534
6535 Note that the region need not be a pure SESE region. Blocks inside
6536 the region may contain calls to abort/exit. The only restriction
6537 is that ENTRY_BB should be the only entry point and it must
6538 dominate EXIT_BB.
6539
Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
to the new function.
6543
6544 All local variables referenced in the region are assumed to be in
6545 the corresponding BLOCK_VARS and unexpanded variable lists
6546 associated with DEST_CFUN. */
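
/* As a sketch of typical use (the real callers live in the OpenMP
   expansion code; CHILD_FN, ENTRY_BB, EXIT_BB and BLOCK below are the
   caller's, not part of this interface):

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   NEW_BB is then the single block left in place of the region in the
   original CFG.  */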
6547
6548 basic_block
6549 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6550 basic_block exit_bb, tree orig_block)
6551 {
6552 vec<basic_block> bbs, dom_bbs;
6553 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6554 basic_block after, bb, *entry_pred, *exit_succ, abb;
6555 struct function *saved_cfun = cfun;
6556 int *entry_flag, *exit_flag;
6557 unsigned *entry_prob, *exit_prob;
6558 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6559 edge e;
6560 edge_iterator ei;
6561 htab_t new_label_map;
6562 struct pointer_map_t *vars_map, *eh_map;
6563 struct loop *loop = entry_bb->loop_father;
6564 struct loop *loop0 = get_loop (saved_cfun, 0);
6565 struct move_stmt_d d;
6566
6567 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6568 region. */
6569 gcc_assert (entry_bb != exit_bb
6570 && (!exit_bb
6571 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6572
/* Collect all the blocks in the region. Manually add ENTRY_BB
because it won't be added by gather_blocks_in_sese_region. */
6575 bbs.create (0);
6576 bbs.safe_push (entry_bb);
6577 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6578
6579 /* The blocks that used to be dominated by something in BBS will now be
6580 dominated by the new block. */
6581 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6582 bbs.address (),
6583 bbs.length ());
6584
6585 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6586 the predecessor edges to ENTRY_BB and the successor edges to
6587 EXIT_BB so that we can re-attach them to the new basic block that
6588 will replace the region. */
6589 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6590 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6591 entry_flag = XNEWVEC (int, num_entry_edges);
6592 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6593 i = 0;
6594 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6595 {
6596 entry_prob[i] = e->probability;
6597 entry_flag[i] = e->flags;
6598 entry_pred[i++] = e->src;
6599 remove_edge (e);
6600 }
6601
6602 if (exit_bb)
6603 {
6604 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6605 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6606 exit_flag = XNEWVEC (int, num_exit_edges);
6607 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6608 i = 0;
6609 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6610 {
6611 exit_prob[i] = e->probability;
6612 exit_flag[i] = e->flags;
6613 exit_succ[i++] = e->dest;
6614 remove_edge (e);
6615 }
6616 }
6617 else
6618 {
6619 num_exit_edges = 0;
6620 exit_succ = NULL;
6621 exit_flag = NULL;
6622 exit_prob = NULL;
6623 }
6624
6625 /* Switch context to the child function to initialize DEST_FN's CFG. */
6626 gcc_assert (dest_cfun->cfg == NULL);
6627 push_cfun (dest_cfun);
6628
6629 init_empty_tree_cfg ();
6630
6631 /* Initialize EH information for the new function. */
6632 eh_map = NULL;
6633 new_label_map = NULL;
6634 if (saved_cfun->eh)
6635 {
6636 eh_region region = NULL;
6637
6638 FOR_EACH_VEC_ELT (bbs, i, bb)
6639 region = find_outermost_region_in_block (saved_cfun, bb, region);
6640
6641 init_eh_for_function ();
6642 if (region != NULL)
6643 {
6644 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6645 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6646 new_label_mapper, new_label_map);
6647 }
6648 }
6649
6650 /* Initialize an empty loop tree. */
6651 struct loops *loops = ggc_alloc_cleared_loops ();
6652 init_loops_structure (dest_cfun, loops, 1);
6653 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6654 set_loops_for_fn (dest_cfun, loops);
6655
6656 /* Move the outlined loop tree part. */
6657 num_nodes = bbs.length ();
6658 FOR_EACH_VEC_ELT (bbs, i, bb)
6659 {
6660 if (bb->loop_father->header == bb)
6661 {
6662 struct loop *this_loop = bb->loop_father;
6663 struct loop *outer = loop_outer (this_loop);
6664 if (outer == loop
6665 /* If the SESE region contains some bbs ending with
6666 a noreturn call, those are considered to belong
6667 to the outermost loop in saved_cfun, rather than
6668 the entry_bb's loop_father. */
6669 || outer == loop0)
6670 {
6671 if (outer != loop)
6672 num_nodes -= this_loop->num_nodes;
6673 flow_loop_tree_node_remove (bb->loop_father);
6674 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6675 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6676 }
6677 }
6678 else if (bb->loop_father == loop0 && loop0 != loop)
6679 num_nodes--;
6680
6681 /* Remove loop exits from the outlined region. */
6682 if (loops_for_fn (saved_cfun)->exits)
6683 FOR_EACH_EDGE (e, ei, bb->succs)
6684 {
6685 void **slot = htab_find_slot_with_hash
6686 (loops_for_fn (saved_cfun)->exits, e,
6687 htab_hash_pointer (e), NO_INSERT);
6688 if (slot)
6689 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6690 }
6691 }
6692
6693
6694 /* Adjust the number of blocks in the tree root of the outlined part. */
6695 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6696
6697 /* Setup a mapping to be used by move_block_to_fn. */
6698 loop->aux = current_loops->tree_root;
6699 loop0->aux = current_loops->tree_root;
6700
6701 pop_cfun ();
6702
6703 /* Move blocks from BBS into DEST_CFUN. */
6704 gcc_assert (bbs.length () >= 2);
6705 after = dest_cfun->cfg->x_entry_block_ptr;
6706 vars_map = pointer_map_create ();
6707
6708 memset (&d, 0, sizeof (d));
6709 d.orig_block = orig_block;
6710 d.new_block = DECL_INITIAL (dest_cfun->decl);
6711 d.from_context = cfun->decl;
6712 d.to_context = dest_cfun->decl;
6713 d.vars_map = vars_map;
6714 d.new_label_map = new_label_map;
6715 d.eh_map = eh_map;
6716 d.remap_decls_p = true;
6717
6718 FOR_EACH_VEC_ELT (bbs, i, bb)
6719 {
/* No need to update edge counts on the last block. They have
already been updated earlier when we detached the region from
the original CFG. */
6723 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6724 after = bb;
6725 }
6726
6727 loop->aux = NULL;
6728 loop0->aux = NULL;
6729 /* Loop sizes are no longer correct, fix them up. */
6730 loop->num_nodes -= num_nodes;
6731 for (struct loop *outer = loop_outer (loop);
6732 outer; outer = loop_outer (outer))
6733 outer->num_nodes -= num_nodes;
6734 loop0->num_nodes -= bbs.length () - num_nodes;
6735
6736 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6737 {
6738 struct loop *aloop;
6739 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6740 if (aloop != NULL)
6741 {
6742 if (aloop->simduid)
6743 {
6744 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6745 d.to_context);
6746 dest_cfun->has_simduid_loops = true;
6747 }
6748 if (aloop->force_vect)
6749 dest_cfun->has_force_vect_loops = true;
6750 }
6751 }
6752
6753 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6754 if (orig_block)
6755 {
6756 tree block;
6757 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6758 == NULL_TREE);
6759 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6760 = BLOCK_SUBBLOCKS (orig_block);
6761 for (block = BLOCK_SUBBLOCKS (orig_block);
6762 block; block = BLOCK_CHAIN (block))
6763 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6764 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6765 }
6766
6767 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6768 vars_map, dest_cfun->decl);
6769
6770 if (new_label_map)
6771 htab_delete (new_label_map);
6772 if (eh_map)
6773 pointer_map_destroy (eh_map);
6774 pointer_map_destroy (vars_map);
6775
6776 /* Rewire the entry and exit blocks. The successor to the entry
6777 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6778 the child function. Similarly, the predecessor of DEST_FN's
6779 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6780 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
various CFG manipulation functions get to the right CFG.
6782
6783 FIXME, this is silly. The CFG ought to become a parameter to
6784 these helpers. */
6785 push_cfun (dest_cfun);
6786 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6787 if (exit_bb)
6788 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6789 pop_cfun ();
6790
6791 /* Back in the original function, the SESE region has disappeared,
6792 create a new basic block in its place. */
6793 bb = create_empty_bb (entry_pred[0]);
6794 if (current_loops)
6795 add_bb_to_loop (bb, loop);
6796 for (i = 0; i < num_entry_edges; i++)
6797 {
6798 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6799 e->probability = entry_prob[i];
6800 }
6801
6802 for (i = 0; i < num_exit_edges; i++)
6803 {
6804 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6805 e->probability = exit_prob[i];
6806 }
6807
6808 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6809 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6810 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6811 dom_bbs.release ();
6812
6813 if (exit_bb)
6814 {
6815 free (exit_prob);
6816 free (exit_flag);
6817 free (exit_succ);
6818 }
6819 free (entry_prob);
6820 free (entry_flag);
6821 free (entry_pred);
6822 bbs.release ();
6823
6824 return bb;
6825 }
6826
6827
/* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
dumpfile.h). */
6830
6831 void
6832 dump_function_to_file (tree fndecl, FILE *file, int flags)
6833 {
6834 tree arg, var, old_current_fndecl = current_function_decl;
6835 struct function *dsf;
6836 bool ignore_topmost_bind = false, any_var = false;
6837 basic_block bb;
6838 tree chain;
6839 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6840 && decl_is_tm_clone (fndecl));
6841 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6842
6843 current_function_decl = fndecl;
6844 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6845
6846 arg = DECL_ARGUMENTS (fndecl);
6847 while (arg)
6848 {
6849 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6850 fprintf (file, " ");
6851 print_generic_expr (file, arg, dump_flags);
6852 if (flags & TDF_VERBOSE)
6853 print_node (file, "", arg, 4);
6854 if (DECL_CHAIN (arg))
6855 fprintf (file, ", ");
6856 arg = DECL_CHAIN (arg);
6857 }
6858 fprintf (file, ")\n");
6859
6860 if (flags & TDF_VERBOSE)
6861 print_node (file, "", fndecl, 2);
6862
6863 dsf = DECL_STRUCT_FUNCTION (fndecl);
6864 if (dsf && (flags & TDF_EH))
6865 dump_eh_tree (file, dsf);
6866
6867 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
6868 {
6869 dump_node (fndecl, TDF_SLIM | flags, file);
6870 current_function_decl = old_current_fndecl;
6871 return;
6872 }
6873
6874 /* When GIMPLE is lowered, the variables are no longer available in
6875 BIND_EXPRs, so display them separately. */
6876 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
6877 {
6878 unsigned ix;
6879 ignore_topmost_bind = true;
6880
6881 fprintf (file, "{\n");
6882 if (!vec_safe_is_empty (fun->local_decls))
6883 FOR_EACH_LOCAL_DECL (fun, ix, var)
6884 {
6885 print_generic_decl (file, var, flags);
6886 if (flags & TDF_VERBOSE)
6887 print_node (file, "", var, 4);
6888 fprintf (file, "\n");
6889
6890 any_var = true;
6891 }
6892 if (gimple_in_ssa_p (cfun))
6893 for (ix = 1; ix < num_ssa_names; ++ix)
6894 {
6895 tree name = ssa_name (ix);
6896 if (name && !SSA_NAME_VAR (name))
6897 {
6898 fprintf (file, " ");
6899 print_generic_expr (file, TREE_TYPE (name), flags);
6900 fprintf (file, " ");
6901 print_generic_expr (file, name, flags);
6902 fprintf (file, ";\n");
6903
6904 any_var = true;
6905 }
6906 }
6907 }
6908
6909 if (fun && fun->decl == fndecl
6910 && fun->cfg
6911 && basic_block_info_for_function (fun))
6912 {
6913 /* If the CFG has been built, emit a CFG-based dump. */
6914 if (!ignore_topmost_bind)
6915 fprintf (file, "{\n");
6916
6917 if (any_var && n_basic_blocks_for_function (fun))
6918 fprintf (file, "\n");
6919
6920 FOR_EACH_BB_FN (bb, fun)
6921 dump_bb (file, bb, 2, flags | TDF_COMMENT);
6922
6923 fprintf (file, "}\n");
6924 }
6925 else if (DECL_SAVED_TREE (fndecl) == NULL)
6926 {
6927 /* The function is now in GIMPLE form but the CFG has not been
6928 built yet. Emit the single sequence of GIMPLE statements
6929 that make up its body. */
6930 gimple_seq body = gimple_body (fndecl);
6931
6932 if (gimple_seq_first_stmt (body)
6933 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6934 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6935 print_gimple_seq (file, body, 0, flags);
6936 else
6937 {
6938 if (!ignore_topmost_bind)
6939 fprintf (file, "{\n");
6940
6941 if (any_var)
6942 fprintf (file, "\n");
6943
6944 print_gimple_seq (file, body, 2, flags);
6945 fprintf (file, "}\n");
6946 }
6947 }
6948 else
6949 {
6950 int indent;
6951
6952 /* Make a tree based dump. */
6953 chain = DECL_SAVED_TREE (fndecl);
6954 if (chain && TREE_CODE (chain) == BIND_EXPR)
6955 {
6956 if (ignore_topmost_bind)
6957 {
6958 chain = BIND_EXPR_BODY (chain);
6959 indent = 2;
6960 }
6961 else
6962 indent = 0;
6963 }
6964 else
6965 {
6966 if (!ignore_topmost_bind)
6967 fprintf (file, "{\n");
6968 indent = 2;
6969 }
6970
6971 if (any_var)
6972 fprintf (file, "\n");
6973
6974 print_generic_stmt_indented (file, chain, flags, indent);
6975 if (ignore_topmost_bind)
6976 fprintf (file, "}\n");
6977 }
6978
6979 if (flags & TDF_ENUMERATE_LOCALS)
6980 dump_enumerated_decls (file, flags);
6981 fprintf (file, "\n\n");
6982
6983 current_function_decl = old_current_fndecl;
6984 }
6985
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
6987
6988 DEBUG_FUNCTION void
6989 debug_function (tree fn, int flags)
6990 {
6991 dump_function_to_file (fn, stderr, flags);
6992 }
6993
6994
6995 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6996
6997 static void
6998 print_pred_bbs (FILE *file, basic_block bb)
6999 {
7000 edge e;
7001 edge_iterator ei;
7002
7003 FOR_EACH_EDGE (e, ei, bb->preds)
7004 fprintf (file, "bb_%d ", e->src->index);
7005 }
7006
7007
7008 /* Print on FILE the indexes for the successors of basic_block BB. */
7009
7010 static void
7011 print_succ_bbs (FILE *file, basic_block bb)
7012 {
7013 edge e;
7014 edge_iterator ei;
7015
7016 FOR_EACH_EDGE (e, ei, bb->succs)
7017 fprintf (file, "bb_%d ", e->dest->index);
7018 }
7019
/* Print to FILE the basic block BB according to the VERBOSITY level. */
7021
7022 void
7023 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7024 {
7025 char *s_indent = (char *) alloca ((size_t) indent + 1);
7026 memset ((void *) s_indent, ' ', (size_t) indent);
7027 s_indent[indent] = '\0';
7028
7029 /* Print basic_block's header. */
7030 if (verbosity >= 2)
7031 {
7032 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7033 print_pred_bbs (file, bb);
7034 fprintf (file, "}, succs = {");
7035 print_succ_bbs (file, bb);
7036 fprintf (file, "})\n");
7037 }
7038
7039 /* Print basic_block's body. */
7040 if (verbosity >= 3)
7041 {
7042 fprintf (file, "%s {\n", s_indent);
7043 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7044 fprintf (file, "%s }\n", s_indent);
7045 }
7046 }
7047
7048 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7049
/* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
the VERBOSITY level, this outputs the contents of the loop or just
its structure. */
7053
7054 static void
7055 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7056 {
7057 char *s_indent;
7058 basic_block bb;
7059
7060 if (loop == NULL)
7061 return;
7062
7063 s_indent = (char *) alloca ((size_t) indent + 1);
7064 memset ((void *) s_indent, ' ', (size_t) indent);
7065 s_indent[indent] = '\0';
7066
7067 /* Print loop's header. */
7068 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7069 if (loop->header)
7070 fprintf (file, "header = %d", loop->header->index);
7071 else
7072 {
7073 fprintf (file, "deleted)\n");
7074 return;
7075 }
7076 if (loop->latch)
7077 fprintf (file, ", latch = %d", loop->latch->index);
7078 else
7079 fprintf (file, ", multiple latches");
7080 fprintf (file, ", niter = ");
7081 print_generic_expr (file, loop->nb_iterations, 0);
7082
7083 if (loop->any_upper_bound)
7084 {
7085 fprintf (file, ", upper_bound = ");
7086 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7087 }
7088
7089 if (loop->any_estimate)
7090 {
7091 fprintf (file, ", estimate = ");
7092 dump_double_int (file, loop->nb_iterations_estimate, true);
7093 }
7094 fprintf (file, ")\n");
7095
7096 /* Print loop's body. */
7097 if (verbosity >= 1)
7098 {
7099 fprintf (file, "%s{\n", s_indent);
7100 FOR_EACH_BB (bb)
7101 if (bb->loop_father == loop)
7102 print_loops_bb (file, bb, indent, verbosity);
7103
7104 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7105 fprintf (file, "%s}\n", s_indent);
7106 }
7107 }
7108
/* Print the LOOP and its sibling loops on FILE, indented INDENT
spaces. Depending on the VERBOSITY level, this outputs the contents
of each loop or just its structure. */
7112
7113 static void
7114 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7115 int verbosity)
7116 {
7117 if (loop == NULL)
7118 return;
7119
7120 print_loop (file, loop, indent, verbosity);
7121 print_loop_and_siblings (file, loop->next, indent, verbosity);
7122 }
7123
7124 /* Follow a CFG edge from the entry point of the program, and on entry
7125 of a loop, pretty print the loop structure on FILE. */
7126
7127 void
7128 print_loops (FILE *file, int verbosity)
7129 {
7130 basic_block bb;
7131
7132 bb = ENTRY_BLOCK_PTR;
7133 if (bb && bb->loop_father)
7134 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7135 }
7136
7137 /* Dump a loop. */
7138
7139 DEBUG_FUNCTION void
7140 debug (struct loop &ref)
7141 {
7142 print_loop (stderr, &ref, 0, /*verbosity*/0);
7143 }
7144
7145 DEBUG_FUNCTION void
7146 debug (struct loop *ptr)
7147 {
7148 if (ptr)
7149 debug (*ptr);
7150 else
7151 fprintf (stderr, "<nil>\n");
7152 }
7153
7154 /* Dump a loop verbosely. */
7155
7156 DEBUG_FUNCTION void
7157 debug_verbose (struct loop &ref)
7158 {
7159 print_loop (stderr, &ref, 0, /*verbosity*/3);
7160 }
7161
7162 DEBUG_FUNCTION void
7163 debug_verbose (struct loop *ptr)
7164 {
7165 if (ptr)
debug_verbose (*ptr);
7167 else
7168 fprintf (stderr, "<nil>\n");
7169 }
7170
7171
7172 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7173
7174 DEBUG_FUNCTION void
7175 debug_loops (int verbosity)
7176 {
7177 print_loops (stderr, verbosity);
7178 }
7179
7180 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7181
7182 DEBUG_FUNCTION void
7183 debug_loop (struct loop *loop, int verbosity)
7184 {
7185 print_loop (stderr, loop, 0, verbosity);
7186 }
7187
7188 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7189 level. */
7190
7191 DEBUG_FUNCTION void
7192 debug_loop_num (unsigned num, int verbosity)
7193 {
7194 debug_loop (get_loop (cfun, num), verbosity);
7195 }
7196
/* Return true if BB ends with a call, possibly followed by some
instructions that must stay with the call. Return false
otherwise. */
7200
7201 static bool
7202 gimple_block_ends_with_call_p (basic_block bb)
7203 {
7204 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7205 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7206 }
7207
7208
/* Return true if BB ends with a conditional branch. Return false
otherwise. */
7211
7212 static bool
7213 gimple_block_ends_with_condjump_p (const_basic_block bb)
7214 {
7215 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7216 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7217 }
7218
7219
7220 /* Return true if we need to add fake edge to exit at statement T.
7221 Helper function for gimple_flow_call_edges_add. */
7222
7223 static bool
7224 need_fake_edge_p (gimple t)
7225 {
7226 tree fndecl = NULL_TREE;
7227 int call_flags = 0;
7228
7229 /* NORETURN and LONGJMP calls already have an edge to exit.
7230 CONST and PURE calls do not need one.
7231 We don't currently check for CONST and PURE here, although
7232 it would be a good idea, because those attributes are
7233 figured out from the RTL in mark_constant_function, and
7234 the counter incrementation code from -fprofile-arcs
7235 leads to different results from -fbranch-probabilities. */
7236 if (is_gimple_call (t))
7237 {
7238 fndecl = gimple_call_fndecl (t);
7239 call_flags = gimple_call_flags (t);
7240 }
7241
7242 if (is_gimple_call (t)
7243 && fndecl
7244 && DECL_BUILT_IN (fndecl)
7245 && (call_flags & ECF_NOTHROW)
7246 && !(call_flags & ECF_RETURNS_TWICE)
7247 /* fork() doesn't really return twice, but the effect of
7248 wrapping it in __gcov_fork() which calls __gcov_flush()
7249 and clears the counters before forking has the same
7250 effect as returning twice. Force a fake edge. */
7251 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7252 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7253 return false;
7254
7255 if (is_gimple_call (t))
7256 {
7257 edge_iterator ei;
7258 edge e;
7259 basic_block bb;
7260
7261 if (!(call_flags & ECF_NORETURN))
7262 return true;
7263
7264 bb = gimple_bb (t);
7265 FOR_EACH_EDGE (e, ei, bb->succs)
7266 if ((e->flags & EDGE_FAKE) == 0)
7267 return true;
7268 }
7269
7270 if (gimple_code (t) == GIMPLE_ASM
7271 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7272 return true;
7273
7274 return false;
7275 }
7276
7277
/* Add fake edges to the function exit for any non-constant and
non-noreturn calls (or noreturn calls with EH/abnormal edges), and
for volatile inline assembly, in the bitmap of blocks specified by
BLOCKS or in the whole CFG if BLOCKS is zero. Return the number of
blocks that were split.
7283
7284 The goal is to expose cases in which entering a basic block does
7285 not imply that all subsequent instructions must be executed. */
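
/* For instance (a sketch), given a block

     bb_5:
       x_1 = foo ();
       y_2 = x_1 + 1;

   where we cannot prove that foo returns (say it can call exit), the
   block is split after the call and a fake edge to EXIT is added, so
   that consumers such as the profile code do not assume the statement
   y_2 = x_1 + 1 executes whenever bb_5 is entered.  */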
7286
7287 static int
7288 gimple_flow_call_edges_add (sbitmap blocks)
7289 {
7290 int i;
7291 int blocks_split = 0;
7292 int last_bb = last_basic_block;
7293 bool check_last_block = false;
7294
7295 if (n_basic_blocks == NUM_FIXED_BLOCKS)
7296 return 0;
7297
7298 if (! blocks)
7299 check_last_block = true;
7300 else
7301 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7302
7303 /* In the last basic block, before epilogue generation, there will be
7304 a fallthru edge to EXIT. Special care is required if the last insn
7305 of the last basic block is a call because make_edge folds duplicate
7306 edges, which would result in the fallthru edge also being marked
7307 fake, which would result in the fallthru edge being removed by
7308 remove_fake_edges, which would result in an invalid CFG.
7309
7310 Moreover, we can't elide the outgoing fake edge, since the block
7311 profiler needs to take this into account in order to solve the minimal
7312 spanning tree in the case that the call doesn't return.
7313
7314 Handle this by adding a dummy instruction in a new last basic block. */
7315 if (check_last_block)
7316 {
7317 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7318 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7319 gimple t = NULL;
7320
7321 if (!gsi_end_p (gsi))
7322 t = gsi_stmt (gsi);
7323
7324 if (t && need_fake_edge_p (t))
7325 {
7326 edge e;
7327
7328 e = find_edge (bb, EXIT_BLOCK_PTR);
7329 if (e)
7330 {
7331 gsi_insert_on_edge (e, gimple_build_nop ());
7332 gsi_commit_edge_inserts ();
7333 }
7334 }
7335 }
7336
/* Now add fake edges to the function exit for any non-constant
calls since there is no way that we can determine if they will
return or not... */
7340 for (i = 0; i < last_bb; i++)
7341 {
7342 basic_block bb = BASIC_BLOCK (i);
7343 gimple_stmt_iterator gsi;
7344 gimple stmt, last_stmt;
7345
7346 if (!bb)
7347 continue;
7348
7349 if (blocks && !bitmap_bit_p (blocks, i))
7350 continue;
7351
7352 gsi = gsi_last_nondebug_bb (bb);
7353 if (!gsi_end_p (gsi))
7354 {
7355 last_stmt = gsi_stmt (gsi);
7356 do
7357 {
7358 stmt = gsi_stmt (gsi);
7359 if (need_fake_edge_p (stmt))
7360 {
7361 edge e;
7362
7363 /* The handling above of the final block before the
7364 epilogue should be enough to verify that there is
7365 no edge to the exit block in CFG already.
7366 Calling make_edge in such case would cause us to
7367 mark that edge as fake and remove it later. */
7368 #ifdef ENABLE_CHECKING
7369 if (stmt == last_stmt)
7370 {
7371 e = find_edge (bb, EXIT_BLOCK_PTR);
7372 gcc_assert (e == NULL);
7373 }
7374 #endif
7375
7376 /* Note that the following may create a new basic block
7377 and renumber the existing basic blocks. */
7378 if (stmt != last_stmt)
7379 {
7380 e = split_block (bb, stmt);
7381 if (e)
7382 blocks_split++;
7383 }
7384 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7385 }
7386 gsi_prev (&gsi);
7387 }
7388 while (!gsi_end_p (gsi));
7389 }
7390 }
7391
7392 if (blocks_split)
7393 verify_flow_info ();
7394
7395 return blocks_split;
7396 }
7397
/* Removes edge E and all the blocks dominated by it, and updates dominance
information. The IL in E->src needs to be updated separately.
If dominance info is not available, only the edge E is removed. */
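
/* For example, gimple_purge_dead_eh_edges below uses this to delete a
dead EH edge together with the blocks dominated by its destination. */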
7401
7402 void
7403 remove_edge_and_dominated_blocks (edge e)
7404 {
7405 vec<basic_block> bbs_to_remove = vNULL;
7406 vec<basic_block> bbs_to_fix_dom = vNULL;
7407 bitmap df, df_idom;
7408 edge f;
7409 edge_iterator ei;
7410 bool none_removed = false;
7411 unsigned i;
7412 basic_block bb, dbb;
7413 bitmap_iterator bi;
7414
7415 if (!dom_info_available_p (CDI_DOMINATORS))
7416 {
7417 remove_edge (e);
7418 return;
7419 }
7420
7421 /* No updating is needed for edges to exit. */
7422 if (e->dest == EXIT_BLOCK_PTR)
7423 {
7424 if (cfgcleanup_altered_bbs)
7425 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7426 remove_edge (e);
7427 return;
7428 }
7429
7430 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7431 that is not dominated by E->dest, then this set is empty. Otherwise,
7432 all the basic blocks dominated by E->dest are removed.
7433
7434 Also, to DF_IDOM we store the immediate dominators of the blocks in
7435 the dominance frontier of E (i.e., of the successors of the
7436 removed blocks, if there are any, and of E->dest otherwise). */
7437 FOR_EACH_EDGE (f, ei, e->dest->preds)
7438 {
7439 if (f == e)
7440 continue;
7441
7442 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7443 {
7444 none_removed = true;
7445 break;
7446 }
7447 }
7448
7449 df = BITMAP_ALLOC (NULL);
7450 df_idom = BITMAP_ALLOC (NULL);
7451
7452 if (none_removed)
7453 bitmap_set_bit (df_idom,
7454 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7455 else
7456 {
7457 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7458 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7459 {
7460 FOR_EACH_EDGE (f, ei, bb->succs)
7461 {
7462 if (f->dest != EXIT_BLOCK_PTR)
7463 bitmap_set_bit (df, f->dest->index);
7464 }
7465 }
7466 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7467 bitmap_clear_bit (df, bb->index);
7468
7469 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7470 {
7471 bb = BASIC_BLOCK (i);
7472 bitmap_set_bit (df_idom,
7473 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7474 }
7475 }
7476
7477 if (cfgcleanup_altered_bbs)
7478 {
7479 /* Record the set of the altered basic blocks. */
7480 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7481 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7482 }
7483
7484 /* Remove E and the cancelled blocks. */
7485 if (none_removed)
7486 remove_edge (e);
7487 else
7488 {
7489 /* Walk backwards so as to get a chance to substitute all
7490 released DEFs into debug stmts. See
7491 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7492 details. */
7493 for (i = bbs_to_remove.length (); i-- > 0; )
7494 delete_basic_block (bbs_to_remove[i]);
7495 }
7496
7497 /* Update the dominance information. The immediate dominator may change only
7498 for blocks whose immediate dominator belongs to DF_IDOM:
7499
7500 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7501 removal. Let Z the arbitrary block such that idom(Z) = Y and
7502 Z dominates X after the removal. Before removal, there exists a path P
7503 from Y to X that avoids Z. Let F be the last edge on P that is
7504 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7505 dominates W, and because of P, Z does not dominate W), and W belongs to
7506 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7507 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7508 {
7509 bb = BASIC_BLOCK (i);
7510 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7511 dbb;
7512 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7513 bbs_to_fix_dom.safe_push (dbb);
7514 }
7515
7516 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7517
7518 BITMAP_FREE (df);
7519 BITMAP_FREE (df_idom);
7520 bbs_to_remove.release ();
7521 bbs_to_fix_dom.release ();
7522 }
7523
7524 /* Purge dead EH edges from basic block BB. */
7525
7526 bool
7527 gimple_purge_dead_eh_edges (basic_block bb)
7528 {
7529 bool changed = false;
7530 edge e;
7531 edge_iterator ei;
7532 gimple stmt = last_stmt (bb);
7533
7534 if (stmt && stmt_can_throw_internal (stmt))
7535 return false;
7536
7537 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7538 {
7539 if (e->flags & EDGE_EH)
7540 {
7541 remove_edge_and_dominated_blocks (e);
7542 changed = true;
7543 }
7544 else
7545 ei_next (&ei);
7546 }
7547
7548 return changed;
7549 }
7550
/* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7552
7553 bool
7554 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7555 {
7556 bool changed = false;
7557 unsigned i;
7558 bitmap_iterator bi;
7559
7560 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7561 {
7562 basic_block bb = BASIC_BLOCK (i);
7563
7564 /* Earlier gimple_purge_dead_eh_edges could have removed
7565 this basic block already. */
7566 gcc_assert (bb || changed);
7567 if (bb != NULL)
7568 changed |= gimple_purge_dead_eh_edges (bb);
7569 }
7570
7571 return changed;
7572 }
7573
7574 /* Purge dead abnormal call edges from basic block BB. */
7575
7576 bool
7577 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7578 {
7579 bool changed = false;
7580 edge e;
7581 edge_iterator ei;
7582 gimple stmt = last_stmt (bb);
7583
7584 if (!cfun->has_nonlocal_label
7585 && !cfun->calls_setjmp)
7586 return false;
7587
7588 if (stmt && stmt_can_make_abnormal_goto (stmt))
7589 return false;
7590
7591 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7592 {
7593 if (e->flags & EDGE_ABNORMAL)
7594 {
7595 if (e->flags & EDGE_FALLTHRU)
7596 e->flags &= ~EDGE_ABNORMAL;
7597 else
7598 remove_edge_and_dominated_blocks (e);
7599 changed = true;
7600 }
7601 else
7602 ei_next (&ei);
7603 }
7604
7605 return changed;
7606 }
7607
/* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7609
7610 bool
7611 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7612 {
7613 bool changed = false;
7614 unsigned i;
7615 bitmap_iterator bi;
7616
7617 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7618 {
7619 basic_block bb = BASIC_BLOCK (i);
7620
7621 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7622 this basic block already. */
7623 gcc_assert (bb || changed);
7624 if (bb != NULL)
7625 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7626 }
7627
7628 return changed;
7629 }
7630
7631 /* This function is called whenever a new edge is created or
7632 redirected. */
7633
7634 static void
7635 gimple_execute_on_growing_pred (edge e)
7636 {
7637 basic_block bb = e->dest;
7638
7639 if (!gimple_seq_empty_p (phi_nodes (bb)))
7640 reserve_phi_args_for_new_edge (bb);
7641 }
7642
7643 /* This function is called immediately before edge E is removed from
7644 the edge vector E->dest->preds. */
7645
7646 static void
7647 gimple_execute_on_shrinking_pred (edge e)
7648 {
7649 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7650 remove_phi_args (e);
7651 }
7652
7653 /*---------------------------------------------------------------------------
7654 Helper functions for Loop versioning
7655 ---------------------------------------------------------------------------*/
7656
/* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
of 'first'. Both of them are dominated by 'new_head' basic block. When
'new_head' was created by splitting 'second's incoming edge, it received
phi arguments on that edge from split_edge(). Later, an additional edge 'e'
was created to connect 'new_head' and 'first'. Now this routine adds to
edge 'e' the phi args that the edge from 'new_head' to 'second' received
as part of the edge splitting. */
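
/* Schematically (a sketch):

              new_head
             /        \
          e /          \ e2
           v            v
         first        second

   Each phi in FIRST receives, for edge E, the argument that the
   corresponding phi in SECOND has for edge E2.  */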
7664
7665 static void
7666 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7667 basic_block new_head, edge e)
7668 {
7669 gimple phi1, phi2;
7670 gimple_stmt_iterator psi1, psi2;
7671 tree def;
7672 edge e2 = find_edge (new_head, second);
7673
7674 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7675 edge, we should always have an edge from NEW_HEAD to SECOND. */
7676 gcc_assert (e2 != NULL);
7677
/* Browse all of 'second' basic block's phi nodes and add phi args to
edge 'e' for 'first' head. PHI args are always in the correct order. */
7680
7681 for (psi2 = gsi_start_phis (second),
7682 psi1 = gsi_start_phis (first);
7683 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7684 gsi_next (&psi2), gsi_next (&psi1))
7685 {
7686 phi1 = gsi_stmt (psi1);
7687 phi2 = gsi_stmt (psi2);
7688 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7689 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7690 }
7691 }
7692
7693
/* Adds an if-else statement to COND_BB with condition COND_EXPR.
SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
the destination of the ELSE part. */
7697
7698 static void
7699 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7700 basic_block second_head ATTRIBUTE_UNUSED,
7701 basic_block cond_bb, void *cond_e)
7702 {
7703 gimple_stmt_iterator gsi;
7704 gimple new_cond_expr;
7705 tree cond_expr = (tree) cond_e;
7706 edge e0;
7707
/* Build new conditional expr. */
7709 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7710 NULL_TREE, NULL_TREE);
7711
7712 /* Add new cond in cond_bb. */
7713 gsi = gsi_last_bb (cond_bb);
7714 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7715
7716 /* Adjust edges appropriately to connect new head with first head
7717 as well as second head. */
7718 e0 = single_succ_edge (cond_bb);
7719 e0->flags &= ~EDGE_FALLTHRU;
7720 e0->flags |= EDGE_FALSE_VALUE;
7721 }
7722
7723
/* Do book-keeping of basic block BB for the profile consistency checker.
If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1, do
post-pass accounting. Store the counts in RECORD. */
7727 static void
7728 gimple_account_profile_record (basic_block bb, int after_pass,
7729 struct profile_record *record)
7730 {
7731 gimple_stmt_iterator i;
7732 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7733 {
7734 record->size[after_pass]
7735 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7736 if (profile_status == PROFILE_READ)
7737 record->time[after_pass]
7738 += estimate_num_insns (gsi_stmt (i),
7739 &eni_time_weights) * bb->count;
7740 else if (profile_status == PROFILE_GUESSED)
7741 record->time[after_pass]
7742 += estimate_num_insns (gsi_stmt (i),
7743 &eni_time_weights) * bb->frequency;
7744 }
7745 }
7746
7747 struct cfg_hooks gimple_cfg_hooks = {
7748 "gimple",
7749 gimple_verify_flow_info,
7750 gimple_dump_bb, /* dump_bb */
7751 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7752 create_bb, /* create_basic_block */
7753 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7754 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7755 gimple_can_remove_branch_p, /* can_remove_branch_p */
7756 remove_bb, /* delete_basic_block */
7757 gimple_split_block, /* split_block */
7758 gimple_move_block_after, /* move_block_after */
7759 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7760 gimple_merge_blocks, /* merge_blocks */
7761 gimple_predict_edge, /* predict_edge */
7762 gimple_predicted_by_p, /* predicted_by_p */
7763 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7764 gimple_duplicate_bb, /* duplicate_block */
7765 gimple_split_edge, /* split_edge */
gimple_make_forwarder_block, /* make_forwarder_block */
7767 NULL, /* tidy_fallthru_edge */
7768 NULL, /* force_nonfallthru */
7769 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7770 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7771 gimple_flow_call_edges_add, /* flow_call_edges_add */
7772 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7773 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7774 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7775 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
7777 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7778 flush_pending_stmts, /* flush_pending_stmts */
7779 gimple_empty_block_p, /* block_empty_p */
7780 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
gimple_account_profile_record, /* account_profile_record */
7782 };
7783
7784
7785 /* Split all critical edges. */
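
/* A critical edge is one whose source block has multiple successors
and whose destination block has multiple predecessors (see
EDGE_CRITICAL_P); statements cannot be inserted on such an edge
without splitting it first. */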
7786
7787 static unsigned int
7788 split_critical_edges (void)
7789 {
7790 basic_block bb;
7791 edge e;
7792 edge_iterator ei;
7793
7794 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7795 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7796 mappings around the calls to split_edge. */
7797 start_recording_case_labels ();
7798 FOR_ALL_BB (bb)
7799 {
7800 FOR_EACH_EDGE (e, ei, bb->succs)
7801 {
7802 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7803 split_edge (e);
/* PRE inserts statements on edges and expects that,
since split_critical_edges was done beforehand, committing edge
insertions will not split more edges. In addition to critical
edges we must split edges out of blocks that have multiple
successors and end with control flow statements, such as RESX.
Go ahead and split them too. This matches the logic in
gimple_find_edge_insert_loc. */
7811 else if ((!single_pred_p (e->dest)
7812 || !gimple_seq_empty_p (phi_nodes (e->dest))
7813 || e->dest == EXIT_BLOCK_PTR)
7814 && e->src != ENTRY_BLOCK_PTR
7815 && !(e->flags & EDGE_ABNORMAL))
7816 {
7817 gimple_stmt_iterator gsi;
7818
7819 gsi = gsi_last_bb (e->src);
7820 if (!gsi_end_p (gsi)
7821 && stmt_ends_bb_p (gsi_stmt (gsi))
7822 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7823 && !gimple_call_builtin_p (gsi_stmt (gsi),
7824 BUILT_IN_RETURN)))
7825 split_edge (e);
7826 }
7827 }
7828 }
7829 end_recording_case_labels ();
7830 return 0;
7831 }
7832
7833 namespace {
7834
7835 const pass_data pass_data_split_crit_edges =
7836 {
7837 GIMPLE_PASS, /* type */
7838 "crited", /* name */
7839 OPTGROUP_NONE, /* optinfo_flags */
7840 false, /* has_gate */
7841 true, /* has_execute */
7842 TV_TREE_SPLIT_EDGES, /* tv_id */
7843 PROP_cfg, /* properties_required */
7844 PROP_no_crit_edges, /* properties_provided */
7845 0, /* properties_destroyed */
7846 0, /* todo_flags_start */
7847 TODO_verify_flow, /* todo_flags_finish */
7848 };
7849
7850 class pass_split_crit_edges : public gimple_opt_pass
7851 {
7852 public:
7853 pass_split_crit_edges (gcc::context *ctxt)
7854 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
7855 {}
7856
7857 /* opt_pass methods: */
7858 unsigned int execute () { return split_critical_edges (); }
7859
7860 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
7861 }; // class pass_split_crit_edges
7862
7863 } // anon namespace
7864
7865 gimple_opt_pass *
7866 make_pass_split_crit_edges (gcc::context *ctxt)
7867 {
7868 return new pass_split_crit_edges (ctxt);
7869 }
7870
7871
7872 /* Build a ternary operation and gimplify it. Emit code before GSI.
7873 Return the gimple_val holding the result. */
7874
7875 tree
7876 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7877 tree type, tree a, tree b, tree c)
7878 {
7879 tree ret;
7880 location_t loc = gimple_location (gsi_stmt (*gsi));
7881
7882 ret = fold_build3_loc (loc, code, type, a, b, c);
7883 STRIP_NOPS (ret);
7884
7885 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7886 GSI_SAME_STMT);
7887 }
7888
7889 /* Build a binary operation and gimplify it. Emit code before GSI.
7890 Return the gimple_val holding the result. */
7891
7892 tree
7893 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7894 tree type, tree a, tree b)
7895 {
7896 tree ret;
7897
7898 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7899 STRIP_NOPS (ret);
7900
7901 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7902 GSI_SAME_STMT);
7903 }
7904
7905 /* Build a unary operation and gimplify it. Emit code before GSI.
7906 Return the gimple_val holding the result. */
7907
7908 tree
7909 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7910 tree a)
7911 {
7912 tree ret;
7913
7914 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7915 STRIP_NOPS (ret);
7916
7917 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7918 GSI_SAME_STMT);
7919 }
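
/* As a usage sketch (names below are the caller's, not part of this
   API): to obtain the gimple value of (A + B) * C before the
   statement at GSI, a pass could write

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (&gsi, MULT_EXPR, type, sum, c);

   Each call folds its operands and forces the result into a GIMPLE
   value, emitting any helper statements before GSI.  */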
7920
7921
\f
/* Emit return warnings.  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  Warn if the function
     was declared noreturn (TREE_THIS_VOLATILE on a FUNCTION_DECL encodes
     the noreturn attribute) but a return path exists anyway.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          last = last_stmt (e->src);
          if ((gimple_code (last) == GIMPLE_RETURN
               || gimple_call_builtin_p (last, BUILT_IN_RETURN))
              && (location = gimple_location (last)) != UNKNOWN_LOCATION)
            break;
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
           && !TREE_NO_WARNING (cfun->decl)
           && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          gimple last = last_stmt (e->src);
          if (gimple_code (last) == GIMPLE_RETURN
              && gimple_return_retval (last) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (location == UNKNOWN_LOCATION)
                location = cfun->function_end_locus;
              warning_at (location, OPT_Wreturn_type,
                          "control reaches end of non-void function");
              TREE_NO_WARNING (cfun->decl) = 1;
              break;
            }
        }
    }
  return 0;
}

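/* Illustrative C inputs that trigger the two warnings above (examples
   only, not part of this file):

     __attribute__ ((noreturn)) void f (void) { return; }
       -> warning: 'noreturn' function does return

     int g (int x) { if (x) return 1; }
       -> warning: control reaches end of non-void function
          (with -Wreturn-type)  */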

/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
                                     edge *true_edge,
                                     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}

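/* A typical (hypothetical) caller pattern for the helper above:

     if (gimple_code (last_stmt (bb)) == GIMPLE_COND)
       {
         edge true_edge, false_edge;
         extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
       }

   after which true_edge->dest is the block reached when the condition
   holds and false_edge->dest the block reached otherwise.  */
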
namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_warn_function_return (); }

}; // class pass_warn_function_return

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}

/* Walk a gimplified function and warn about calls whose return value is
   ignored and whose function type has attribute ((warn_unused_result))
   set.  This is done before inlining, so we don't have to worry about
   that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (g));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (g));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;
          if (gimple_call_internal_p (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = gimple_call_fntype (g);

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD, "
                            "declared with attribute warn_unused_result",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute warn_unused_result");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}

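/* For reference, the kind of source this catches (illustrative only):

     __attribute__ ((warn_unused_result)) int must_check (void);

     void use (void)
     {
       must_check ();          <- triggers -Wunused-result
       (void) must_check ();   <- still warns: GCC intentionally does not
                                  let a cast to void silence this attribute
     }
   */
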
static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}

static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_unused_result (); }
  unsigned int execute () { return run_warn_unused_result (); }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because in between inlining and this fixup verify_flow_info would
   fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
                            ENTRY_BLOCK_PTR->count);

  ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
                                       count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl = is_gimple_call (stmt)
                      ? gimple_call_fndecl (stmt)
                      : NULL;
          if (decl)
            {
              int flags = gimple_call_flags (stmt);
              if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
                {
                  if (gimple_purge_dead_abnormal_call_edges (bb))
                    todo |= TODO_cleanup_cfg;

                  if (gimple_in_ssa_p (cfun))
                    {
                      todo |= TODO_update_ssa | TODO_cleanup_cfg;
                      update_stmt (stmt);
                    }
                }

              if (flags & ECF_NORETURN
                  && fixup_noreturn_call (stmt))
                todo |= TODO_cleanup_cfg;
            }

          if (maybe_clean_eh_stmt (stmt)
              && gimple_purge_dead_eh_edges (bb))
            todo |= TODO_cleanup_cfg;
        }

      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
         end with a control statement or a noreturn call, end it with
         a call to __builtin_unreachable.  This situation can occur
         when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
        {
          gimple stmt = last_stmt (bb);
          if (!stmt
              || (!is_ctrl_stmt (stmt)
                  && (!is_gimple_call (stmt)
                      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
            {
              stmt = gimple_build_call
                  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
              gimple_stmt_iterator gsi = gsi_last_bb (bb);
              gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
            }
        }
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}

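/* A worked example of the profile scaling above (numbers illustrative;
   REG_BR_PROB_BASE is 10000).  GCOV_COMPUTE_SCALE and apply_scale
   implement fixed-point arithmetic over that base:

     count_scale = node->count * 10000 / old entry count
                 = 500 * 10000 / 1000 = 5000

     apply_scale (count, 5000) = count * 5000 / 10000

   i.e. when the callgraph node's count has diverged from the CFG entry
   block's count (say, after inlining), every block and edge count is
   rescaled by the same ratio, here halved.  */
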
namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute () { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

void
gt_ggc_mx (edge_def *e)
{
  /* The BLOCK referenced by the edge's goto_locus is reachable only
     through the location, so mark it explicitly.  */
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}