1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "flags.h"
30 #include "function.h"
31 #include "ggc.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple.h"
34 #include "gimple-ssa.h"
35 #include "cgraph.h"
36 #include "tree-cfg.h"
37 #include "tree-phinodes.h"
38 #include "ssa-iterators.h"
39 #include "tree-ssanames.h"
40 #include "tree-ssa-loop.h"
41 #include "tree-into-ssa.h"
42 #include "tree-dfa.h"
43 #include "tree-ssa.h"
44 #include "tree-dump.h"
45 #include "tree-pass.h"
46 #include "diagnostic-core.h"
47 #include "except.h"
48 #include "cfgloop.h"
49 #include "tree-ssa-propagate.h"
50 #include "value-prof.h"
51 #include "pointer-set.h"
52 #include "tree-inline.h"
53 #include "target.h"
54 #include "tree-ssa-live.h"
55 #include "omp-low.h"
56 #include "tree-cfgcleanup.h"
57
58 /* This file contains functions for building the Control Flow Graph (CFG)
59 for a function tree. */
60
61 /* Local declarations. */
62
63 /* Initial capacity for the basic block array. */
64 static const int initial_cfg_capacity = 20;
65
66 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
67 which use a particular edge. The CASE_LABEL_EXPRs are chained together
68 via their CASE_CHAIN field, which we clear after we're done with the
69 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
70
71 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
72 update the case vector in response to edge redirections.
73
74 Right now this table is set up and torn down at key points in the
75 compilation process. It would be nice if we could make the table
76 more persistent. The key is getting notification of changes to
77 the CFG (particularly edge removal, creation and redirection). */
78
79 static struct pointer_map_t *edge_to_cases;
80
81 /* If we record edge_to_cases, this bitmap will hold indexes
82 of basic blocks that end in a GIMPLE_SWITCH which we touched
83 due to edge manipulations. */
84
85 static bitmap touched_switch_bbs;
86
87 /* CFG statistics. */
88 struct cfg_stats_d
89 {
90 long num_merged_labels;
91 };
92
93 static struct cfg_stats_d cfg_stats;
94
95 /* True if we found a computed goto while building basic blocks. */
96 static bool found_computed_goto;
97
98 /* Hash table to store the last discriminator assigned for each locus. */
99 struct locus_discrim_map
100 {
101 location_t locus;
102 int discriminator;
103 };
104
105 /* Hashtable helpers. */
106
107 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
108 {
109 typedef locus_discrim_map value_type;
110 typedef locus_discrim_map compare_type;
111 static inline hashval_t hash (const value_type *);
112 static inline bool equal (const value_type *, const compare_type *);
113 };
114
115 /* Trivial hash function for a location_t. ITEM is a pointer to
116 a hash table entry that maps a location_t to a discriminator. */
117
118 inline hashval_t
119 locus_discrim_hasher::hash (const value_type *item)
120 {
121 return LOCATION_LINE (item->locus);
122 }
123
124 /* Equality function for the locus-to-discriminator map. A and B
125 point to the two hash table entries to compare. */
126
127 inline bool
128 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
129 {
130 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
131 }
132
133 static hash_table <locus_discrim_hasher> discriminator_per_locus;
134
135 /* Basic blocks and flowgraphs. */
136 static void make_blocks (gimple_seq);
137 static void factor_computed_gotos (void);
138
139 /* Edges. */
140 static void make_edges (void);
141 static void assign_discriminators (void);
142 static void make_cond_expr_edges (basic_block);
143 static void make_gimple_switch_edges (basic_block);
144 static void make_goto_expr_edges (basic_block);
145 static void make_gimple_asm_edges (basic_block);
146 static edge gimple_redirect_edge_and_branch (edge, basic_block);
147 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
148 static unsigned int split_critical_edges (void);
149
150 /* Various helpers. */
151 static inline bool stmt_starts_bb_p (gimple, gimple);
152 static int gimple_verify_flow_info (void);
153 static void gimple_make_forwarder_block (edge);
154 static gimple first_non_label_stmt (basic_block);
155 static bool verify_gimple_transaction (gimple);
156
157 /* Flowgraph optimization and cleanup. */
158 static void gimple_merge_blocks (basic_block, basic_block);
159 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
160 static void remove_bb (basic_block);
161 static edge find_taken_edge_computed_goto (basic_block, tree);
162 static edge find_taken_edge_cond_expr (basic_block, tree);
163 static edge find_taken_edge_switch_expr (basic_block, tree);
164 static tree find_case_label_for_value (gimple, tree);
165
166 void
167 init_empty_tree_cfg_for_function (struct function *fn)
168 {
169 /* Initialize the basic block array. */
170 init_flow (fn);
171 profile_status_for_function (fn) = PROFILE_ABSENT;
172 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
173 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
174 vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
175 vec_safe_grow_cleared (basic_block_info_for_function (fn),
176 initial_cfg_capacity);
177
178 /* Build a mapping of labels to their associated blocks. */
179 vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
180 vec_safe_grow_cleared (label_to_block_map_for_function (fn),
181 initial_cfg_capacity);
182
183 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
184 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
185 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
186 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
187
188 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
189 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
190 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
191 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
192 }
193
194 void
195 init_empty_tree_cfg (void)
196 {
197 init_empty_tree_cfg_for_function (cfun);
198 }
199
200 /*---------------------------------------------------------------------------
201 Create basic blocks
202 ---------------------------------------------------------------------------*/
203
204 /* Entry point to the CFG builder for trees. SEQ is the sequence of
205 statements to be added to the flowgraph. */
206
207 static void
208 build_gimple_cfg (gimple_seq seq)
209 {
210 /* Register specific gimple functions. */
211 gimple_register_cfg_hooks ();
212
213 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
214
215 init_empty_tree_cfg ();
216
217 found_computed_goto = false;
218 make_blocks (seq);
219
220 /* Computed gotos are hell to deal with, especially if there are
221 lots of them with a large number of destinations. So we factor
222 them to a common computed goto location before we build the
223 edge list. After we convert back to normal form, we will un-factor
224 the computed gotos since factoring introduces an unwanted jump. */
225 if (found_computed_goto)
226 factor_computed_gotos ();
227
228 /* Make sure there is always at least one block, even if it's empty. */
229 if (n_basic_blocks == NUM_FIXED_BLOCKS)
230 create_empty_bb (ENTRY_BLOCK_PTR);
231
232 /* Adjust the size of the array. */
233 if (basic_block_info->length () < (size_t) n_basic_blocks)
234 vec_safe_grow_cleared (basic_block_info, n_basic_blocks);
235
236 /* To speed up statement iterator walks, we first purge dead labels. */
237 cleanup_dead_labels ();
238
239 /* Group case nodes to reduce the number of edges.
240 We do this after cleaning up dead labels because otherwise we miss
241 a lot of obvious case merging opportunities. */
242 group_case_labels ();
243
244 /* Create the edges of the flowgraph. */
245 discriminator_per_locus.create (13);
246 make_edges ();
247 assign_discriminators ();
248 cleanup_dead_labels ();
249 discriminator_per_locus.dispose ();
250 }
251
252 static unsigned int
253 execute_build_cfg (void)
254 {
255 gimple_seq body = gimple_body (current_function_decl);
256
257 build_gimple_cfg (body);
258 gimple_set_body (current_function_decl, NULL);
259 if (dump_file && (dump_flags & TDF_DETAILS))
260 {
261 fprintf (dump_file, "Scope blocks:\n");
262 dump_scope_blocks (dump_file, dump_flags);
263 }
264 cleanup_tree_cfg ();
265 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
266 return 0;
267 }
268
269 namespace {
270
271 const pass_data pass_data_build_cfg =
272 {
273 GIMPLE_PASS, /* type */
274 "cfg", /* name */
275 OPTGROUP_NONE, /* optinfo_flags */
276 false, /* has_gate */
277 true, /* has_execute */
278 TV_TREE_CFG, /* tv_id */
279 PROP_gimple_leh, /* properties_required */
280 ( PROP_cfg | PROP_loops ), /* properties_provided */
281 0, /* properties_destroyed */
282 0, /* todo_flags_start */
283 TODO_verify_stmts, /* todo_flags_finish */
284 };
285
286 class pass_build_cfg : public gimple_opt_pass
287 {
288 public:
289 pass_build_cfg (gcc::context *ctxt)
290 : gimple_opt_pass (pass_data_build_cfg, ctxt)
291 {}
292
293 /* opt_pass methods: */
294 unsigned int execute () { return execute_build_cfg (); }
295
296 }; // class pass_build_cfg
297
298 } // anon namespace
299
300 gimple_opt_pass *
301 make_pass_build_cfg (gcc::context *ctxt)
302 {
303 return new pass_build_cfg (ctxt);
304 }
305
306
307 /* Return true if T is a computed goto. */
308
309 static bool
310 computed_goto_p (gimple t)
311 {
312 return (gimple_code (t) == GIMPLE_GOTO
313 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
314 }
315
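/* Illustrative: the GNU C extension "goto *ptr;" lowers to a GIMPLE_GOTO
   whose destination is a pointer value rather than a LABEL_DECL, so it
   is classified as computed here; a plain "goto lab;" is not.  */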
316
317 /* Search the CFG for any computed gotos. If found, factor them to a
318 common computed goto site. Also record the location of that site so
319 that we can un-factor the gotos after we have converted back to
320 normal form. */
321
322 static void
323 factor_computed_gotos (void)
324 {
325 basic_block bb;
326 tree factored_label_decl = NULL;
327 tree var = NULL;
328 gimple factored_computed_goto_label = NULL;
329 gimple factored_computed_goto = NULL;
330
331 /* We know there are one or more computed gotos in this function.
332 Examine the last statement in each basic block to see if the block
333 ends with a computed goto. */
334
335 FOR_EACH_BB (bb)
336 {
337 gimple_stmt_iterator gsi = gsi_last_bb (bb);
338 gimple last;
339
340 if (gsi_end_p (gsi))
341 continue;
342
343 last = gsi_stmt (gsi);
344
345 /* Ignore the computed goto we create when we factor the original
346 computed gotos. */
347 if (last == factored_computed_goto)
348 continue;
349
350 /* If the last statement is a computed goto, factor it. */
351 if (computed_goto_p (last))
352 {
353 gimple assignment;
354
355 /* The first time we find a computed goto we need to create
356 the factored goto block and the variable each original
357 computed goto will use for its goto destination. */
358 if (!factored_computed_goto)
359 {
360 basic_block new_bb = create_empty_bb (bb);
361 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
362
363 /* Create the destination of the factored goto. Each original
364 computed goto will put its desired destination into this
365 variable and jump to the label we create immediately
366 below. */
367 var = create_tmp_var (ptr_type_node, "gotovar");
368
369 /* Build a label for the new block which will contain the
370 factored computed goto. */
371 factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
372 factored_computed_goto_label
373 = gimple_build_label (factored_label_decl);
374 gsi_insert_after (&new_gsi, factored_computed_goto_label,
375 GSI_NEW_STMT);
376
377 /* Build our new computed goto. */
378 factored_computed_goto = gimple_build_goto (var);
379 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
380 }
381
382 /* Copy the original computed goto's destination into VAR. */
383 assignment = gimple_build_assign (var, gimple_goto_dest (last));
384 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
385
386 /* And re-vector the computed goto to the new destination. */
387 gimple_goto_set_dest (last, factored_label_decl);
388 }
389 }
390 }
391
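/* A sketch of the transformation, in pseudo-GIMPLE.  Before factoring:

     bb1: ...  goto *dest_1;
     bb2: ...  goto *dest_2;

   After factoring (GOTOVAR and the artificial label are the objects
   created by the function above):

     bb1: ...  gotovar = dest_1;  goto <factored>;
     bb2: ...  gotovar = dest_2;  goto <factored>;
     bb3: <factored>:  goto *gotovar;

   Only the single factored block then needs abnormal edges to every
   potential destination label.  */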
392
393 /* Build a flowgraph for the sequence of stmts SEQ. */
394
395 static void
396 make_blocks (gimple_seq seq)
397 {
398 gimple_stmt_iterator i = gsi_start (seq);
399 gimple stmt = NULL;
400 bool start_new_block = true;
401 bool first_stmt_of_seq = true;
402 basic_block bb = ENTRY_BLOCK_PTR;
403
404 while (!gsi_end_p (i))
405 {
406 gimple prev_stmt;
407
408 prev_stmt = stmt;
409 stmt = gsi_stmt (i);
410
411 /* If the statement starts a new basic block or if we have determined
412 in a previous pass that we need to create a new block for STMT, do
413 so now. */
414 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
415 {
416 if (!first_stmt_of_seq)
417 gsi_split_seq_before (&i, &seq);
418 bb = create_basic_block (seq, NULL, bb);
419 start_new_block = false;
420 }
421
422 /* Now add STMT to BB and create the subgraphs for special statement
423 codes. */
424 gimple_set_bb (stmt, bb);
425
426 if (computed_goto_p (stmt))
427 found_computed_goto = true;
428
429 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
430 next iteration. */
431 if (stmt_ends_bb_p (stmt))
432 {
433 /* If the stmt can make an abnormal goto, use a new temporary
434 for the assignment to the LHS. This makes sure the old value
435 of the LHS is available on the abnormal edge. Otherwise
436 we will end up with overlapping life-ranges for abnormal
437 SSA names. */
438 if (gimple_has_lhs (stmt)
439 && stmt_can_make_abnormal_goto (stmt)
440 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
441 {
442 tree lhs = gimple_get_lhs (stmt);
443 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
444 gimple s = gimple_build_assign (lhs, tmp);
445 gimple_set_location (s, gimple_location (stmt));
446 gimple_set_block (s, gimple_block (stmt));
447 gimple_set_lhs (stmt, tmp);
448 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
449 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
450 DECL_GIMPLE_REG_P (tmp) = 1;
451 gsi_insert_after (&i, s, GSI_SAME_STMT);
452 }
453 start_new_block = true;
454 }
455
456 gsi_next (&i);
457 first_stmt_of_seq = false;
458 }
459 }
460
461
462 /* Create and return a new empty basic block after bb AFTER. */
463
464 static basic_block
465 create_bb (void *h, void *e, basic_block after)
466 {
467 basic_block bb;
468
469 gcc_assert (!e);
470
471 /* Create and initialize a new basic block. Since alloc_block uses
472 GC allocation that clears memory to allocate a basic block, we do
473 not have to clear the newly allocated basic block here. */
474 bb = alloc_block ();
475
476 bb->index = last_basic_block;
477 bb->flags = BB_NEW;
478 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
479
480 /* Add the new block to the linked list of blocks. */
481 link_block (bb, after);
482
483 /* Grow the basic block array if needed. */
484 if ((size_t) last_basic_block == basic_block_info->length ())
485 {
486 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
487 vec_safe_grow_cleared (basic_block_info, new_size);
488 }
489
490 /* Add the newly created block to the array. */
491 SET_BASIC_BLOCK (last_basic_block, bb);
492
493 n_basic_blocks++;
494 last_basic_block++;
495
496 return bb;
497 }
498
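/* The growth step above adds roughly 25% slack; e.g. with
   last_basic_block == 20 the new size is 20 + (20 + 3) / 4 == 25.  */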
499
500 /*---------------------------------------------------------------------------
501 Edge creation
502 ---------------------------------------------------------------------------*/
503
504 /* Fold COND_EXPR_COND of each COND_EXPR. */
505
506 void
507 fold_cond_expr_cond (void)
508 {
509 basic_block bb;
510
511 FOR_EACH_BB (bb)
512 {
513 gimple stmt = last_stmt (bb);
514
515 if (stmt && gimple_code (stmt) == GIMPLE_COND)
516 {
517 location_t loc = gimple_location (stmt);
518 tree cond;
519 bool zerop, onep;
520
521 fold_defer_overflow_warnings ();
522 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
523 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
524 if (cond)
525 {
526 zerop = integer_zerop (cond);
527 onep = integer_onep (cond);
528 }
529 else
530 zerop = onep = false;
531
532 fold_undefer_overflow_warnings (zerop || onep,
533 stmt,
534 WARN_STRICT_OVERFLOW_CONDITIONAL);
535 if (zerop)
536 gimple_cond_make_false (stmt);
537 else if (onep)
538 gimple_cond_make_true (stmt);
539 }
540 }
541 }
542
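/* Illustrative: a GIMPLE_COND such as "if (7 > 3)" folds to a true
   constant, so it is canonicalized by gimple_cond_make_true into
   "if (1)"; "if (7 < 3)" becomes "if (0)" via gimple_cond_make_false.  */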
543 /* Join all the blocks in the flowgraph by creating edges between them. */
544
545 static void
546 make_edges (void)
547 {
548 basic_block bb;
549 struct omp_region *cur_region = NULL;
550
551 /* Create an edge from entry to the first block with executable
552 statements in it. */
553 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
554
555 /* Traverse the basic block array placing edges. */
556 FOR_EACH_BB (bb)
557 {
558 gimple last = last_stmt (bb);
559 bool fallthru;
560
561 if (last)
562 {
563 enum gimple_code code = gimple_code (last);
564 switch (code)
565 {
566 case GIMPLE_GOTO:
567 make_goto_expr_edges (bb);
568 fallthru = false;
569 break;
570 case GIMPLE_RETURN:
571 make_edge (bb, EXIT_BLOCK_PTR, 0);
572 fallthru = false;
573 break;
574 case GIMPLE_COND:
575 make_cond_expr_edges (bb);
576 fallthru = false;
577 break;
578 case GIMPLE_SWITCH:
579 make_gimple_switch_edges (bb);
580 fallthru = false;
581 break;
582 case GIMPLE_RESX:
583 make_eh_edges (last);
584 fallthru = false;
585 break;
586 case GIMPLE_EH_DISPATCH:
587 fallthru = make_eh_dispatch_edges (last);
588 break;
589
590 case GIMPLE_CALL:
591 /* If this function receives a nonlocal goto, then we need to
592 make edges from this call site to all the nonlocal goto
593 handlers. */
594 if (stmt_can_make_abnormal_goto (last))
595 make_abnormal_goto_edges (bb, true);
596
597 /* If this statement has reachable exception handlers, then
598 create abnormal edges to them. */
599 make_eh_edges (last);
600
601 /* BUILTIN_RETURN is really a return statement. */
602 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
603 make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
604 /* Some calls are known not to return. */
605 else
606 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
607 break;
608
609 case GIMPLE_ASSIGN:
610 /* A GIMPLE_ASSIGN may throw internally and thus be considered
611 control-altering. */
612 if (is_ctrl_altering_stmt (last))
613 make_eh_edges (last);
614 fallthru = true;
615 break;
616
617 case GIMPLE_ASM:
618 make_gimple_asm_edges (bb);
619 fallthru = true;
620 break;
621
622 CASE_GIMPLE_OMP:
623 fallthru = make_gimple_omp_edges (bb, &cur_region);
624 break;
625
626 case GIMPLE_TRANSACTION:
627 {
628 tree abort_label = gimple_transaction_label (last);
629 if (abort_label)
630 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
631 fallthru = true;
632 }
633 break;
634
635 default:
636 gcc_assert (!stmt_ends_bb_p (last));
637 fallthru = true;
638 }
639 }
640 else
641 fallthru = true;
642
643 if (fallthru)
644 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
645 }
646
647 free_omp_regions ();
648
649 /* Fold COND_EXPR_COND of each COND_EXPR. */
650 fold_cond_expr_cond ();
651 }
652
653 /* Find the next available discriminator value for LOCUS. The
654 discriminator distinguishes among several basic blocks that
655 share a common locus, allowing for more accurate sample-based
656 profiling. */
657
658 static int
659 next_discriminator_for_locus (location_t locus)
660 {
661 struct locus_discrim_map item;
662 struct locus_discrim_map **slot;
663
664 item.locus = locus;
665 item.discriminator = 0;
666 slot = discriminator_per_locus.find_slot_with_hash (
667 &item, LOCATION_LINE (locus), INSERT);
668 gcc_assert (slot);
669 if (*slot == HTAB_EMPTY_ENTRY)
670 {
671 *slot = XNEW (struct locus_discrim_map);
672 gcc_assert (*slot);
673 (*slot)->locus = locus;
674 (*slot)->discriminator = 0;
675 }
676 (*slot)->discriminator++;
677 return (*slot)->discriminator;
678 }
679
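/* A minimal sketch (hypothetical helper, assuming discriminator_per_locus
   has been created as in build_gimple_cfg): repeated queries for the same
   source line return consecutive discriminators.  */

static void ATTRIBUTE_UNUSED
discriminator_sketch (location_t loc)
{
  int d1 = next_discriminator_for_locus (loc); /* First use of LOC's line.  */
  int d2 = next_discriminator_for_locus (loc); /* Next use of the same line.  */
  gcc_assert (d2 == d1 + 1);
}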
680 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
681
682 static bool
683 same_line_p (location_t locus1, location_t locus2)
684 {
685 expanded_location from, to;
686
687 if (locus1 == locus2)
688 return true;
689
690 from = expand_location (locus1);
691 to = expand_location (locus2);
692
693 if (from.line != to.line)
694 return false;
695 if (from.file == to.file)
696 return true;
697 return (from.file != NULL
698 && to.file != NULL
699 && filename_cmp (from.file, to.file) == 0);
700 }
701
702 /* Assign discriminators to each basic block. */
703
704 static void
705 assign_discriminators (void)
706 {
707 basic_block bb;
708
709 FOR_EACH_BB (bb)
710 {
711 edge e;
712 edge_iterator ei;
713 gimple last = last_stmt (bb);
714 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
715
716 if (locus == UNKNOWN_LOCATION)
717 continue;
718
719 FOR_EACH_EDGE (e, ei, bb->succs)
720 {
721 gimple first = first_non_label_stmt (e->dest);
722 gimple last = last_stmt (e->dest);
723 if ((first && same_line_p (locus, gimple_location (first)))
724 || (last && same_line_p (locus, gimple_location (last))))
725 {
726 if (e->dest->discriminator != 0 && bb->discriminator == 0)
727 bb->discriminator = next_discriminator_for_locus (locus);
728 else
729 e->dest->discriminator = next_discriminator_for_locus (locus);
730 }
731 }
732 }
733 }
734
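/* The intent, by example: for "if (p) x = 1; else x = 2;" written on
   one source line, both arms share that line, so one of the successor
   blocks receives a fresh discriminator and sample-based profiles can
   attribute counts to each arm separately.  */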
735 /* Create the edges for a GIMPLE_COND starting at block BB. */
736
737 static void
738 make_cond_expr_edges (basic_block bb)
739 {
740 gimple entry = last_stmt (bb);
741 gimple then_stmt, else_stmt;
742 basic_block then_bb, else_bb;
743 tree then_label, else_label;
744 edge e;
745
746 gcc_assert (entry);
747 gcc_assert (gimple_code (entry) == GIMPLE_COND);
748
749 /* Entry basic blocks for each component. */
750 then_label = gimple_cond_true_label (entry);
751 else_label = gimple_cond_false_label (entry);
752 then_bb = label_to_block (then_label);
753 else_bb = label_to_block (else_label);
754 then_stmt = first_stmt (then_bb);
755 else_stmt = first_stmt (else_bb);
756
757 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
758 e->goto_locus = gimple_location (then_stmt);
759 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
760 if (e)
761 e->goto_locus = gimple_location (else_stmt);
762
763 /* We do not need the labels anymore. */
764 gimple_cond_set_true_label (entry, NULL_TREE);
765 gimple_cond_set_false_label (entry, NULL_TREE);
766 }
767
768
769 /* Called for each element in the hash table (P) as we delete the
770 edge to cases hash table.
771
772 Clear all the TREE_CHAINs to prevent problems with copying of
773 SWITCH_EXPRs and structure sharing rules, then free the hash table
774 element. */
775
776 static bool
777 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
778 void *data ATTRIBUTE_UNUSED)
779 {
780 tree t, next;
781
782 for (t = (tree) *value; t; t = next)
783 {
784 next = CASE_CHAIN (t);
785 CASE_CHAIN (t) = NULL;
786 }
787
788 *value = NULL;
789 return true;
790 }
791
792 /* Start recording information mapping edges to case labels. */
793
794 void
795 start_recording_case_labels (void)
796 {
797 gcc_assert (edge_to_cases == NULL);
798 edge_to_cases = pointer_map_create ();
799 touched_switch_bbs = BITMAP_ALLOC (NULL);
800 }
801
802 /* Return nonzero if we are recording information for case labels. */
803
804 static bool
805 recording_case_labels_p (void)
806 {
807 return (edge_to_cases != NULL);
808 }
809
810 /* Stop recording information mapping edges to case labels and
811 remove any information we have recorded. */
812 void
813 end_recording_case_labels (void)
814 {
815 bitmap_iterator bi;
816 unsigned i;
817 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
818 pointer_map_destroy (edge_to_cases);
819 edge_to_cases = NULL;
820 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
821 {
822 basic_block bb = BASIC_BLOCK (i);
823 if (bb)
824 {
825 gimple stmt = last_stmt (bb);
826 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
827 group_case_labels_stmt (stmt);
828 }
829 }
830 BITMAP_FREE (touched_switch_bbs);
831 }
832
833 /* If we are inside a {start,end}_recording_cases block, then return
834 a chain of CASE_LABEL_EXPRs from T which reference E.
835
836 Otherwise return NULL. */
837
838 static tree
839 get_cases_for_edge (edge e, gimple t)
840 {
841 void **slot;
842 size_t i, n;
843
844 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
845 chains available. Return NULL so the caller can detect this case. */
846 if (!recording_case_labels_p ())
847 return NULL;
848
849 slot = pointer_map_contains (edge_to_cases, e);
850 if (slot)
851 return (tree) *slot;
852
853 /* If we did not find E in the hash table, then this must be the first
854 time we have been queried for information about E & T. Add all the
855 elements from T to the hash table then perform the query again. */
856
857 n = gimple_switch_num_labels (t);
858 for (i = 0; i < n; i++)
859 {
860 tree elt = gimple_switch_label (t, i);
861 tree lab = CASE_LABEL (elt);
862 basic_block label_bb = label_to_block (lab);
863 edge this_edge = find_edge (e->src, label_bb);
864
865 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
866 a new chain. */
867 slot = pointer_map_insert (edge_to_cases, this_edge);
868 CASE_CHAIN (elt) = (tree) *slot;
869 *slot = elt;
870 }
871
872 return (tree) *pointer_map_contains (edge_to_cases, e);
873 }
874
875 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
876
877 static void
878 make_gimple_switch_edges (basic_block bb)
879 {
880 gimple entry = last_stmt (bb);
881 size_t i, n;
882
883 n = gimple_switch_num_labels (entry);
884
885 for (i = 0; i < n; ++i)
886 {
887 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
888 basic_block label_bb = label_to_block (lab);
889 make_edge (bb, label_bb, 0);
890 }
891 }
892
893
894 /* Return the basic block holding label DEST. */
895
896 basic_block
897 label_to_block_fn (struct function *ifun, tree dest)
898 {
899 int uid = LABEL_DECL_UID (dest);
900
901 /* We would die hard when faced with an undefined label. Emit a label to
902 the very first basic block. This will hopefully make even the dataflow
903 and undefined variable warnings quite right. */
904 if (seen_error () && uid < 0)
905 {
906 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
907 gimple stmt;
908
909 stmt = gimple_build_label (dest);
910 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
911 uid = LABEL_DECL_UID (dest);
912 }
913 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
914 return NULL;
915 return (*ifun->cfg->x_label_to_block_map)[uid];
916 }
917
918 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
919 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
920
921 void
922 make_abnormal_goto_edges (basic_block bb, bool for_call)
923 {
924 basic_block target_bb;
925 gimple_stmt_iterator gsi;
926
927 FOR_EACH_BB (target_bb)
928 {
929 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
930 {
931 gimple label_stmt = gsi_stmt (gsi);
932 tree target;
933
934 if (gimple_code (label_stmt) != GIMPLE_LABEL)
935 break;
936
937 target = gimple_label_label (label_stmt);
938
939 /* Make an edge to every label block that has been marked as a
940 potential target for a computed goto or a non-local goto. */
941 if ((FORCED_LABEL (target) && !for_call)
942 || (DECL_NONLOCAL (target) && for_call))
943 {
944 make_edge (bb, target_bb, EDGE_ABNORMAL);
945 break;
946 }
947 }
948 if (!gsi_end_p (gsi)
949 && is_gimple_debug (gsi_stmt (gsi)))
950 gsi_next_nondebug (&gsi);
951 if (!gsi_end_p (gsi))
952 {
953 /* Make an edge to every setjmp-like call. */
954 gimple call_stmt = gsi_stmt (gsi);
955 if (is_gimple_call (call_stmt)
956 && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
957 make_edge (bb, target_bb, EDGE_ABNORMAL);
958 }
959 }
960 }
961
962 /* Create edges for a goto statement at block BB. */
963
964 static void
965 make_goto_expr_edges (basic_block bb)
966 {
967 gimple_stmt_iterator last = gsi_last_bb (bb);
968 gimple goto_t = gsi_stmt (last);
969
970 /* A simple GOTO creates normal edges. */
971 if (simple_goto_p (goto_t))
972 {
973 tree dest = gimple_goto_dest (goto_t);
974 basic_block label_bb = label_to_block (dest);
975 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
976 e->goto_locus = gimple_location (goto_t);
977 gsi_remove (&last, true);
978 return;
979 }
980
981 /* A computed GOTO creates abnormal edges. */
982 make_abnormal_goto_edges (bb, false);
983 }
984
985 /* Create edges for an asm statement with labels at block BB. */
986
987 static void
988 make_gimple_asm_edges (basic_block bb)
989 {
990 gimple stmt = last_stmt (bb);
991 int i, n = gimple_asm_nlabels (stmt);
992
993 for (i = 0; i < n; ++i)
994 {
995 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
996 basic_block label_bb = label_to_block (label);
997 make_edge (bb, label_bb, 0);
998 }
999 }
1000
1001 /*---------------------------------------------------------------------------
1002 Flowgraph analysis
1003 ---------------------------------------------------------------------------*/
1004
1005 /* Cleanup useless labels in basic blocks. This is something we wish
1006 to do early because it allows us to group case labels before creating
1007 the edges for the CFG, and it speeds up block statement iterators in
1008 all passes later on.
1009 We rerun this pass after CFG is created, to get rid of the labels that
1010 are no longer referenced. After that we do not run it any more, since
1011 (almost) no new labels should be created. */
1012
1013 /* A map from basic block index to the leading label of that block. */
1014 static struct label_record
1015 {
1016 /* The label. */
1017 tree label;
1018
1019 /* True if the label is referenced from somewhere. */
1020 bool used;
1021 } *label_for_bb;
1022
1023 /* Given LABEL return the first label in the same basic block. */
1024
1025 static tree
1026 main_block_label (tree label)
1027 {
1028 basic_block bb = label_to_block (label);
1029 tree main_label = label_for_bb[bb->index].label;
1030
1031 /* label_to_block may have inserted an undefined label into the chain. */
1032 if (!main_label)
1033 {
1034 label_for_bb[bb->index].label = label;
1035 main_label = label;
1036 }
1037
1038 label_for_bb[bb->index].used = true;
1039 return main_label;
1040 }
1041
1042 /* Clean up redundant labels within the exception tree. */
1043
1044 static void
1045 cleanup_dead_labels_eh (void)
1046 {
1047 eh_landing_pad lp;
1048 eh_region r;
1049 tree lab;
1050 int i;
1051
1052 if (cfun->eh == NULL)
1053 return;
1054
1055 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1056 if (lp && lp->post_landing_pad)
1057 {
1058 lab = main_block_label (lp->post_landing_pad);
1059 if (lab != lp->post_landing_pad)
1060 {
1061 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1062 EH_LANDING_PAD_NR (lab) = lp->index;
1063 }
1064 }
1065
1066 FOR_ALL_EH_REGION (r)
1067 switch (r->type)
1068 {
1069 case ERT_CLEANUP:
1070 case ERT_MUST_NOT_THROW:
1071 break;
1072
1073 case ERT_TRY:
1074 {
1075 eh_catch c;
1076 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1077 {
1078 lab = c->label;
1079 if (lab)
1080 c->label = main_block_label (lab);
1081 }
1082 }
1083 break;
1084
1085 case ERT_ALLOWED_EXCEPTIONS:
1086 lab = r->u.allowed.label;
1087 if (lab)
1088 r->u.allowed.label = main_block_label (lab);
1089 break;
1090 }
1091 }
1092
1093
1094 /* Cleanup redundant labels. This is a three-step process:
1095 1) Find the leading label for each block.
1096 2) Redirect all references to labels to the leading labels.
1097 3) Cleanup all useless labels. */
1098
1099 void
1100 cleanup_dead_labels (void)
1101 {
1102 basic_block bb;
1103 label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
1104
1105 /* Find a suitable label for each block. We use the first user-defined
1106 label if there is one, or otherwise just the first label we see. */
1107 FOR_EACH_BB (bb)
1108 {
1109 gimple_stmt_iterator i;
1110
1111 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1112 {
1113 tree label;
1114 gimple stmt = gsi_stmt (i);
1115
1116 if (gimple_code (stmt) != GIMPLE_LABEL)
1117 break;
1118
1119 label = gimple_label_label (stmt);
1120
1121 /* If we have not yet seen a label for the current block,
1122 remember this one and see if there are more labels. */
1123 if (!label_for_bb[bb->index].label)
1124 {
1125 label_for_bb[bb->index].label = label;
1126 continue;
1127 }
1128
1129 /* If we did see a label for the current block already, but it
1130 is an artificially created label, replace it if the current
1131 label is a user defined label. */
1132 if (!DECL_ARTIFICIAL (label)
1133 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1134 {
1135 label_for_bb[bb->index].label = label;
1136 break;
1137 }
1138 }
1139 }
1140
1141 /* Now redirect all jumps/branches to the selected label.
1142 First do so for each block ending in a control statement. */
1143 FOR_EACH_BB (bb)
1144 {
1145 gimple stmt = last_stmt (bb);
1146 tree label, new_label;
1147
1148 if (!stmt)
1149 continue;
1150
1151 switch (gimple_code (stmt))
1152 {
1153 case GIMPLE_COND:
1154 label = gimple_cond_true_label (stmt);
1155 if (label)
1156 {
1157 new_label = main_block_label (label);
1158 if (new_label != label)
1159 gimple_cond_set_true_label (stmt, new_label);
1160 }
1161
1162 label = gimple_cond_false_label (stmt);
1163 if (label)
1164 {
1165 new_label = main_block_label (label);
1166 if (new_label != label)
1167 gimple_cond_set_false_label (stmt, new_label);
1168 }
1169 break;
1170
1171 case GIMPLE_SWITCH:
1172 {
1173 size_t i, n = gimple_switch_num_labels (stmt);
1174
1175 /* Replace all destination labels. */
1176 for (i = 0; i < n; ++i)
1177 {
1178 tree case_label = gimple_switch_label (stmt, i);
1179 label = CASE_LABEL (case_label);
1180 new_label = main_block_label (label);
1181 if (new_label != label)
1182 CASE_LABEL (case_label) = new_label;
1183 }
1184 break;
1185 }
1186
1187 case GIMPLE_ASM:
1188 {
1189 int i, n = gimple_asm_nlabels (stmt);
1190
1191 for (i = 0; i < n; ++i)
1192 {
1193 tree cons = gimple_asm_label_op (stmt, i);
1194 tree label = main_block_label (TREE_VALUE (cons));
1195 TREE_VALUE (cons) = label;
1196 }
1197 break;
1198 }
1199
1200 /* We have to handle gotos until they're removed, and we don't
1201 remove them until after we've created the CFG edges. */
1202 case GIMPLE_GOTO:
1203 if (!computed_goto_p (stmt))
1204 {
1205 label = gimple_goto_dest (stmt);
1206 new_label = main_block_label (label);
1207 if (new_label != label)
1208 gimple_goto_set_dest (stmt, new_label);
1209 }
1210 break;
1211
1212 case GIMPLE_TRANSACTION:
1213 {
1214 tree label = gimple_transaction_label (stmt);
1215 if (label)
1216 {
1217 tree new_label = main_block_label (label);
1218 if (new_label != label)
1219 gimple_transaction_set_label (stmt, new_label);
1220 }
1221 }
1222 break;
1223
1224 default:
1225 break;
1226 }
1227 }
1228
1229 /* Do the same for the exception region tree labels. */
1230 cleanup_dead_labels_eh ();
1231
1232 /* Finally, purge dead labels. All user-defined labels and labels that
1233 can be the target of non-local gotos and labels which have their
1234 address taken are preserved. */
1235 FOR_EACH_BB (bb)
1236 {
1237 gimple_stmt_iterator i;
1238 tree label_for_this_bb = label_for_bb[bb->index].label;
1239
1240 if (!label_for_this_bb)
1241 continue;
1242
1243 /* If the main label of the block is unused, we may still remove it. */
1244 if (!label_for_bb[bb->index].used)
1245 label_for_this_bb = NULL;
1246
1247 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1248 {
1249 tree label;
1250 gimple stmt = gsi_stmt (i);
1251
1252 if (gimple_code (stmt) != GIMPLE_LABEL)
1253 break;
1254
1255 label = gimple_label_label (stmt);
1256
1257 if (label == label_for_this_bb
1258 || !DECL_ARTIFICIAL (label)
1259 || DECL_NONLOCAL (label)
1260 || FORCED_LABEL (label))
1261 gsi_next (&i);
1262 else
1263 gsi_remove (&i, true);
1264 }
1265 }
1266
1267 free (label_for_bb);
1268 }
1269
1270 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1271 the ones jumping to the same label.
1272 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
1273
1274 void
1275 group_case_labels_stmt (gimple stmt)
1276 {
1277 int old_size = gimple_switch_num_labels (stmt);
1278 int i, j, new_size = old_size;
1279 basic_block default_bb = NULL;
1280
1281 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1282
1283 /* Look for possible opportunities to merge cases. */
1284 i = 1;
1285 while (i < old_size)
1286 {
1287 tree base_case, base_high;
1288 basic_block base_bb;
1289
1290 base_case = gimple_switch_label (stmt, i);
1291
1292 gcc_assert (base_case);
1293 base_bb = label_to_block (CASE_LABEL (base_case));
1294
1295 /* Discard cases that have the same destination as the
1296 default case. */
1297 if (base_bb == default_bb)
1298 {
1299 gimple_switch_set_label (stmt, i, NULL_TREE);
1300 i++;
1301 new_size--;
1302 continue;
1303 }
1304
1305 base_high = CASE_HIGH (base_case)
1306 ? CASE_HIGH (base_case)
1307 : CASE_LOW (base_case);
1308 i++;
1309
1310 /* Try to merge case labels. Break out when we reach the end
1311 of the label vector or when we cannot merge the next case
1312 label with the current one. */
1313 while (i < old_size)
1314 {
1315 tree merge_case = gimple_switch_label (stmt, i);
1316 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1317 double_int bhp1 = tree_to_double_int (base_high) + double_int_one;
1318
1319 /* Merge the cases if they jump to the same place,
1320 and their ranges are consecutive. */
1321 if (merge_bb == base_bb
1322 && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
1323 {
1324 base_high = CASE_HIGH (merge_case) ?
1325 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1326 CASE_HIGH (base_case) = base_high;
1327 gimple_switch_set_label (stmt, i, NULL_TREE);
1328 new_size--;
1329 i++;
1330 }
1331 else
1332 break;
1333 }
1334 }
1335
1336 /* Compress the case labels in the label vector, and adjust the
1337 length of the vector. */
1338 for (i = 0, j = 0; i < new_size; i++)
1339 {
1340 while (! gimple_switch_label (stmt, j))
1341 j++;
1342 gimple_switch_set_label (stmt, i,
1343 gimple_switch_label (stmt, j++));
1344 }
1345
1346 gcc_assert (new_size <= old_size);
1347 gimple_switch_set_num_labels (stmt, new_size);
1348 }
1349
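/* Source-level effect, as a sketch:

     switch (x) { case 1: case 2: case 3: foo (); break; default: break; }

   has its label vector compressed so that 1:, 2: and 3: become the
   single range 1..3:, while any case whose destination block is the
   same as the default's is dropped outright.  */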
1350 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1351 and scan the sorted vector of cases. Combine the ones jumping to the
1352 same label. */
1353
1354 void
1355 group_case_labels (void)
1356 {
1357 basic_block bb;
1358
1359 FOR_EACH_BB (bb)
1360 {
1361 gimple stmt = last_stmt (bb);
1362 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1363 group_case_labels_stmt (stmt);
1364 }
1365 }
1366
1367 /* Checks whether we can merge block B into block A. */
1368
1369 static bool
1370 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1371 {
1372 gimple stmt;
1373 gimple_stmt_iterator gsi;
1374
1375 if (!single_succ_p (a))
1376 return false;
1377
1378 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1379 return false;
1380
1381 if (single_succ (a) != b)
1382 return false;
1383
1384 if (!single_pred_p (b))
1385 return false;
1386
1387 if (b == EXIT_BLOCK_PTR)
1388 return false;
1389
1390 /* If A ends by a statement causing exceptions or something similar, we
1391 cannot merge the blocks. */
1392 stmt = last_stmt (a);
1393 if (stmt && stmt_ends_bb_p (stmt))
1394 return false;
1395
1396 /* Do not allow a block with only a non-local label to be merged. */
1397 if (stmt
1398 && gimple_code (stmt) == GIMPLE_LABEL
1399 && DECL_NONLOCAL (gimple_label_label (stmt)))
1400 return false;
1401
1402 /* Examine the labels at the beginning of B. */
1403 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1404 {
1405 tree lab;
1406 stmt = gsi_stmt (gsi);
1407 if (gimple_code (stmt) != GIMPLE_LABEL)
1408 break;
1409 lab = gimple_label_label (stmt);
1410
1411 /* Do not remove user-forced labels, or at -O0 any user labels. */
1412 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1413 return false;
1414 }
1415
1416 /* Protect the loop latches. */
1417 if (current_loops && b->loop_father->latch == b)
1418 return false;
1419
1420 /* It must be possible to eliminate all phi nodes in B. If ssa form
1421 is not up-to-date and a name-mapping is registered, we cannot eliminate
1422 any phis. Symbols marked for renaming are never a problem though. */
1423 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
1424 {
1425 gimple phi = gsi_stmt (gsi);
1426 /* Technically only new names matter. */
1427 if (name_registered_for_update_p (PHI_RESULT (phi)))
1428 return false;
1429 }
1430
1431 /* When not optimizing, don't merge if we'd lose goto_locus. */
1432 if (!optimize
1433 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1434 {
1435 location_t goto_locus = single_succ_edge (a)->goto_locus;
1436 gimple_stmt_iterator prev, next;
1437 prev = gsi_last_nondebug_bb (a);
1438 next = gsi_after_labels (b);
1439 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1440 gsi_next_nondebug (&next);
1441 if ((gsi_end_p (prev)
1442 || gimple_location (gsi_stmt (prev)) != goto_locus)
1443 && (gsi_end_p (next)
1444 || gimple_location (gsi_stmt (next)) != goto_locus))
1445 return false;
1446 }
1447
1448 return true;
1449 }
1450
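/* A minimal caller sketch (hypothetical; the real driver lives in
   tree-cfgcleanup.c and goes through the cfghooks.h wrappers, which
   dispatch to the gimple_* routines above): greedily merge each block
   with its single successor while the predicate allows it.  */

static void ATTRIBUTE_UNUSED
merge_trivial_pairs_sketch (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    while (single_succ_p (bb)
	   && can_merge_blocks_p (bb, single_succ (bb)))
      merge_blocks (bb, single_succ (bb));
}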
1451 /* Replaces all uses of NAME by VAL. */
1452
1453 void
1454 replace_uses_by (tree name, tree val)
1455 {
1456 imm_use_iterator imm_iter;
1457 use_operand_p use;
1458 gimple stmt;
1459 edge e;
1460
1461 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1462 {
1463 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1464 {
1465 replace_exp (use, val);
1466
1467 if (gimple_code (stmt) == GIMPLE_PHI)
1468 {
1469 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
1470 if (e->flags & EDGE_ABNORMAL)
1471 {
1472 /* This can only occur for virtual operands, since
1473 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1474 would prevent replacement. */
1475 gcc_checking_assert (virtual_operand_p (name));
1476 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1477 }
1478 }
1479 }
1480
1481 if (gimple_code (stmt) != GIMPLE_PHI)
1482 {
1483 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1484 gimple orig_stmt = stmt;
1485 size_t i;
1486
1487 /* Mark the block if we changed the last stmt in it. */
1488 if (cfgcleanup_altered_bbs
1489 && stmt_ends_bb_p (stmt))
1490 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1491
1492 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1493 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1494 only change something from non-invariant to invariant, and only
1495 when propagating constants. */
1496 if (is_gimple_min_invariant (val))
1497 for (i = 0; i < gimple_num_ops (stmt); i++)
1498 {
1499 tree op = gimple_op (stmt, i);
1500 /* Operands may be empty here. For example, the labels
1501 of a GIMPLE_COND are nulled out following the creation
1502 of the corresponding CFG edges. */
1503 if (op && TREE_CODE (op) == ADDR_EXPR)
1504 recompute_tree_invariant_for_addr_expr (op);
1505 }
1506
1507 if (fold_stmt (&gsi))
1508 stmt = gsi_stmt (gsi);
1509
1510 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1511 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1512
1513 update_stmt (stmt);
1514 }
1515 }
1516
1517 gcc_checking_assert (has_zero_uses (name));
1518
1519 /* Also update the trees stored in loop structures. */
1520 if (current_loops)
1521 {
1522 struct loop *loop;
1523 loop_iterator li;
1524
1525 FOR_EACH_LOOP (li, loop, 0)
1526 {
1527 substitute_in_loop_info (loop, name, val);
1528 }
1529 }
1530 }
1531
1532 /* Merge block B into block A. */
1533
1534 static void
1535 gimple_merge_blocks (basic_block a, basic_block b)
1536 {
1537 gimple_stmt_iterator last, gsi, psi;
1538
1539 if (dump_file)
1540 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1541
1542 /* Remove all single-valued PHI nodes from block B of the form
1543 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1544 gsi = gsi_last_bb (a);
1545 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1546 {
1547 gimple phi = gsi_stmt (psi);
1548 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1549 gimple copy;
1550 bool may_replace_uses = (virtual_operand_p (def)
1551 || may_propagate_copy (def, use));
1552
1553 /* In case we maintain loop closed ssa form, do not propagate arguments
1554 of loop exit phi nodes. */
1555 if (current_loops
1556 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1557 && !virtual_operand_p (def)
1558 && TREE_CODE (use) == SSA_NAME
1559 && a->loop_father != b->loop_father)
1560 may_replace_uses = false;
1561
1562 if (!may_replace_uses)
1563 {
1564 gcc_assert (!virtual_operand_p (def));
1565
1566 /* Note that just emitting the copies is fine -- there is no problem
1567 with ordering of phi nodes. This is because A is the single
1568 predecessor of B, therefore results of the phi nodes cannot
1569 appear as arguments of the phi nodes. */
1570 copy = gimple_build_assign (def, use);
1571 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1572 remove_phi_node (&psi, false);
1573 }
1574 else
1575 {
1576 /* If we deal with a PHI for virtual operands, we can simply
1577 propagate these without fussing with folding or updating
1578 the stmt. */
1579 if (virtual_operand_p (def))
1580 {
1581 imm_use_iterator iter;
1582 use_operand_p use_p;
1583 gimple stmt;
1584
1585 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1586 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1587 SET_USE (use_p, use);
1588
1589 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1590 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1591 }
1592 else
1593 replace_uses_by (def, use);
1594
1595 remove_phi_node (&psi, true);
1596 }
1597 }
1598
1599 /* Ensure that B follows A. */
1600 move_block_after (b, a);
1601
1602 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1603 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1604
1605 /* Remove labels from B and set gimple_bb to A for other statements. */
1606 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1607 {
1608 gimple stmt = gsi_stmt (gsi);
1609 if (gimple_code (stmt) == GIMPLE_LABEL)
1610 {
1611 tree label = gimple_label_label (stmt);
1612 int lp_nr;
1613
1614 gsi_remove (&gsi, false);
1615
1616 /* Now that we can thread computed gotos, we might have
1617 a situation where we have a forced label in block B.
1618 However, the label at the start of block B might still be
1619 used in other ways (think about the runtime checking for
1620 Fortran assigned gotos). So we cannot just delete the
1621 label. Instead we move the label to the start of block A. */
1622 if (FORCED_LABEL (label))
1623 {
1624 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1625 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1626 }
1627 /* Other user labels are kept around in the form of a debug stmt. */
1628 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1629 {
1630 gimple dbg = gimple_build_debug_bind (label,
1631 integer_zero_node,
1632 stmt);
1633 gimple_debug_bind_reset_value (dbg);
1634 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1635 }
1636
1637 lp_nr = EH_LANDING_PAD_NR (label);
1638 if (lp_nr)
1639 {
1640 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1641 lp->post_landing_pad = NULL;
1642 }
1643 }
1644 else
1645 {
1646 gimple_set_bb (stmt, a);
1647 gsi_next (&gsi);
1648 }
1649 }
1650
1651 /* Merge the sequences. */
1652 last = gsi_last_bb (a);
1653 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1654 set_bb_seq (b, NULL);
1655
1656 if (cfgcleanup_altered_bbs)
1657 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1658 }
1659
1660
1661 /* Return the one of two successors of BB that is not reachable by a
1662 complex edge, if there is one. Else, return BB. We use
1663 this in optimizations that use post-dominators for their heuristics,
1664 to catch the cases in C++ where function calls are involved. */
1665
1666 basic_block
1667 single_noncomplex_succ (basic_block bb)
1668 {
1669 edge e0, e1;
1670 if (EDGE_COUNT (bb->succs) != 2)
1671 return bb;
1672
1673 e0 = EDGE_SUCC (bb, 0);
1674 e1 = EDGE_SUCC (bb, 1);
1675 if (e0->flags & EDGE_COMPLEX)
1676 return e1->dest;
1677 if (e1->flags & EDGE_COMPLEX)
1678 return e0->dest;
1679
1680 return bb;
1681 }
1682
1683 /* CALL is a GIMPLE_CALL. Set the current_function_calls_* flags. */
1684
1685 void
1686 notice_special_calls (gimple call)
1687 {
1688 int flags = gimple_call_flags (call);
1689
1690 if (flags & ECF_MAY_BE_ALLOCA)
1691 cfun->calls_alloca = true;
1692 if (flags & ECF_RETURNS_TWICE)
1693 cfun->calls_setjmp = true;
1694 }
1695
1696
1697 /* Clear flags set by notice_special_calls. Used by dead code removal
1698 to update the flags. */
1699
1700 void
1701 clear_special_calls (void)
1702 {
1703 cfun->calls_alloca = false;
1704 cfun->calls_setjmp = false;
1705 }
1706
1707 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1708
1709 static void
1710 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1711 {
1712 /* Since this block is no longer reachable, we can just delete all
1713 of its PHI nodes. */
1714 remove_phi_nodes (bb);
1715
1716 /* Remove edges to BB's successors. */
1717 while (EDGE_COUNT (bb->succs) > 0)
1718 remove_edge (EDGE_SUCC (bb, 0));
1719 }
1720
1721
1722 /* Remove statements of basic block BB. */
1723
1724 static void
1725 remove_bb (basic_block bb)
1726 {
1727 gimple_stmt_iterator i;
1728
1729 if (dump_file)
1730 {
1731 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1732 if (dump_flags & TDF_DETAILS)
1733 {
1734 dump_bb (dump_file, bb, 0, dump_flags);
1735 fprintf (dump_file, "\n");
1736 }
1737 }
1738
1739 if (current_loops)
1740 {
1741 struct loop *loop = bb->loop_father;
1742
1743 /* If a loop gets removed, clean up the information associated
1744 with it. */
1745 if (loop->latch == bb
1746 || loop->header == bb)
1747 free_numbers_of_iterations_estimates_loop (loop);
1748 }
1749
1750 /* Remove all the instructions in the block. */
1751 if (bb_seq (bb) != NULL)
1752 {
1753 /* Walk backwards so as to get a chance to substitute all
1754 released DEFs into debug stmts. See
1755 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
1756 details. */
1757 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
1758 {
1759 gimple stmt = gsi_stmt (i);
1760 if (gimple_code (stmt) == GIMPLE_LABEL
1761 && (FORCED_LABEL (gimple_label_label (stmt))
1762 || DECL_NONLOCAL (gimple_label_label (stmt))))
1763 {
1764 basic_block new_bb;
1765 gimple_stmt_iterator new_gsi;
1766
1767 /* A non-reachable non-local label may still be referenced.
1768 But it no longer needs to carry the extra semantics of
1769 non-locality. */
1770 if (DECL_NONLOCAL (gimple_label_label (stmt)))
1771 {
1772 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
1773 FORCED_LABEL (gimple_label_label (stmt)) = 1;
1774 }
1775
1776 new_bb = bb->prev_bb;
1777 new_gsi = gsi_start_bb (new_bb);
1778 gsi_remove (&i, false);
1779 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
1780 }
1781 else
1782 {
1783 /* Release SSA definitions if we are in SSA. Note that we
1784 may be called when not in SSA. For example,
1785 final_cleanup calls this function via
1786 cleanup_tree_cfg. */
1787 if (gimple_in_ssa_p (cfun))
1788 release_defs (stmt);
1789
1790 gsi_remove (&i, true);
1791 }
1792
1793 if (gsi_end_p (i))
1794 i = gsi_last_bb (bb);
1795 else
1796 gsi_prev (&i);
1797 }
1798 }
1799
1800 remove_phi_nodes_and_edges_for_unreachable_block (bb);
1801 bb->il.gimple.seq = NULL;
1802 bb->il.gimple.phi_nodes = NULL;
1803 }
1804
1805
1806 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1807 predicate VAL, return the edge that will be taken out of the block.
1808 If VAL does not match a unique edge, NULL is returned. */
1809
1810 edge
1811 find_taken_edge (basic_block bb, tree val)
1812 {
1813 gimple stmt;
1814
1815 stmt = last_stmt (bb);
1816
1817 gcc_assert (stmt);
1818 gcc_assert (is_ctrl_stmt (stmt));
1819
1820 if (val == NULL)
1821 return NULL;
1822
1823 if (!is_gimple_min_invariant (val))
1824 return NULL;
1825
1826 if (gimple_code (stmt) == GIMPLE_COND)
1827 return find_taken_edge_cond_expr (bb, val);
1828
1829 if (gimple_code (stmt) == GIMPLE_SWITCH)
1830 return find_taken_edge_switch_expr (bb, val);
1831
1832 if (computed_goto_p (stmt))
1833 {
1834 /* Only optimize if the argument is a label; if the argument is
1835 not a label, then we cannot construct a proper CFG.
1836
1837 It may be the case that we only need to allow the LABEL_REF to
1838 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1839 appear inside a LABEL_EXPR just to be safe. */
1840 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1841 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1842 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1843 return NULL;
1844 }
1845
1846 gcc_unreachable ();
1847 }
1848
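/* Illustrative: if BB ends in "if (i_1 > 7)" the value is not
   invariant and NULL is returned; once constant propagation has turned
   the predicate into "if (1)", the EDGE_TRUE_VALUE successor is found.  */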
1849 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1850 statement, determine which of the outgoing edges will be taken out of the
1851 block. Return NULL if any edge may be taken. */
1852
1853 static edge
1854 find_taken_edge_computed_goto (basic_block bb, tree val)
1855 {
1856 basic_block dest;
1857 edge e = NULL;
1858
1859 dest = label_to_block (val);
1860 if (dest)
1861 {
1862 e = find_edge (bb, dest);
1863 gcc_assert (e != NULL);
1864 }
1865
1866 return e;
1867 }
1868
1869 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1870 statement, determine which of the two edges will be taken out of the
1871 block. Return NULL if either edge may be taken. */
1872
1873 static edge
1874 find_taken_edge_cond_expr (basic_block bb, tree val)
1875 {
1876 edge true_edge, false_edge;
1877
1878 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1879
1880 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1881 return (integer_zerop (val) ? false_edge : true_edge);
1882 }
1883
1884 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1885 statement, determine which edge will be taken out of the block. Return
1886 NULL if any edge may be taken. */
1887
1888 static edge
1889 find_taken_edge_switch_expr (basic_block bb, tree val)
1890 {
1891 basic_block dest_bb;
1892 edge e;
1893 gimple switch_stmt;
1894 tree taken_case;
1895
1896 switch_stmt = last_stmt (bb);
1897 taken_case = find_case_label_for_value (switch_stmt, val);
1898 dest_bb = label_to_block (CASE_LABEL (taken_case));
1899
1900 e = find_edge (bb, dest_bb);
1901 gcc_assert (e);
1902 return e;
1903 }
1904
1905
1906 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
1907 We can make optimal use here of the fact that the case labels are
1908 sorted: We can do a binary search for a case matching VAL. */
1909
1910 static tree
1911 find_case_label_for_value (gimple switch_stmt, tree val)
1912 {
1913 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
1914 tree default_case = gimple_switch_default_label (switch_stmt);
1915
1916 for (low = 0, high = n; high - low > 1; )
1917 {
1918 size_t i = (high + low) / 2;
1919 tree t = gimple_switch_label (switch_stmt, i);
1920 int cmp;
1921
1922 /* Cache the result of comparing CASE_LOW and val. */
1923 cmp = tree_int_cst_compare (CASE_LOW (t), val);
1924
1925 if (cmp > 0)
1926 high = i;
1927 else
1928 low = i;
1929
1930 if (CASE_HIGH (t) == NULL)
1931 {
1932 /* A single-valued case label. */
1933 if (cmp == 0)
1934 return t;
1935 }
1936 else
1937 {
1938 /* A case range. We can only handle integer ranges. */
1939 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
1940 return t;
1941 }
1942 }
1943
1944 return default_case;
1945 }
1946
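/* Worked example of the binary search: with sorted case labels 1:,
   5 ... 9: and 12:, a query for VAL == 7 probes the middle label,
   sees CASE_LOW == 5 <= 7, then matches the range check against
   CASE_HIGH == 9 and returns the 5 ... 9 label; VAL == 4 matches no
   label and falls back to DEFAULT_CASE.  */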
1947
1948 /* Dump a basic block on stderr. */
1949
1950 void
1951 gimple_debug_bb (basic_block bb)
1952 {
1953 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
1954 }
1955
1956
1957 /* Dump basic block with index N on stderr. */
1958
1959 basic_block
1960 gimple_debug_bb_n (int n)
1961 {
1962 gimple_debug_bb (BASIC_BLOCK (n));
1963 return BASIC_BLOCK (n);
1964 }
1965
1966
1967 /* Dump the CFG on stderr.
1968
1969 FLAGS are the same used by the tree dumping functions
1970 (see TDF_* in dumpfile.h). */
1971
1972 void
1973 gimple_debug_cfg (int flags)
1974 {
1975 gimple_dump_cfg (stderr, flags);
1976 }
1977
1978
1979 /* Dump the program showing basic block boundaries on the given FILE.
1980
1981 FLAGS are the same used by the tree dumping functions (see TDF_* in
1982 dumpfile.h). */
1983
1984 void
1985 gimple_dump_cfg (FILE *file, int flags)
1986 {
1987 if (flags & TDF_DETAILS)
1988 {
1989 dump_function_header (file, current_function_decl, flags);
1990 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
1991 n_basic_blocks, n_edges, last_basic_block);
1992
1993 brief_dump_cfg (file, flags | TDF_COMMENT);
1994 fprintf (file, "\n");
1995 }
1996
1997 if (flags & TDF_STATS)
1998 dump_cfg_stats (file);
1999
2000 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2001 }
2002
2003
2004 /* Dump CFG statistics on FILE. */
2005
2006 void
2007 dump_cfg_stats (FILE *file)
2008 {
2009 static long max_num_merged_labels = 0;
2010 unsigned long size, total = 0;
2011 long num_edges;
2012 basic_block bb;
2013 const char * const fmt_str = "%-30s%-13s%12s\n";
2014 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2015 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2016 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2017 const char *funcname = current_function_name ();
2018
2019 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2020
2021 fprintf (file, "---------------------------------------------------------\n");
2022 fprintf (file, fmt_str, "", " Number of ", "Memory");
2023 fprintf (file, fmt_str, "", " instances ", "used ");
2024 fprintf (file, "---------------------------------------------------------\n");
2025
2026 size = n_basic_blocks * sizeof (struct basic_block_def);
2027 total += size;
2028 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2029 SCALE (size), LABEL (size));
2030
2031 num_edges = 0;
2032 FOR_EACH_BB (bb)
2033 num_edges += EDGE_COUNT (bb->succs);
2034 size = num_edges * sizeof (struct edge_def);
2035 total += size;
2036 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2037
2038 fprintf (file, "---------------------------------------------------------\n");
2039 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2040 LABEL (total));
2041 fprintf (file, "---------------------------------------------------------\n");
2042 fprintf (file, "\n");
2043
2044 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2045 max_num_merged_labels = cfg_stats.num_merged_labels;
2046
2047 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2048 cfg_stats.num_merged_labels, max_num_merged_labels);
2049
2050 fprintf (file, "\n");
2051 }
2052
2053
2054 /* Dump CFG statistics on stderr. Keep extern so that it's always
2055 linked in the final executable. */
2056
2057 DEBUG_FUNCTION void
2058 debug_cfg_stats (void)
2059 {
2060 dump_cfg_stats (stderr);
2061 }
2062
2063 /*---------------------------------------------------------------------------
2064 Miscellaneous helpers
2065 ---------------------------------------------------------------------------*/
2066
2067 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2068 flow. Transfers of control flow associated with EH are excluded. */
2069
2070 static bool
2071 call_can_make_abnormal_goto (gimple t)
2072 {
2073 /* If the function has no non-local labels and does not call setjmp,
2074 then a call cannot make an abnormal transfer of control. */
2075 if (!cfun->has_nonlocal_label
2076 && !cfun->calls_setjmp)
2077 return false;
2078
2079 /* Likewise if the call has no side effects. */
2080 if (!gimple_has_side_effects (t))
2081 return false;
2082
2083 /* Likewise if the called function is leaf. */
2084 if (gimple_call_flags (t) & ECF_LEAF)
2085 return false;
2086
2087 return true;
2088 }
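
/* As an illustration (a sketch using GNU C nested functions, not from
   the sources, where h is assumed to be some function that may invoke
   its argument): in

     void f (void) { __label__ l; void g (void) { goto l; } h (g); l:; }

   the call to h may transfer control to l via the nonlocal goto in g,
   so the call must end its basic block and receive an abnormal edge.  */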
2089
2090
2091 /* Return true if T can make an abnormal transfer of control flow.
2092 Transfers of control flow associated with EH are excluded. */
2093
2094 bool
2095 stmt_can_make_abnormal_goto (gimple t)
2096 {
2097 if (computed_goto_p (t))
2098 return true;
2099 if (is_gimple_call (t))
2100 return call_can_make_abnormal_goto (t);
2101 return false;
2102 }
2103
2104
2105 /* Return true if T represents a stmt that always transfers control. */
2106
2107 bool
2108 is_ctrl_stmt (gimple t)
2109 {
2110 switch (gimple_code (t))
2111 {
2112 case GIMPLE_COND:
2113 case GIMPLE_SWITCH:
2114 case GIMPLE_GOTO:
2115 case GIMPLE_RETURN:
2116 case GIMPLE_RESX:
2117 return true;
2118 default:
2119 return false;
2120 }
2121 }
2122
2123
2124 /* Return true if T is a statement that may alter the flow of control
2125 (e.g., a call to a non-returning function). */
2126
2127 bool
2128 is_ctrl_altering_stmt (gimple t)
2129 {
2130 gcc_assert (t);
2131
2132 switch (gimple_code (t))
2133 {
2134 case GIMPLE_CALL:
2135 {
2136 int flags = gimple_call_flags (t);
2137
2138 /* A call alters control flow if it can make an abnormal goto. */
2139 if (call_can_make_abnormal_goto (t))
2140 return true;
2141
2142 /* A call also alters control flow if it does not return. */
2143 if (flags & ECF_NORETURN)
2144 return true;
2145
2146 /* TM ending statements have backedges out of the transaction.
2147 Return true so we split the basic block containing them.
2148 Note that the TM_BUILTIN test is merely an optimization. */
2149 if ((flags & ECF_TM_BUILTIN)
2150 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2151 return true;
2152
2153 /* A BUILT_IN_RETURN call is the same as a return statement. */
2154 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2155 return true;
2156 }
2157 break;
2158
2159 case GIMPLE_EH_DISPATCH:
2160 /* EH_DISPATCH branches to the individual catch handlers at
2161 this level of a try or allowed-exceptions region. It can
2162 fallthru to the next statement as well. */
2163 return true;
2164
2165 case GIMPLE_ASM:
2166 if (gimple_asm_nlabels (t) > 0)
2167 return true;
2168 break;
2169
2170 CASE_GIMPLE_OMP:
2171 /* OpenMP directives alter control flow. */
2172 return true;
2173
2174 case GIMPLE_TRANSACTION:
2175 /* A transaction start alters control flow. */
2176 return true;
2177
2178 default:
2179 break;
2180 }
2181
2182 /* If a statement can throw, it alters control flow. */
2183 return stmt_can_throw_internal (t);
2184 }
2185
2186
2187 /* Return true if T is a simple local goto. */
2188
2189 bool
2190 simple_goto_p (gimple t)
2191 {
2192 return (gimple_code (t) == GIMPLE_GOTO
2193 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2194 }
2195
2196
2197 /* Return true if STMT should start a new basic block. PREV_STMT is
2198 the statement preceding STMT. It is used when STMT is a label or a
2199 case label. Labels should start a new basic block only if the
2200 previous statement wasn't a label. Otherwise, a sequence of labels
2201 would generate unnecessary basic blocks that contain nothing but a
2202 single label. */
2203
2204 static inline bool
2205 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2206 {
2207 if (stmt == NULL)
2208 return false;
2209
2210 /* Labels start a new basic block only if the preceding statement
2211 wasn't a label of the same type. This prevents the creation of
2212 consecutive blocks that have nothing but a single label. */
2213 if (gimple_code (stmt) == GIMPLE_LABEL)
2214 {
2215 /* Nonlocal and computed GOTO targets always start a new block. */
2216 if (DECL_NONLOCAL (gimple_label_label (stmt))
2217 || FORCED_LABEL (gimple_label_label (stmt)))
2218 return true;
2219
2220 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2221 {
2222 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2223 return true;
2224
2225 cfg_stats.num_merged_labels++;
2226 return false;
2227 }
2228 else
2229 return true;
2230 }
2231 else if (gimple_code (stmt) == GIMPLE_CALL
2232 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2233 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2234 start a new block. */
2235 return true;
2236
2237 return false;
2238 }
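
/* For example (a sketch): given the sequence

     L1: L2: x = 1;

   only L1 opens a new basic block; L2 is counted in
   cfg_stats.num_merged_labels and lands in the same block, unless one
   of the labels is nonlocal or forced.  */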
2239
2240
2241 /* Return true if T should end a basic block. */
2242
2243 bool
2244 stmt_ends_bb_p (gimple t)
2245 {
2246 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2247 }
2248
2249 /* Remove block annotations and other data structures. */
2250
2251 void
2252 delete_tree_cfg_annotations (void)
2253 {
2254 vec_free (label_to_block_map);
2255 }
2256
2257
2258 /* Return the first statement in basic block BB. */
2259
2260 gimple
2261 first_stmt (basic_block bb)
2262 {
2263 gimple_stmt_iterator i = gsi_start_bb (bb);
2264 gimple stmt = NULL;
2265
2266 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2267 {
2268 gsi_next (&i);
2269 stmt = NULL;
2270 }
2271 return stmt;
2272 }
2273
2274 /* Return the first non-label statement in basic block BB. */
2275
2276 static gimple
2277 first_non_label_stmt (basic_block bb)
2278 {
2279 gimple_stmt_iterator i = gsi_start_bb (bb);
2280 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2281 gsi_next (&i);
2282 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2283 }
2284
2285 /* Return the last statement in basic block BB. */
2286
2287 gimple
2288 last_stmt (basic_block bb)
2289 {
2290 gimple_stmt_iterator i = gsi_last_bb (bb);
2291 gimple stmt = NULL;
2292
2293 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2294 {
2295 gsi_prev (&i);
2296 stmt = NULL;
2297 }
2298 return stmt;
2299 }
2300
2301 /* Return the last statement of an otherwise empty block. Return NULL
2302 if the block is totally empty, or if it contains more than one
2303 statement. */
2304
2305 gimple
2306 last_and_only_stmt (basic_block bb)
2307 {
2308 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2309 gimple last, prev;
2310
2311 if (gsi_end_p (i))
2312 return NULL;
2313
2314 last = gsi_stmt (i);
2315 gsi_prev_nondebug (&i);
2316 if (gsi_end_p (i))
2317 return last;
2318
2319 /* Empty statements should no longer appear in the instruction stream.
2320 Everything that might have appeared before should be deleted by
2321 remove_useless_stmts, and the optimizers should just gsi_remove
2322 instead of smashing with build_empty_stmt.
2323
2324 Thus the only thing that should appear here in a block containing
2325 one executable statement is a label. */
2326 prev = gsi_stmt (i);
2327 if (gimple_code (prev) == GIMPLE_LABEL)
2328 return last;
2329 else
2330 return NULL;
2331 }
2332
2333 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2334
2335 static void
2336 reinstall_phi_args (edge new_edge, edge old_edge)
2337 {
2338 edge_var_map_vector *v;
2339 edge_var_map *vm;
2340 int i;
2341 gimple_stmt_iterator phis;
2342
2343 v = redirect_edge_var_map_vector (old_edge);
2344 if (!v)
2345 return;
2346
2347 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2348 v->iterate (i, &vm) && !gsi_end_p (phis);
2349 i++, gsi_next (&phis))
2350 {
2351 gimple phi = gsi_stmt (phis);
2352 tree result = redirect_edge_var_map_result (vm);
2353 tree arg = redirect_edge_var_map_def (vm);
2354
2355 gcc_assert (result == gimple_phi_result (phi));
2356
2357 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2358 }
2359
2360 redirect_edge_var_map_clear (old_edge);
2361 }
2362
2363 /* Returns the basic block after which the new basic block created
2364 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2365 near its "logical" location. This is of most help to humans looking
2366 at debugging dumps. */
2367
2368 static basic_block
2369 split_edge_bb_loc (edge edge_in)
2370 {
2371 basic_block dest = edge_in->dest;
2372 basic_block dest_prev = dest->prev_bb;
2373
2374 if (dest_prev)
2375 {
2376 edge e = find_edge (dest_prev, dest);
2377 if (e && !(e->flags & EDGE_COMPLEX))
2378 return edge_in->src;
2379 }
2380 return dest_prev;
2381 }
2382
2383 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2384 Abort on abnormal edges. */
2385
2386 static basic_block
2387 gimple_split_edge (edge edge_in)
2388 {
2389 basic_block new_bb, after_bb, dest;
2390 edge new_edge, e;
2391
2392 /* Abnormal edges cannot be split. */
2393 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2394
2395 dest = edge_in->dest;
2396
2397 after_bb = split_edge_bb_loc (edge_in);
2398
2399 new_bb = create_empty_bb (after_bb);
2400 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2401 new_bb->count = edge_in->count;
2402 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2403 new_edge->probability = REG_BR_PROB_BASE;
2404 new_edge->count = edge_in->count;
2405
2406 e = redirect_edge_and_branch (edge_in, new_bb);
2407 gcc_assert (e == edge_in);
2408 reinstall_phi_args (new_edge, e);
2409
2410 return new_bb;
2411 }
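
/* Sketch of the transformation above: splitting the edge A->B,

     A ---> B      becomes      A ---> N ---> B

   where N is a new empty block, the original edge is redirected to N,
   N->B is a fallthru edge, and the PHI arguments queued on the old
   edge are reinstalled on N->B by reinstall_phi_args.  */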
2412
2413
2414 /* Verify properties of the address expression T with base object BASE. */
2415
2416 static tree
2417 verify_address (tree t, tree base)
2418 {
2419 bool old_constant;
2420 bool old_side_effects;
2421 bool new_constant;
2422 bool new_side_effects;
2423
2424 old_constant = TREE_CONSTANT (t);
2425 old_side_effects = TREE_SIDE_EFFECTS (t);
2426
2427 recompute_tree_invariant_for_addr_expr (t);
2428 new_side_effects = TREE_SIDE_EFFECTS (t);
2429 new_constant = TREE_CONSTANT (t);
2430
2431 if (old_constant != new_constant)
2432 {
2433 error ("constant not recomputed when ADDR_EXPR changed");
2434 return t;
2435 }
2436 if (old_side_effects != new_side_effects)
2437 {
2438 error ("side effects not recomputed when ADDR_EXPR changed");
2439 return t;
2440 }
2441
2442 if (!(TREE_CODE (base) == VAR_DECL
2443 || TREE_CODE (base) == PARM_DECL
2444 || TREE_CODE (base) == RESULT_DECL))
2445 return NULL_TREE;
2446
2447 if (DECL_GIMPLE_REG_P (base))
2448 {
2449 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2450 return base;
2451 }
2452
2453 return NULL_TREE;
2454 }
2455
2456 /* Callback for walk_tree; check that all elements with address taken
2457 are properly noticed as such. DATA is currently unused (note the
2458 ATTRIBUTE_UNUSED in the signature below). */
2459
2460 static tree
2461 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2462 {
2463 tree t = *tp, x;
2464
2465 if (TYPE_P (t))
2466 *walk_subtrees = 0;
2467
2468 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2469 #define CHECK_OP(N, MSG) \
2470 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2471 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2472
2473 switch (TREE_CODE (t))
2474 {
2475 case SSA_NAME:
2476 if (SSA_NAME_IN_FREE_LIST (t))
2477 {
2478 error ("SSA name in freelist but still referenced");
2479 return *tp;
2480 }
2481 break;
2482
2483 case INDIRECT_REF:
2484 error ("INDIRECT_REF in gimple IL");
2485 return t;
2486
2487 case MEM_REF:
2488 x = TREE_OPERAND (t, 0);
2489 if (!POINTER_TYPE_P (TREE_TYPE (x))
2490 || !is_gimple_mem_ref_addr (x))
2491 {
2492 error ("invalid first operand of MEM_REF");
2493 return x;
2494 }
2495 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2496 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2497 {
2498 error ("invalid offset operand of MEM_REF");
2499 return TREE_OPERAND (t, 1);
2500 }
2501 if (TREE_CODE (x) == ADDR_EXPR
2502 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2503 return x;
2504 *walk_subtrees = 0;
2505 break;
2506
2507 case ASSERT_EXPR:
2508 x = fold (ASSERT_EXPR_COND (t));
2509 if (x == boolean_false_node)
2510 {
2511 error ("ASSERT_EXPR with an always-false condition");
2512 return *tp;
2513 }
2514 break;
2515
2516 case MODIFY_EXPR:
2517 error ("MODIFY_EXPR not expected while having tuples");
2518 return *tp;
2519
2520 case ADDR_EXPR:
2521 {
2522 tree tem;
2523
2524 gcc_assert (is_gimple_address (t));
2525
2526 /* Skip any references (they will be checked when we recurse down the
2527 tree) and ensure that any variable used as a prefix is marked
2528 addressable. */
2529 for (x = TREE_OPERAND (t, 0);
2530 handled_component_p (x);
2531 x = TREE_OPERAND (x, 0))
2532 ;
2533
2534 if ((tem = verify_address (t, x)))
2535 return tem;
2536
2537 if (!(TREE_CODE (x) == VAR_DECL
2538 || TREE_CODE (x) == PARM_DECL
2539 || TREE_CODE (x) == RESULT_DECL))
2540 return NULL;
2541
2542 if (!TREE_ADDRESSABLE (x))
2543 {
2544 error ("address taken, but ADDRESSABLE bit not set");
2545 return x;
2546 }
2547
2548 break;
2549 }
2550
2551 case COND_EXPR:
2552 x = COND_EXPR_COND (t);
2553 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2554 {
2555 error ("non-integral used in condition");
2556 return x;
2557 }
2558 if (!is_gimple_condexpr (x))
2559 {
2560 error ("invalid conditional operand");
2561 return x;
2562 }
2563 break;
2564
2565 case NON_LVALUE_EXPR:
2566 case TRUTH_NOT_EXPR:
2567 gcc_unreachable ();
2568
2569 CASE_CONVERT:
2570 case FIX_TRUNC_EXPR:
2571 case FLOAT_EXPR:
2572 case NEGATE_EXPR:
2573 case ABS_EXPR:
2574 case BIT_NOT_EXPR:
2575 CHECK_OP (0, "invalid operand to unary operator");
2576 break;
2577
2578 case REALPART_EXPR:
2579 case IMAGPART_EXPR:
2580 case BIT_FIELD_REF:
2581 if (!is_gimple_reg_type (TREE_TYPE (t)))
2582 {
2583 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2584 return t;
2585 }
2586
2587 if (TREE_CODE (t) == BIT_FIELD_REF)
2588 {
2589 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2590 || !host_integerp (TREE_OPERAND (t, 2), 1))
2591 {
2592 error ("invalid position or size operand to BIT_FIELD_REF");
2593 return t;
2594 }
2595 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2596 && (TYPE_PRECISION (TREE_TYPE (t))
2597 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2598 {
2599 error ("integral result type precision does not match "
2600 "field size of BIT_FIELD_REF");
2601 return t;
2602 }
2603 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2604 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2605 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2606 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2607 {
2608 error ("mode precision of non-integral result does not "
2609 "match field size of BIT_FIELD_REF");
2610 return t;
2611 }
2612 }
2613 t = TREE_OPERAND (t, 0);
2614
2615 /* Fall-through. */
2616 case COMPONENT_REF:
2617 case ARRAY_REF:
2618 case ARRAY_RANGE_REF:
2619 case VIEW_CONVERT_EXPR:
2620 /* We have a nest of references. Verify that each of the operands
2621 that determines where to reference is either a constant or a
2622 variable, verify that the base is valid, and then mark the
2623 subtrees as already checked. */
2624 while (handled_component_p (t))
2625 {
2626 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2627 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2628 else if (TREE_CODE (t) == ARRAY_REF
2629 || TREE_CODE (t) == ARRAY_RANGE_REF)
2630 {
2631 CHECK_OP (1, "invalid array index");
2632 if (TREE_OPERAND (t, 2))
2633 CHECK_OP (2, "invalid array lower bound");
2634 if (TREE_OPERAND (t, 3))
2635 CHECK_OP (3, "invalid array stride");
2636 }
2637 else if (TREE_CODE (t) == BIT_FIELD_REF
2638 || TREE_CODE (t) == REALPART_EXPR
2639 || TREE_CODE (t) == IMAGPART_EXPR)
2640 {
2641 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2642 "REALPART_EXPR");
2643 return t;
2644 }
2645
2646 t = TREE_OPERAND (t, 0);
2647 }
2648
2649 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2650 {
2651 error ("invalid reference prefix");
2652 return t;
2653 }
2654 *walk_subtrees = 0;
2655 break;
2656 case PLUS_EXPR:
2657 case MINUS_EXPR:
2658 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
2659 arithmetic should be done using POINTER_PLUS_EXPR. */
2660 if (POINTER_TYPE_P (TREE_TYPE (t)))
2661 {
2662 error ("invalid operand to plus/minus, type is a pointer");
2663 return t;
2664 }
2665 CHECK_OP (0, "invalid operand to binary operator");
2666 CHECK_OP (1, "invalid operand to binary operator");
2667 break;
2668
2669 case POINTER_PLUS_EXPR:
2670 /* Check to make sure the first operand is a pointer or reference type. */
2671 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2672 {
2673 error ("invalid operand to pointer plus, first operand is not a pointer");
2674 return t;
2675 }
2676 /* Check to make sure the second operand is a ptrofftype. */
2677 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2678 {
2679 error ("invalid operand to pointer plus, second operand is not an "
2680 "integer type of appropriate width");
2681 return t;
2682 }
2683 /* FALLTHROUGH */
2684 case LT_EXPR:
2685 case LE_EXPR:
2686 case GT_EXPR:
2687 case GE_EXPR:
2688 case EQ_EXPR:
2689 case NE_EXPR:
2690 case UNORDERED_EXPR:
2691 case ORDERED_EXPR:
2692 case UNLT_EXPR:
2693 case UNLE_EXPR:
2694 case UNGT_EXPR:
2695 case UNGE_EXPR:
2696 case UNEQ_EXPR:
2697 case LTGT_EXPR:
2698 case MULT_EXPR:
2699 case TRUNC_DIV_EXPR:
2700 case CEIL_DIV_EXPR:
2701 case FLOOR_DIV_EXPR:
2702 case ROUND_DIV_EXPR:
2703 case TRUNC_MOD_EXPR:
2704 case CEIL_MOD_EXPR:
2705 case FLOOR_MOD_EXPR:
2706 case ROUND_MOD_EXPR:
2707 case RDIV_EXPR:
2708 case EXACT_DIV_EXPR:
2709 case MIN_EXPR:
2710 case MAX_EXPR:
2711 case LSHIFT_EXPR:
2712 case RSHIFT_EXPR:
2713 case LROTATE_EXPR:
2714 case RROTATE_EXPR:
2715 case BIT_IOR_EXPR:
2716 case BIT_XOR_EXPR:
2717 case BIT_AND_EXPR:
2718 CHECK_OP (0, "invalid operand to binary operator");
2719 CHECK_OP (1, "invalid operand to binary operator");
2720 break;
2721
2722 case CONSTRUCTOR:
2723 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2724 *walk_subtrees = 0;
2725 break;
2726
2727 case CASE_LABEL_EXPR:
2728 if (CASE_CHAIN (t))
2729 {
2730 error ("invalid CASE_CHAIN");
2731 return t;
2732 }
2733 break;
2734
2735 default:
2736 break;
2737 }
2738 return NULL;
2739
2740 #undef CHECK_OP
2741 }
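
/* For instance (a sketch): when verifying 'p_1 = &a.b.c', the
   ADDR_EXPR case above strips the COMPONENT_REF chain down to the
   base 'a' and then requires TREE_ADDRESSABLE (a), since the address
   of 'a' escapes through p_1.  */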
2742
2743
2744 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2745 Returns true if there is an error, otherwise false. */
2746
2747 static bool
2748 verify_types_in_gimple_min_lval (tree expr)
2749 {
2750 tree op;
2751
2752 if (is_gimple_id (expr))
2753 return false;
2754
2755 if (TREE_CODE (expr) != TARGET_MEM_REF
2756 && TREE_CODE (expr) != MEM_REF)
2757 {
2758 error ("invalid expression for min lvalue");
2759 return true;
2760 }
2761
2762 /* TARGET_MEM_REFs are strange beasts. */
2763 if (TREE_CODE (expr) == TARGET_MEM_REF)
2764 return false;
2765
2766 op = TREE_OPERAND (expr, 0);
2767 if (!is_gimple_val (op))
2768 {
2769 error ("invalid operand in indirect reference");
2770 debug_generic_stmt (op);
2771 return true;
2772 }
2773 /* Memory references now generally can involve a value conversion. */
2774
2775 return false;
2776 }
2777
2778 /* Verify if EXPR is a valid GIMPLE reference expression. If
2779 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2780 if there is an error, otherwise false. */
2781
2782 static bool
2783 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2784 {
2785 while (handled_component_p (expr))
2786 {
2787 tree op = TREE_OPERAND (expr, 0);
2788
2789 if (TREE_CODE (expr) == ARRAY_REF
2790 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2791 {
2792 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2793 || (TREE_OPERAND (expr, 2)
2794 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2795 || (TREE_OPERAND (expr, 3)
2796 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2797 {
2798 error ("invalid operands to array reference");
2799 debug_generic_stmt (expr);
2800 return true;
2801 }
2802 }
2803
2804 /* Verify if the reference array element types are compatible. */
2805 if (TREE_CODE (expr) == ARRAY_REF
2806 && !useless_type_conversion_p (TREE_TYPE (expr),
2807 TREE_TYPE (TREE_TYPE (op))))
2808 {
2809 error ("type mismatch in array reference");
2810 debug_generic_stmt (TREE_TYPE (expr));
2811 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2812 return true;
2813 }
2814 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2815 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2816 TREE_TYPE (TREE_TYPE (op))))
2817 {
2818 error ("type mismatch in array range reference");
2819 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2820 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2821 return true;
2822 }
2823
2824 if ((TREE_CODE (expr) == REALPART_EXPR
2825 || TREE_CODE (expr) == IMAGPART_EXPR)
2826 && !useless_type_conversion_p (TREE_TYPE (expr),
2827 TREE_TYPE (TREE_TYPE (op))))
2828 {
2829 error ("type mismatch in real/imagpart reference");
2830 debug_generic_stmt (TREE_TYPE (expr));
2831 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2832 return true;
2833 }
2834
2835 if (TREE_CODE (expr) == COMPONENT_REF
2836 && !useless_type_conversion_p (TREE_TYPE (expr),
2837 TREE_TYPE (TREE_OPERAND (expr, 1))))
2838 {
2839 error ("type mismatch in component reference");
2840 debug_generic_stmt (TREE_TYPE (expr));
2841 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2842 return true;
2843 }
2844
2845 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2846 {
2847 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2848 that their operand is not an SSA name or an invariant when
2849 requiring an lvalue (this usually means there is an SRA or IPA-SRA
2850 bug). Otherwise there is nothing to verify; gross mismatches at
2851 most invoke undefined behavior. */
2852 if (require_lvalue
2853 && (TREE_CODE (op) == SSA_NAME
2854 || is_gimple_min_invariant (op)))
2855 {
2856 error ("conversion of an SSA_NAME on the left hand side");
2857 debug_generic_stmt (expr);
2858 return true;
2859 }
2860 else if (TREE_CODE (op) == SSA_NAME
2861 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2862 {
2863 error ("conversion of register to a different size");
2864 debug_generic_stmt (expr);
2865 return true;
2866 }
2867 else if (!handled_component_p (op))
2868 return false;
2869 }
2870
2871 expr = op;
2872 }
2873
2874 if (TREE_CODE (expr) == MEM_REF)
2875 {
2876 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
2877 {
2878 error ("invalid address operand in MEM_REF");
2879 debug_generic_stmt (expr);
2880 return true;
2881 }
2882 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
2883 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
2884 {
2885 error ("invalid offset operand in MEM_REF");
2886 debug_generic_stmt (expr);
2887 return true;
2888 }
2889 }
2890 else if (TREE_CODE (expr) == TARGET_MEM_REF)
2891 {
2892 if (!TMR_BASE (expr)
2893 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
2894 {
2895 error ("invalid address operand in TARGET_MEM_REF");
2896 return true;
2897 }
2898 if (!TMR_OFFSET (expr)
2899 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
2900 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
2901 {
2902 error ("invalid offset operand in TARGET_MEM_REF");
2903 debug_generic_stmt (expr);
2904 return true;
2905 }
2906 }
2907
2908 return ((require_lvalue || !is_gimple_min_invariant (expr))
2909 && verify_types_in_gimple_min_lval (expr));
2910 }
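
/* A well-formed MEM_REF under the checks above looks like this (a
   sketch): a MEM_REF whose first operand is an SSA_NAME or ADDR_EXPR
   and whose second operand is an INTEGER_CST of pointer type; that
   pointer type carries the alias information of the access rather
   than being a real pointer value.  */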
2911
2912 /* Returns true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ)
2913 list of pointer-to types is trivially convertible to DEST. */
2914
2915 static bool
2916 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2917 {
2918 tree src;
2919
2920 if (!TYPE_POINTER_TO (src_obj))
2921 return true;
2922
2923 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2924 if (useless_type_conversion_p (dest, src))
2925 return true;
2926
2927 return false;
2928 }
2929
2930 /* Return true if TYPE1 is a fixed-point type and if conversions to and
2931 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
2932
2933 static bool
2934 valid_fixed_convert_types_p (tree type1, tree type2)
2935 {
2936 return (FIXED_POINT_TYPE_P (type1)
2937 && (INTEGRAL_TYPE_P (type2)
2938 || SCALAR_FLOAT_TYPE_P (type2)
2939 || FIXED_POINT_TYPE_P (type2)));
2940 }
2941
2942 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
2943 is a problem, otherwise false. */
2944
2945 static bool
2946 verify_gimple_call (gimple stmt)
2947 {
2948 tree fn = gimple_call_fn (stmt);
2949 tree fntype, fndecl;
2950 unsigned i;
2951
2952 if (gimple_call_internal_p (stmt))
2953 {
2954 if (fn)
2955 {
2956 error ("gimple call has two targets");
2957 debug_generic_stmt (fn);
2958 return true;
2959 }
2960 }
2961 else
2962 {
2963 if (!fn)
2964 {
2965 error ("gimple call has no target");
2966 return true;
2967 }
2968 }
2969
2970 if (fn && !is_gimple_call_addr (fn))
2971 {
2972 error ("invalid function in gimple call");
2973 debug_generic_stmt (fn);
2974 return true;
2975 }
2976
2977 if (fn
2978 && (!POINTER_TYPE_P (TREE_TYPE (fn))
2979 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
2980 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
2981 {
2982 error ("non-function in gimple call");
2983 return true;
2984 }
2985
2986 fndecl = gimple_call_fndecl (stmt);
2987 if (fndecl
2988 && TREE_CODE (fndecl) == FUNCTION_DECL
2989 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
2990 && !DECL_PURE_P (fndecl)
2991 && !TREE_READONLY (fndecl))
2992 {
2993 error ("invalid pure const state for function");
2994 return true;
2995 }
2996
2997 if (gimple_call_lhs (stmt)
2998 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
2999 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3000 {
3001 error ("invalid LHS in gimple call");
3002 return true;
3003 }
3004
3005 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3006 {
3007 error ("LHS in noreturn call");
3008 return true;
3009 }
3010
3011 fntype = gimple_call_fntype (stmt);
3012 if (fntype
3013 && gimple_call_lhs (stmt)
3014 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3015 TREE_TYPE (fntype))
3016 /* ??? At least C++ misses conversions at assignments from
3017 void * call results.
3018 ??? Java is completely off. Especially with functions
3019 returning java.lang.Object.
3020 For now simply allow arbitrary pointer type conversions. */
3021 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3022 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3023 {
3024 error ("invalid conversion in gimple call");
3025 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3026 debug_generic_stmt (TREE_TYPE (fntype));
3027 return true;
3028 }
3029
3030 if (gimple_call_chain (stmt)
3031 && !is_gimple_val (gimple_call_chain (stmt)))
3032 {
3033 error ("invalid static chain in gimple call");
3034 debug_generic_stmt (gimple_call_chain (stmt));
3035 return true;
3036 }
3037
3038 /* If there is a static chain argument, this should not be an indirect
3039 call, and the decl should have DECL_STATIC_CHAIN set. */
3040 if (gimple_call_chain (stmt))
3041 {
3042 if (!gimple_call_fndecl (stmt))
3043 {
3044 error ("static chain in indirect gimple call");
3045 return true;
3046 }
3047 fn = TREE_OPERAND (fn, 0);
3048
3049 if (!DECL_STATIC_CHAIN (fn))
3050 {
3051 error ("static chain with function that doesn%'t use one");
3052 return true;
3053 }
3054 }
3055
3056 /* ??? The C frontend passes unpromoted arguments in case it
3057 didn't see a function declaration before the call. So for now
3058 leave the call arguments mostly unverified. Once we gimplify
3059 unit-at-a-time we have a chance to fix this. */
3060
3061 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3062 {
3063 tree arg = gimple_call_arg (stmt, i);
3064 if ((is_gimple_reg_type (TREE_TYPE (arg))
3065 && !is_gimple_val (arg))
3066 || (!is_gimple_reg_type (TREE_TYPE (arg))
3067 && !is_gimple_lvalue (arg)))
3068 {
3069 error ("invalid argument to gimple call");
3070 debug_generic_expr (arg);
3071 return true;
3072 }
3073 }
3074
3075 return false;
3076 }
3077
3078 /* Verifies the gimple comparison with the result type TYPE and
3079 the operands OP0 and OP1. */
3080
3081 static bool
3082 verify_gimple_comparison (tree type, tree op0, tree op1)
3083 {
3084 tree op0_type = TREE_TYPE (op0);
3085 tree op1_type = TREE_TYPE (op1);
3086
3087 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3088 {
3089 error ("invalid operands in gimple comparison");
3090 return true;
3091 }
3092
3093 /* For comparisons we do not have the operation's type as the
3094 effective type the comparison is carried out in. Instead
3095 we require that either the first operand is trivially
3096 convertible into the second, or the other way around.
3097 Because we special-case pointers to void we allow
3098 comparisons of pointers with the same mode as well. */
3099 if (!useless_type_conversion_p (op0_type, op1_type)
3100 && !useless_type_conversion_p (op1_type, op0_type)
3101 && (!POINTER_TYPE_P (op0_type)
3102 || !POINTER_TYPE_P (op1_type)
3103 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3104 {
3105 error ("mismatching comparison operand types");
3106 debug_generic_expr (op0_type);
3107 debug_generic_expr (op1_type);
3108 return true;
3109 }
3110
3111 /* The resulting type of a comparison may be an effective boolean type. */
3112 if (INTEGRAL_TYPE_P (type)
3113 && (TREE_CODE (type) == BOOLEAN_TYPE
3114 || TYPE_PRECISION (type) == 1))
3115 {
3116 if (TREE_CODE (op0_type) == VECTOR_TYPE
3117 || TREE_CODE (op1_type) == VECTOR_TYPE)
3118 {
3119 error ("vector comparison returning a boolean");
3120 debug_generic_expr (op0_type);
3121 debug_generic_expr (op1_type);
3122 return true;
3123 }
3124 }
3125 /* Or an integer vector type with the same size and element count
3126 as the comparison operand types. */
3127 else if (TREE_CODE (type) == VECTOR_TYPE
3128 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3129 {
3130 if (TREE_CODE (op0_type) != VECTOR_TYPE
3131 || TREE_CODE (op1_type) != VECTOR_TYPE)
3132 {
3133 error ("non-vector operands in vector comparison");
3134 debug_generic_expr (op0_type);
3135 debug_generic_expr (op1_type);
3136 return true;
3137 }
3138
3139 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3140 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3141 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3142 /* The result of a vector comparison is of signed
3143 integral type. */
3144 || TYPE_UNSIGNED (TREE_TYPE (type)))
3145 {
3146 error ("invalid vector comparison resulting type");
3147 debug_generic_expr (type);
3148 return true;
3149 }
3150 }
3151 else
3152 {
3153 error ("bogus comparison result type");
3154 debug_generic_expr (type);
3155 return true;
3156 }
3157
3158 return false;
3159 }
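
/* Two shapes pass the checks above, as a sketch: a scalar comparison
   such as 'b_1 = x_2 < y_3' with a BOOLEAN_TYPE (or precision-1
   integral) result, and a vector comparison such as V4SF < V4SF
   producing a V4SI result -- same number of subparts, matching
   element size, and a signed integral element type.  */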
3160
3161 /* Verify a gimple assignment statement STMT with a unary rhs.
3162 Returns true if anything is wrong. */
3163
3164 static bool
3165 verify_gimple_assign_unary (gimple stmt)
3166 {
3167 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3168 tree lhs = gimple_assign_lhs (stmt);
3169 tree lhs_type = TREE_TYPE (lhs);
3170 tree rhs1 = gimple_assign_rhs1 (stmt);
3171 tree rhs1_type = TREE_TYPE (rhs1);
3172
3173 if (!is_gimple_reg (lhs))
3174 {
3175 error ("non-register as LHS of unary operation");
3176 return true;
3177 }
3178
3179 if (!is_gimple_val (rhs1))
3180 {
3181 error ("invalid operand in unary operation");
3182 return true;
3183 }
3184
3185 /* First handle conversions. */
3186 switch (rhs_code)
3187 {
3188 CASE_CONVERT:
3189 {
3190 /* Allow conversions from pointer type to integral type only if
3191 there is no sign or zero extension involved.
3192 For targets where the precision of ptrofftype doesn't match that
3193 of pointers we need to allow arbitrary conversions to ptrofftype. */
3194 if ((POINTER_TYPE_P (lhs_type)
3195 && INTEGRAL_TYPE_P (rhs1_type))
3196 || (POINTER_TYPE_P (rhs1_type)
3197 && INTEGRAL_TYPE_P (lhs_type)
3198 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3199 || ptrofftype_p (sizetype))))
3200 return false;
3201
3202 /* Allow conversion from integral to offset type and vice versa. */
3203 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3204 && INTEGRAL_TYPE_P (rhs1_type))
3205 || (INTEGRAL_TYPE_P (lhs_type)
3206 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3207 return false;
3208
3209 /* Otherwise assert we are converting between types of the
3210 same kind. */
3211 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3212 {
3213 error ("invalid types in nop conversion");
3214 debug_generic_expr (lhs_type);
3215 debug_generic_expr (rhs1_type);
3216 return true;
3217 }
3218
3219 return false;
3220 }
3221
3222 case ADDR_SPACE_CONVERT_EXPR:
3223 {
3224 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3225 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3226 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3227 {
3228 error ("invalid types in address space conversion");
3229 debug_generic_expr (lhs_type);
3230 debug_generic_expr (rhs1_type);
3231 return true;
3232 }
3233
3234 return false;
3235 }
3236
3237 case FIXED_CONVERT_EXPR:
3238 {
3239 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3240 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3241 {
3242 error ("invalid types in fixed-point conversion");
3243 debug_generic_expr (lhs_type);
3244 debug_generic_expr (rhs1_type);
3245 return true;
3246 }
3247
3248 return false;
3249 }
3250
3251 case FLOAT_EXPR:
3252 {
3253 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3254 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3255 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3256 {
3257 error ("invalid types in conversion to floating point");
3258 debug_generic_expr (lhs_type);
3259 debug_generic_expr (rhs1_type);
3260 return true;
3261 }
3262
3263 return false;
3264 }
3265
3266 case FIX_TRUNC_EXPR:
3267 {
3268 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3269 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3270 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3271 {
3272 error ("invalid types in conversion to integer");
3273 debug_generic_expr (lhs_type);
3274 debug_generic_expr (rhs1_type);
3275 return true;
3276 }
3277
3278 return false;
3279 }
3280
3281 case VEC_UNPACK_HI_EXPR:
3282 case VEC_UNPACK_LO_EXPR:
3283 case REDUC_MAX_EXPR:
3284 case REDUC_MIN_EXPR:
3285 case REDUC_PLUS_EXPR:
3286 case VEC_UNPACK_FLOAT_HI_EXPR:
3287 case VEC_UNPACK_FLOAT_LO_EXPR:
3288 /* FIXME. */
3289 return false;
3290
3291 case NEGATE_EXPR:
3292 case ABS_EXPR:
3293 case BIT_NOT_EXPR:
3294 case PAREN_EXPR:
3295 case NON_LVALUE_EXPR:
3296 case CONJ_EXPR:
3297 break;
3298
3299 default:
3300 gcc_unreachable ();
3301 }
3302
3303 /* For the remaining codes assert there is no conversion involved. */
3304 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3305 {
3306 error ("non-trivial conversion in unary operation");
3307 debug_generic_expr (lhs_type);
3308 debug_generic_expr (rhs1_type);
3309 return true;
3310 }
3311
3312 return false;
3313 }
3314
3315 /* Verify a gimple assignment statement STMT with a binary rhs.
3316 Returns true if anything is wrong. */
3317
3318 static bool
3319 verify_gimple_assign_binary (gimple stmt)
3320 {
3321 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3322 tree lhs = gimple_assign_lhs (stmt);
3323 tree lhs_type = TREE_TYPE (lhs);
3324 tree rhs1 = gimple_assign_rhs1 (stmt);
3325 tree rhs1_type = TREE_TYPE (rhs1);
3326 tree rhs2 = gimple_assign_rhs2 (stmt);
3327 tree rhs2_type = TREE_TYPE (rhs2);
3328
3329 if (!is_gimple_reg (lhs))
3330 {
3331 error ("non-register as LHS of binary operation");
3332 return true;
3333 }
3334
3335 if (!is_gimple_val (rhs1)
3336 || !is_gimple_val (rhs2))
3337 {
3338 error ("invalid operands in binary operation");
3339 return true;
3340 }
3341
3342 /* First handle operations that involve different types. */
3343 switch (rhs_code)
3344 {
3345 case COMPLEX_EXPR:
3346 {
3347 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3348 || !(INTEGRAL_TYPE_P (rhs1_type)
3349 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3350 || !(INTEGRAL_TYPE_P (rhs2_type)
3351 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3352 {
3353 error ("type mismatch in complex expression");
3354 debug_generic_expr (lhs_type);
3355 debug_generic_expr (rhs1_type);
3356 debug_generic_expr (rhs2_type);
3357 return true;
3358 }
3359
3360 return false;
3361 }
3362
3363 case LSHIFT_EXPR:
3364 case RSHIFT_EXPR:
3365 case LROTATE_EXPR:
3366 case RROTATE_EXPR:
3367 {
3368 /* Shifts and rotates are ok on integral types, fixed point
3369 types and integer vector types. */
3370 if ((!INTEGRAL_TYPE_P (rhs1_type)
3371 && !FIXED_POINT_TYPE_P (rhs1_type)
3372 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3373 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3374 || (!INTEGRAL_TYPE_P (rhs2_type)
3375 /* Vector shifts of vectors are also ok. */
3376 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3377 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3378 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3379 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3380 || !useless_type_conversion_p (lhs_type, rhs1_type))
3381 {
3382 error ("type mismatch in shift expression");
3383 debug_generic_expr (lhs_type);
3384 debug_generic_expr (rhs1_type);
3385 debug_generic_expr (rhs2_type);
3386 return true;
3387 }
3388
3389 return false;
3390 }
3391
3392 case VEC_LSHIFT_EXPR:
3393 case VEC_RSHIFT_EXPR:
3394 {
3395 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3396 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3397 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3398 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3399 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3400 || (!INTEGRAL_TYPE_P (rhs2_type)
3401 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3402 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3403 || !useless_type_conversion_p (lhs_type, rhs1_type))
3404 {
3405 error ("type mismatch in vector shift expression");
3406 debug_generic_expr (lhs_type);
3407 debug_generic_expr (rhs1_type);
3408 debug_generic_expr (rhs2_type);
3409 return true;
3410 }
3411 /* For shifting a vector of non-integral components we
3412 only allow shifting by a constant multiple of the element size. */
3413 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3414 && (TREE_CODE (rhs2) != INTEGER_CST
3415 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3416 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3417 {
3418 error ("non-element sized vector shift of floating point vector");
3419 return true;
3420 }
3421
3422 return false;
3423 }
3424
3425 case WIDEN_LSHIFT_EXPR:
3426 {
3427 if (!INTEGRAL_TYPE_P (lhs_type)
3428 || !INTEGRAL_TYPE_P (rhs1_type)
3429 || TREE_CODE (rhs2) != INTEGER_CST
3430 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3431 {
3432 error ("type mismatch in widening vector shift expression");
3433 debug_generic_expr (lhs_type);
3434 debug_generic_expr (rhs1_type);
3435 debug_generic_expr (rhs2_type);
3436 return true;
3437 }
3438
3439 return false;
3440 }
3441
3442 case VEC_WIDEN_LSHIFT_HI_EXPR:
3443 case VEC_WIDEN_LSHIFT_LO_EXPR:
3444 {
3445 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3446 || TREE_CODE (lhs_type) != VECTOR_TYPE
3447 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3448 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3449 || TREE_CODE (rhs2) != INTEGER_CST
3450 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3451 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3452 {
3453 error ("type mismatch in widening vector shift expression");
3454 debug_generic_expr (lhs_type);
3455 debug_generic_expr (rhs1_type);
3456 debug_generic_expr (rhs2_type);
3457 return true;
3458 }
3459
3460 return false;
3461 }
3462
3463 case PLUS_EXPR:
3464 case MINUS_EXPR:
3465 {
3466 tree lhs_etype = lhs_type;
3467 tree rhs1_etype = rhs1_type;
3468 tree rhs2_etype = rhs2_type;
3469 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3470 {
3471 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3472 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3473 {
3474 error ("invalid non-vector operands to vector valued plus");
3475 return true;
3476 }
3477 lhs_etype = TREE_TYPE (lhs_type);
3478 rhs1_etype = TREE_TYPE (rhs1_type);
3479 rhs2_etype = TREE_TYPE (rhs2_type);
3480 }
3481 if (POINTER_TYPE_P (lhs_etype)
3482 || POINTER_TYPE_P (rhs1_etype)
3483 || POINTER_TYPE_P (rhs2_etype))
3484 {
3485 error ("invalid (pointer) operands to plus/minus");
3486 return true;
3487 }
3488
3489 /* Continue with generic binary expression handling. */
3490 break;
3491 }
3492
3493 case POINTER_PLUS_EXPR:
3494 {
3495 if (!POINTER_TYPE_P (rhs1_type)
3496 || !useless_type_conversion_p (lhs_type, rhs1_type)
3497 || !ptrofftype_p (rhs2_type))
3498 {
3499 error ("type mismatch in pointer plus expression");
3500 debug_generic_stmt (lhs_type);
3501 debug_generic_stmt (rhs1_type);
3502 debug_generic_stmt (rhs2_type);
3503 return true;
3504 }
3505
3506 return false;
3507 }
3508
3509 case TRUTH_ANDIF_EXPR:
3510 case TRUTH_ORIF_EXPR:
3511 case TRUTH_AND_EXPR:
3512 case TRUTH_OR_EXPR:
3513 case TRUTH_XOR_EXPR:
3514
3515 gcc_unreachable ();
3516
3517 case LT_EXPR:
3518 case LE_EXPR:
3519 case GT_EXPR:
3520 case GE_EXPR:
3521 case EQ_EXPR:
3522 case NE_EXPR:
3523 case UNORDERED_EXPR:
3524 case ORDERED_EXPR:
3525 case UNLT_EXPR:
3526 case UNLE_EXPR:
3527 case UNGT_EXPR:
3528 case UNGE_EXPR:
3529 case UNEQ_EXPR:
3530 case LTGT_EXPR:
3531 /* Comparisons are also binary, but the result type is not
3532 connected to the operand types. */
3533 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3534
3535 case WIDEN_MULT_EXPR:
3536 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3537 return true;
3538 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3539 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3540
3541 case WIDEN_SUM_EXPR:
3542 case VEC_WIDEN_MULT_HI_EXPR:
3543 case VEC_WIDEN_MULT_LO_EXPR:
3544 case VEC_WIDEN_MULT_EVEN_EXPR:
3545 case VEC_WIDEN_MULT_ODD_EXPR:
3546 case VEC_PACK_TRUNC_EXPR:
3547 case VEC_PACK_SAT_EXPR:
3548 case VEC_PACK_FIX_TRUNC_EXPR:
3549 /* FIXME. */
3550 return false;
3551
3552 case MULT_EXPR:
3553 case MULT_HIGHPART_EXPR:
3554 case TRUNC_DIV_EXPR:
3555 case CEIL_DIV_EXPR:
3556 case FLOOR_DIV_EXPR:
3557 case ROUND_DIV_EXPR:
3558 case TRUNC_MOD_EXPR:
3559 case CEIL_MOD_EXPR:
3560 case FLOOR_MOD_EXPR:
3561 case ROUND_MOD_EXPR:
3562 case RDIV_EXPR:
3563 case EXACT_DIV_EXPR:
3564 case MIN_EXPR:
3565 case MAX_EXPR:
3566 case BIT_IOR_EXPR:
3567 case BIT_XOR_EXPR:
3568 case BIT_AND_EXPR:
3569 /* Continue with generic binary expression handling. */
3570 break;
3571
3572 default:
3573 gcc_unreachable ();
3574 }
3575
3576 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3577 || !useless_type_conversion_p (lhs_type, rhs2_type))
3578 {
3579 error ("type mismatch in binary expression");
3580 debug_generic_stmt (lhs_type);
3581 debug_generic_stmt (rhs1_type);
3582 debug_generic_stmt (rhs2_type);
3583 return true;
3584 }
3585
3586 return false;
3587 }
3588
3589 /* Verify a gimple assignment statement STMT with a ternary rhs.
3590 Returns true if anything is wrong. */
3591
3592 static bool
3593 verify_gimple_assign_ternary (gimple stmt)
3594 {
3595 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3596 tree lhs = gimple_assign_lhs (stmt);
3597 tree lhs_type = TREE_TYPE (lhs);
3598 tree rhs1 = gimple_assign_rhs1 (stmt);
3599 tree rhs1_type = TREE_TYPE (rhs1);
3600 tree rhs2 = gimple_assign_rhs2 (stmt);
3601 tree rhs2_type = TREE_TYPE (rhs2);
3602 tree rhs3 = gimple_assign_rhs3 (stmt);
3603 tree rhs3_type = TREE_TYPE (rhs3);
3604
3605 if (!is_gimple_reg (lhs))
3606 {
3607 error ("non-register as LHS of ternary operation");
3608 return true;
3609 }
3610
3611 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3612 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3613 || !is_gimple_val (rhs2)
3614 || !is_gimple_val (rhs3))
3615 {
3616 error ("invalid operands in ternary operation");
3617 return true;
3618 }
3619
3620 /* First handle operations that involve different types. */
3621 switch (rhs_code)
3622 {
3623 case WIDEN_MULT_PLUS_EXPR:
3624 case WIDEN_MULT_MINUS_EXPR:
3625 if ((!INTEGRAL_TYPE_P (rhs1_type)
3626 && !FIXED_POINT_TYPE_P (rhs1_type))
3627 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3628 || !useless_type_conversion_p (lhs_type, rhs3_type)
3629 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3630 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3631 {
3632 error ("type mismatch in widening multiply-accumulate expression");
3633 debug_generic_expr (lhs_type);
3634 debug_generic_expr (rhs1_type);
3635 debug_generic_expr (rhs2_type);
3636 debug_generic_expr (rhs3_type);
3637 return true;
3638 }
3639 break;
3640
3641 case FMA_EXPR:
3642 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3643 || !useless_type_conversion_p (lhs_type, rhs2_type)
3644 || !useless_type_conversion_p (lhs_type, rhs3_type))
3645 {
3646 error ("type mismatch in fused multiply-add expression");
3647 debug_generic_expr (lhs_type);
3648 debug_generic_expr (rhs1_type);
3649 debug_generic_expr (rhs2_type);
3650 debug_generic_expr (rhs3_type);
3651 return true;
3652 }
3653 break;
3654
3655 case COND_EXPR:
3656 case VEC_COND_EXPR:
3657 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3658 || !useless_type_conversion_p (lhs_type, rhs3_type))
3659 {
3660 error ("type mismatch in conditional expression");
3661 debug_generic_expr (lhs_type);
3662 debug_generic_expr (rhs2_type);
3663 debug_generic_expr (rhs3_type);
3664 return true;
3665 }
3666 break;
3667
3668 case VEC_PERM_EXPR:
3669 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3670 || !useless_type_conversion_p (lhs_type, rhs2_type))
3671 {
3672 error ("type mismatch in vector permute expression");
3673 debug_generic_expr (lhs_type);
3674 debug_generic_expr (rhs1_type);
3675 debug_generic_expr (rhs2_type);
3676 debug_generic_expr (rhs3_type);
3677 return true;
3678 }
3679
3680 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3681 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3682 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3683 {
3684 error ("vector types expected in vector permute expression");
3685 debug_generic_expr (lhs_type);
3686 debug_generic_expr (rhs1_type);
3687 debug_generic_expr (rhs2_type);
3688 debug_generic_expr (rhs3_type);
3689 return true;
3690 }
3691
3692 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3693 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3694 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3695 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3696 != TYPE_VECTOR_SUBPARTS (lhs_type))
3697 {
3698 error ("vectors with different element number found "
3699 "in vector permute expression");
3700 debug_generic_expr (lhs_type);
3701 debug_generic_expr (rhs1_type);
3702 debug_generic_expr (rhs2_type);
3703 debug_generic_expr (rhs3_type);
3704 return true;
3705 }
3706
3707 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3708 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3709 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3710 {
3711 error ("invalid mask type in vector permute expression");
3712 debug_generic_expr (lhs_type);
3713 debug_generic_expr (rhs1_type);
3714 debug_generic_expr (rhs2_type);
3715 debug_generic_expr (rhs3_type);
3716 return true;
3717 }
3718
3719 return false;
3720
3721 case DOT_PROD_EXPR:
3722 case REALIGN_LOAD_EXPR:
3723 /* FIXME. */
3724 return false;
3725
3726 default:
3727 gcc_unreachable ();
3728 }
3729 return false;
3730 }
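
/* Example of a VEC_PERM_EXPR that the checks above accept (a sketch):
   lhs, rhs1 and rhs2 all of type V4SI with an rhs3 mask such as
   { 0, 4, 1, 5 } -- every operand has four subparts and the mask
   elements are integers as wide as the data elements.  */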
3731
3732 /* Verify a gimple assignment statement STMT with a single rhs.
3733 Returns true if anything is wrong. */
3734
3735 static bool
3736 verify_gimple_assign_single (gimple stmt)
3737 {
3738 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3739 tree lhs = gimple_assign_lhs (stmt);
3740 tree lhs_type = TREE_TYPE (lhs);
3741 tree rhs1 = gimple_assign_rhs1 (stmt);
3742 tree rhs1_type = TREE_TYPE (rhs1);
3743 bool res = false;
3744
3745 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3746 {
3747 error ("non-trivial conversion at assignment");
3748 debug_generic_expr (lhs_type);
3749 debug_generic_expr (rhs1_type);
3750 return true;
3751 }
3752
3753 if (gimple_clobber_p (stmt)
3754 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3755 {
3756 error ("non-decl/MEM_REF LHS in clobber statement");
3757 debug_generic_expr (lhs);
3758 return true;
3759 }
3760
3761 if (handled_component_p (lhs))
3762 res |= verify_types_in_gimple_reference (lhs, true);
3763
3764 /* Special codes we cannot handle via their class. */
3765 switch (rhs_code)
3766 {
3767 case ADDR_EXPR:
3768 {
3769 tree op = TREE_OPERAND (rhs1, 0);
3770 if (!is_gimple_addressable (op))
3771 {
3772 error ("invalid operand in unary expression");
3773 return true;
3774 }
3775
3776 /* Technically there is no longer a need for matching types, but
3777 gimple hygiene asks for this check. In LTO we can end up
3778 combining incompatible units and thus get addresses of globals
3779 that change their type to a common one. */
3780 if (!in_lto_p
3781 && !types_compatible_p (TREE_TYPE (op),
3782 TREE_TYPE (TREE_TYPE (rhs1)))
3783 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3784 TREE_TYPE (op)))
3785 {
3786 error ("type mismatch in address expression");
3787 debug_generic_stmt (TREE_TYPE (rhs1));
3788 debug_generic_stmt (TREE_TYPE (op));
3789 return true;
3790 }
3791
3792 return verify_types_in_gimple_reference (op, true);
3793 }
3794
3795 /* tcc_reference */
3796 case INDIRECT_REF:
3797 error ("INDIRECT_REF in gimple IL");
3798 return true;
3799
3800 case COMPONENT_REF:
3801 case BIT_FIELD_REF:
3802 case ARRAY_REF:
3803 case ARRAY_RANGE_REF:
3804 case VIEW_CONVERT_EXPR:
3805 case REALPART_EXPR:
3806 case IMAGPART_EXPR:
3807 case TARGET_MEM_REF:
3808 case MEM_REF:
3809 if (!is_gimple_reg (lhs)
3810 && is_gimple_reg_type (TREE_TYPE (lhs)))
3811 {
3812 error ("invalid rhs for gimple memory store");
3813 debug_generic_stmt (lhs);
3814 debug_generic_stmt (rhs1);
3815 return true;
3816 }
3817 return res || verify_types_in_gimple_reference (rhs1, false);
3818
3819 /* tcc_constant */
3820 case SSA_NAME:
3821 case INTEGER_CST:
3822 case REAL_CST:
3823 case FIXED_CST:
3824 case COMPLEX_CST:
3825 case VECTOR_CST:
3826 case STRING_CST:
3827 return res;
3828
3829 /* tcc_declaration */
3830 case CONST_DECL:
3831 return res;
3832 case VAR_DECL:
3833 case PARM_DECL:
3834 if (!is_gimple_reg (lhs)
3835 && !is_gimple_reg (rhs1)
3836 && is_gimple_reg_type (TREE_TYPE (lhs)))
3837 {
3838 error ("invalid rhs for gimple memory store");
3839 debug_generic_stmt (lhs);
3840 debug_generic_stmt (rhs1);
3841 return true;
3842 }
3843 return res;
3844
3845 case CONSTRUCTOR:
3846 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3847 {
3848 unsigned int i;
3849 tree elt_i, elt_v, elt_t = NULL_TREE;
3850
3851 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3852 return res;
3853 /* For vector CONSTRUCTORs we require that either it is an empty
3854 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3855 (then the element count must be correct to cover the whole
3856 outer vector and the index must be NULL on all elements), or it
3857 is a CONSTRUCTOR of scalar elements, where as an exception we
3858 allow a smaller number of elements (assuming zero filling) and
3859 consecutive indexes compared to NULL indexes (such CONSTRUCTORs
3860 can appear in the IL from front ends). */
3861 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3862 {
3863 if (elt_t == NULL_TREE)
3864 {
3865 elt_t = TREE_TYPE (elt_v);
3866 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3867 {
3868 tree elt_t = TREE_TYPE (elt_v);
3869 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3870 TREE_TYPE (elt_t)))
3871 {
3872 error ("incorrect type of vector CONSTRUCTOR"
3873 " elements");
3874 debug_generic_stmt (rhs1);
3875 return true;
3876 }
3877 else if (CONSTRUCTOR_NELTS (rhs1)
3878 * TYPE_VECTOR_SUBPARTS (elt_t)
3879 != TYPE_VECTOR_SUBPARTS (rhs1_type))
3880 {
3881 error ("incorrect number of vector CONSTRUCTOR"
3882 " elements");
3883 debug_generic_stmt (rhs1);
3884 return true;
3885 }
3886 }
3887 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3888 elt_t))
3889 {
3890 error ("incorrect type of vector CONSTRUCTOR elements");
3891 debug_generic_stmt (rhs1);
3892 return true;
3893 }
3894 else if (CONSTRUCTOR_NELTS (rhs1)
3895 > TYPE_VECTOR_SUBPARTS (rhs1_type))
3896 {
3897 error ("incorrect number of vector CONSTRUCTOR elements");
3898 debug_generic_stmt (rhs1);
3899 return true;
3900 }
3901 }
3902 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
3903 {
3904 error ("incorrect type of vector CONSTRUCTOR elements");
3905 debug_generic_stmt (rhs1);
3906 return true;
3907 }
3908 if (elt_i != NULL_TREE
3909 && (TREE_CODE (elt_t) == VECTOR_TYPE
3910 || TREE_CODE (elt_i) != INTEGER_CST
3911 || compare_tree_int (elt_i, i) != 0))
3912 {
3913 error ("vector CONSTRUCTOR with non-NULL element index");
3914 debug_generic_stmt (rhs1);
3915 return true;
3916 }
3917 }
3918 }
3919 return res;
3920 case OBJ_TYPE_REF:
3921 case ASSERT_EXPR:
3922 case WITH_SIZE_EXPR:
3923 /* FIXME. */
3924 return res;
3925
3926 default:;
3927 }
3928
3929 return res;
3930 }
3931
3932 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3933 is a problem, otherwise false. */
3934
3935 static bool
3936 verify_gimple_assign (gimple stmt)
3937 {
3938 switch (gimple_assign_rhs_class (stmt))
3939 {
3940 case GIMPLE_SINGLE_RHS:
3941 return verify_gimple_assign_single (stmt);
3942
3943 case GIMPLE_UNARY_RHS:
3944 return verify_gimple_assign_unary (stmt);
3945
3946 case GIMPLE_BINARY_RHS:
3947 return verify_gimple_assign_binary (stmt);
3948
3949 case GIMPLE_TERNARY_RHS:
3950 return verify_gimple_assign_ternary (stmt);
3951
3952 default:
3953 gcc_unreachable ();
3954 }
3955 }
3956
3957 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3958 is a problem, otherwise false. */
3959
3960 static bool
3961 verify_gimple_return (gimple stmt)
3962 {
3963 tree op = gimple_return_retval (stmt);
3964 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3965
3966 /* We cannot test for present return values as we do not fix up missing
3967 return values from the original source. */
3968 if (op == NULL)
3969 return false;
3970
3971 if (!is_gimple_val (op)
3972 && TREE_CODE (op) != RESULT_DECL)
3973 {
3974 error ("invalid operand in return statement");
3975 debug_generic_stmt (op);
3976 return true;
3977 }
3978
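/* For a DECL_BY_REFERENCE result the RESULT_DECL has pointer type;
   replace OP by its type so that TREE_TYPE (op) in the check below
   denotes the pointed-to type, which must match RESTYPE.  */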
3979 if ((TREE_CODE (op) == RESULT_DECL
3980 && DECL_BY_REFERENCE (op))
3981 || (TREE_CODE (op) == SSA_NAME
3982 && SSA_NAME_VAR (op)
3983 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
3984 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
3985 op = TREE_TYPE (op);
3986
3987 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
3988 {
3989 error ("invalid conversion in return statement");
3990 debug_generic_stmt (restype);
3991 debug_generic_stmt (TREE_TYPE (op));
3992 return true;
3993 }
3994
3995 return false;
3996 }
3997
3998
3999 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4000 is a problem, otherwise false. */
4001
4002 static bool
4003 verify_gimple_goto (gimple stmt)
4004 {
4005 tree dest = gimple_goto_dest (stmt);
4006
4007 /* ??? We have two canonical forms of direct goto destinations, a
4008 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
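/* A computed goto such as goto *ptr_1; thus only requires DEST to
   be a pointer-typed gimple value.  */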
4009 if (TREE_CODE (dest) != LABEL_DECL
4010 && (!is_gimple_val (dest)
4011 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4012 {
4013 error ("goto destination is neither a label nor a pointer");
4014 return true;
4015 }
4016
4017 return false;
4018 }
4019
4020 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4021 is a problem, otherwise false. */
4022
4023 static bool
4024 verify_gimple_switch (gimple stmt)
4025 {
4026 unsigned int i, n;
4027 tree elt, prev_upper_bound = NULL_TREE;
4028 tree index_type, elt_type = NULL_TREE;
4029
4030 if (!is_gimple_val (gimple_switch_index (stmt)))
4031 {
4032 error ("invalid operand to switch statement");
4033 debug_generic_stmt (gimple_switch_index (stmt));
4034 return true;
4035 }
4036
4037 index_type = TREE_TYPE (gimple_switch_index (stmt));
4038 if (! INTEGRAL_TYPE_P (index_type))
4039 {
4040 error ("non-integral type switch statement");
4041 debug_generic_expr (index_type);
4042 return true;
4043 }
4044
4045 elt = gimple_switch_label (stmt, 0);
4046 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4047 {
4048 error ("invalid default case label in switch statement");
4049 debug_generic_expr (elt);
4050 return true;
4051 }
4052
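/* The remaining labels must each have a CASE_LOW, share one type
   whose precision does not exceed that of the index type, and be
   sorted with non-overlapping ranges; e.g.
   default:, case 1:, case 3 ... 5:, case 7:.  */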
4053 n = gimple_switch_num_labels (stmt);
4054 for (i = 1; i < n; i++)
4055 {
4056 elt = gimple_switch_label (stmt, i);
4057
4058 if (! CASE_LOW (elt))
4059 {
4060 error ("invalid case label in switch statement");
4061 debug_generic_expr (elt);
4062 return true;
4063 }
4064 if (CASE_HIGH (elt)
4065 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4066 {
4067 error ("invalid case range in switch statement");
4068 debug_generic_expr (elt);
4069 return true;
4070 }
4071
4072 if (elt_type)
4073 {
4074 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4075 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4076 {
4077 error ("type mismatch for case label in switch statement");
4078 debug_generic_expr (elt);
4079 return true;
4080 }
4081 }
4082 else
4083 {
4084 elt_type = TREE_TYPE (CASE_LOW (elt));
4085 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4086 {
4087 error ("type precision mismatch in switch statement");
4088 return true;
4089 }
4090 }
4091
4092 if (prev_upper_bound)
4093 {
4094 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4095 {
4096 error ("case labels not sorted in switch statement");
4097 return true;
4098 }
4099 }
4100
4101 prev_upper_bound = CASE_HIGH (elt);
4102 if (! prev_upper_bound)
4103 prev_upper_bound = CASE_LOW (elt);
4104 }
4105
4106 return false;
4107 }
4108
4109 /* Verify a gimple debug statement STMT.
4110 Returns true if anything is wrong. */
4111
4112 static bool
4113 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4114 {
4115 /* There isn't much that could be wrong in a gimple debug stmt. A
4116 gimple debug bind stmt, for example, maps a tree (usually a
4117 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4118 member of an aggregate type) to another tree that can be an
4119 arbitrary expression. These stmts expand into debug insns, and
4120 are converted to debug notes by var-tracking.c. */
4121 return false;
4122 }
4123
4124 /* Verify a gimple label statement STMT.
4125 Returns true if anything is wrong. */
4126
4127 static bool
4128 verify_gimple_label (gimple stmt)
4129 {
4130 tree decl = gimple_label_label (stmt);
4131 int uid;
4132 bool err = false;
4133
4134 if (TREE_CODE (decl) != LABEL_DECL)
4135 return true;
4136 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4137 && DECL_CONTEXT (decl) != current_function_decl)
4138 {
4139 error ("label's context is not the current function decl");
4140 err |= true;
4141 }
4142
4143 uid = LABEL_DECL_UID (decl);
4144 if (cfun->cfg
4145 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4146 {
4147 error ("incorrect entry in label_to_block_map");
4148 err |= true;
4149 }
4150
4151 uid = EH_LANDING_PAD_NR (decl);
4152 if (uid)
4153 {
4154 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4155 if (decl != lp->post_landing_pad)
4156 {
4157 error ("incorrect setting of landing pad number");
4158 err |= true;
4159 }
4160 }
4161
4162 return err;
4163 }
4164
4165 /* Verify the GIMPLE statement STMT. Returns true if there is an
4166 error, otherwise false. */
4167
4168 static bool
4169 verify_gimple_stmt (gimple stmt)
4170 {
4171 switch (gimple_code (stmt))
4172 {
4173 case GIMPLE_ASSIGN:
4174 return verify_gimple_assign (stmt);
4175
4176 case GIMPLE_LABEL:
4177 return verify_gimple_label (stmt);
4178
4179 case GIMPLE_CALL:
4180 return verify_gimple_call (stmt);
4181
4182 case GIMPLE_COND:
4183 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4184 {
4185 error ("invalid comparison code in gimple cond");
4186 return true;
4187 }
4188 if (!(!gimple_cond_true_label (stmt)
4189 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4190 || !(!gimple_cond_false_label (stmt)
4191 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4192 {
4193 error ("invalid labels in gimple cond");
4194 return true;
4195 }
4196
4197 return verify_gimple_comparison (boolean_type_node,
4198 gimple_cond_lhs (stmt),
4199 gimple_cond_rhs (stmt));
4200
4201 case GIMPLE_GOTO:
4202 return verify_gimple_goto (stmt);
4203
4204 case GIMPLE_SWITCH:
4205 return verify_gimple_switch (stmt);
4206
4207 case GIMPLE_RETURN:
4208 return verify_gimple_return (stmt);
4209
4210 case GIMPLE_ASM:
4211 return false;
4212
4213 case GIMPLE_TRANSACTION:
4214 return verify_gimple_transaction (stmt);
4215
4216 /* Tuples that do not have tree operands. */
4217 case GIMPLE_NOP:
4218 case GIMPLE_PREDICT:
4219 case GIMPLE_RESX:
4220 case GIMPLE_EH_DISPATCH:
4221 case GIMPLE_EH_MUST_NOT_THROW:
4222 return false;
4223
4224 CASE_GIMPLE_OMP:
4225 /* OpenMP directives are validated by the FE and never operated
4226 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4227 non-gimple expressions when the main index variable has had
4228 its address taken. This does not affect the loop itself
4229 because the header of a GIMPLE_OMP_FOR is merely used to determine
4230 how to set up the parallel iteration. */
4231 return false;
4232
4233 case GIMPLE_DEBUG:
4234 return verify_gimple_debug (stmt);
4235
4236 default:
4237 gcc_unreachable ();
4238 }
4239 }
4240
4241 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4242 and false otherwise. */
4243
4244 static bool
4245 verify_gimple_phi (gimple phi)
4246 {
4247 bool err = false;
4248 unsigned i;
4249 tree phi_result = gimple_phi_result (phi);
4250 bool virtual_p;
4251
4252 if (!phi_result)
4253 {
4254 error ("invalid PHI result");
4255 return true;
4256 }
4257
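/* The result must be an SSA name; for a virtual PHI it must be a
   name of the single virtual operand, gimple_vop (cfun),
   conventionally printed as .MEM.  */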
4258 virtual_p = virtual_operand_p (phi_result);
4259 if (TREE_CODE (phi_result) != SSA_NAME
4260 || (virtual_p
4261 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4262 {
4263 error ("invalid PHI result");
4264 err = true;
4265 }
4266
4267 for (i = 0; i < gimple_phi_num_args (phi); i++)
4268 {
4269 tree t = gimple_phi_arg_def (phi, i);
4270
4271 if (!t)
4272 {
4273 error ("missing PHI def");
4274 err |= true;
4275 continue;
4276 }
4277 /* Addressable variables do have SSA_NAMEs but they
4278 are not considered gimple values. */
4279 else if ((TREE_CODE (t) == SSA_NAME
4280 && virtual_p != virtual_operand_p (t))
4281 || (virtual_p
4282 && (TREE_CODE (t) != SSA_NAME
4283 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4284 || (!virtual_p
4285 && !is_gimple_val (t)))
4286 {
4287 error ("invalid PHI argument");
4288 debug_generic_expr (t);
4289 err |= true;
4290 }
4291 #ifdef ENABLE_TYPES_CHECKING
4292 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4293 {
4294 error ("incompatible types in PHI argument %u", i);
4295 debug_generic_stmt (TREE_TYPE (phi_result));
4296 debug_generic_stmt (TREE_TYPE (t));
4297 err |= true;
4298 }
4299 #endif
4300 }
4301
4302 return err;
4303 }
4304
4305 /* Verify the GIMPLE statements inside the sequence STMTS. */
4306
4307 static bool
4308 verify_gimple_in_seq_2 (gimple_seq stmts)
4309 {
4310 gimple_stmt_iterator ittr;
4311 bool err = false;
4312
4313 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4314 {
4315 gimple stmt = gsi_stmt (ittr);
4316
4317 switch (gimple_code (stmt))
4318 {
4319 case GIMPLE_BIND:
4320 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4321 break;
4322
4323 case GIMPLE_TRY:
4324 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4325 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4326 break;
4327
4328 case GIMPLE_EH_FILTER:
4329 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4330 break;
4331
4332 case GIMPLE_EH_ELSE:
4333 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4334 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4335 break;
4336
4337 case GIMPLE_CATCH:
4338 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4339 break;
4340
4341 case GIMPLE_TRANSACTION:
4342 err |= verify_gimple_transaction (stmt);
4343 break;
4344
4345 default:
4346 {
4347 bool err2 = verify_gimple_stmt (stmt);
4348 if (err2)
4349 debug_gimple_stmt (stmt);
4350 err |= err2;
4351 }
4352 }
4353 }
4354
4355 return err;
4356 }
4357
4358 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4359 is a problem, otherwise false. */
4360
4361 static bool
4362 verify_gimple_transaction (gimple stmt)
4363 {
4364 tree lab = gimple_transaction_label (stmt);
4365 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4366 return true;
4367 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4368 }
4369
4370
4371 /* Verify the GIMPLE statements inside the statement list STMTS. */
4372
4373 DEBUG_FUNCTION void
4374 verify_gimple_in_seq (gimple_seq stmts)
4375 {
4376 timevar_push (TV_TREE_STMT_VERIFY);
4377 if (verify_gimple_in_seq_2 (stmts))
4378 internal_error ("verify_gimple failed");
4379 timevar_pop (TV_TREE_STMT_VERIFY);
4380 }
4381
4382 /* Return true when T can be shared. */
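/* Types, decls, constants, SSA names and CASE_LABEL_EXPRs may be
   shared; expression nodes such as a MEM_REF or an arithmetic
   operation must be unshared.  */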
4383
4384 static bool
4385 tree_node_can_be_shared (tree t)
4386 {
4387 if (IS_TYPE_OR_DECL_P (t)
4388 || is_gimple_min_invariant (t)
4389 || TREE_CODE (t) == SSA_NAME
4390 || t == error_mark_node
4391 || TREE_CODE (t) == IDENTIFIER_NODE)
4392 return true;
4393
4394 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4395 return true;
4396
4397 if (DECL_P (t))
4398 return true;
4399
4400 return false;
4401 }
4402
4403 /* Called via walk_tree. Verify tree sharing. */
4404
4405 static tree
4406 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4407 {
4408 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4409
4410 if (tree_node_can_be_shared (*tp))
4411 {
4412 *walk_subtrees = false;
4413 return NULL;
4414 }
4415
4416 if (pointer_set_insert (visited, *tp))
4417 return *tp;
4418
4419 return NULL;
4420 }
4421
4422 /* Called via walk_gimple_stmt. Verify tree sharing. */
4423
4424 static tree
4425 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4426 {
4427 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4428 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4429 }
4430
4431 static bool eh_error_found;
4432 static int
4433 verify_eh_throw_stmt_node (void **slot, void *data)
4434 {
4435 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4436 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4437
4438 if (!pointer_set_contains (visited, node->stmt))
4439 {
4440 error ("dead STMT in EH table");
4441 debug_gimple_stmt (node->stmt);
4442 eh_error_found = true;
4443 }
4444 return 1;
4445 }
4446
4447 /* Verify that the block of location LOC is in BLOCKS. */
4448
4449 static bool
4450 verify_location (pointer_set_t *blocks, location_t loc)
4451 {
4452 tree block = LOCATION_BLOCK (loc);
4453 if (block != NULL_TREE
4454 && !pointer_set_contains (blocks, block))
4455 {
4456 error ("location references block not in block tree");
4457 return true;
4458 }
4459 if (block != NULL_TREE)
4460 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4461 return false;
4462 }
4463
4464 /* Called via walk_tree. Verify that expressions have no blocks. */
4465
4466 static tree
4467 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4468 {
4469 if (!EXPR_P (*tp))
4470 {
4471 *walk_subtrees = false;
4472 return NULL;
4473 }
4474
4475 location_t loc = EXPR_LOCATION (*tp);
4476 if (LOCATION_BLOCK (loc) != NULL)
4477 return *tp;
4478
4479 return NULL;
4480 }
4481
4482 /* Called via walk_tree. Verify locations of expressions. */
4483
4484 static tree
4485 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4486 {
4487 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4488
4489 if (TREE_CODE (*tp) == VAR_DECL
4490 && DECL_HAS_DEBUG_EXPR_P (*tp))
4491 {
4492 tree t = DECL_DEBUG_EXPR (*tp);
4493 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4494 if (addr)
4495 return addr;
4496 }
4497 if ((TREE_CODE (*tp) == VAR_DECL
4498 || TREE_CODE (*tp) == PARM_DECL
4499 || TREE_CODE (*tp) == RESULT_DECL)
4500 && DECL_HAS_VALUE_EXPR_P (*tp))
4501 {
4502 tree t = DECL_VALUE_EXPR (*tp);
4503 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4504 if (addr)
4505 return addr;
4506 }
4507
4508 if (!EXPR_P (*tp))
4509 {
4510 *walk_subtrees = false;
4511 return NULL;
4512 }
4513
4514 location_t loc = EXPR_LOCATION (*tp);
4515 if (verify_location (blocks, loc))
4516 return *tp;
4517
4518 return NULL;
4519 }
4520
4521 /* Called via walk_gimple_op. Verify locations of expressions. */
4522
4523 static tree
4524 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4525 {
4526 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4527 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4528 }
4529
4530 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4531
4532 static void
4533 collect_subblocks (pointer_set_t *blocks, tree block)
4534 {
4535 tree t;
4536 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4537 {
4538 pointer_set_insert (blocks, t);
4539 collect_subblocks (blocks, t);
4540 }
4541 }
4542
4543 /* Verify the GIMPLE statements in the CFG of FN. */
4544
4545 DEBUG_FUNCTION void
4546 verify_gimple_in_cfg (struct function *fn)
4547 {
4548 basic_block bb;
4549 bool err = false;
4550 struct pointer_set_t *visited, *visited_stmts, *blocks;
4551
4552 timevar_push (TV_TREE_STMT_VERIFY);
4553 visited = pointer_set_create ();
4554 visited_stmts = pointer_set_create ();
4555
4556 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4557 blocks = pointer_set_create ();
4558 if (DECL_INITIAL (fn->decl))
4559 {
4560 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4561 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4562 }
4563
4564 FOR_EACH_BB_FN (bb, fn)
4565 {
4566 gimple_stmt_iterator gsi;
4567
4568 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4569 {
4570 gimple phi = gsi_stmt (gsi);
4571 bool err2 = false;
4572 unsigned i;
4573
4574 pointer_set_insert (visited_stmts, phi);
4575
4576 if (gimple_bb (phi) != bb)
4577 {
4578 error ("gimple_bb (phi) is set to a wrong basic block");
4579 err2 = true;
4580 }
4581
4582 err2 |= verify_gimple_phi (phi);
4583
4584 /* Only PHI arguments have locations. */
4585 if (gimple_location (phi) != UNKNOWN_LOCATION)
4586 {
4587 error ("PHI node with location");
4588 err2 = true;
4589 }
4590
4591 for (i = 0; i < gimple_phi_num_args (phi); i++)
4592 {
4593 tree arg = gimple_phi_arg_def (phi, i);
4594 tree addr = walk_tree (&arg, verify_node_sharing_1,
4595 visited, NULL);
4596 if (addr)
4597 {
4598 error ("incorrect sharing of tree nodes");
4599 debug_generic_expr (addr);
4600 err2 |= true;
4601 }
4602 location_t loc = gimple_phi_arg_location (phi, i);
4603 if (virtual_operand_p (gimple_phi_result (phi))
4604 && loc != UNKNOWN_LOCATION)
4605 {
4606 error ("virtual PHI with argument locations");
4607 err2 = true;
4608 }
4609 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4610 if (addr)
4611 {
4612 debug_generic_expr (addr);
4613 err2 = true;
4614 }
4615 err2 |= verify_location (blocks, loc);
4616 }
4617
4618 if (err2)
4619 debug_gimple_stmt (phi);
4620 err |= err2;
4621 }
4622
4623 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4624 {
4625 gimple stmt = gsi_stmt (gsi);
4626 bool err2 = false;
4627 struct walk_stmt_info wi;
4628 tree addr;
4629 int lp_nr;
4630
4631 pointer_set_insert (visited_stmts, stmt);
4632
4633 if (gimple_bb (stmt) != bb)
4634 {
4635 error ("gimple_bb (stmt) is set to a wrong basic block");
4636 err2 = true;
4637 }
4638
4639 err2 |= verify_gimple_stmt (stmt);
4640 err2 |= verify_location (blocks, gimple_location (stmt));
4641
4642 memset (&wi, 0, sizeof (wi));
4643 wi.info = (void *) visited;
4644 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4645 if (addr)
4646 {
4647 error ("incorrect sharing of tree nodes");
4648 debug_generic_expr (addr);
4649 err2 |= true;
4650 }
4651
4652 memset (&wi, 0, sizeof (wi));
4653 wi.info = (void *) blocks;
4654 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4655 if (addr)
4656 {
4657 debug_generic_expr (addr);
4658 err2 |= true;
4659 }
4660
4661 /* ??? Instead of not checking these stmts at all the walker
4662 should know its context via wi. */
4663 if (!is_gimple_debug (stmt)
4664 && !is_gimple_omp (stmt))
4665 {
4666 memset (&wi, 0, sizeof (wi));
4667 addr = walk_gimple_op (stmt, verify_expr, &wi);
4668 if (addr)
4669 {
4670 debug_generic_expr (addr);
4671 inform (gimple_location (stmt), "in statement");
4672 err2 |= true;
4673 }
4674 }
4675
4676 /* If the statement is marked as part of an EH region, then it is
4677 expected that the statement could throw. Verify that when we
4678 have optimizations that simplify statements such that we prove
4679 that they cannot throw, that we update other data structures
4680 to match. */
4681 lp_nr = lookup_stmt_eh_lp (stmt);
4682 if (lp_nr != 0)
4683 {
4684 if (!stmt_could_throw_p (stmt))
4685 {
4686 error ("statement marked for throw, but doesn%'t");
4687 err2 |= true;
4688 }
4689 else if (lp_nr > 0
4690 && !gsi_one_before_end_p (gsi)
4691 && stmt_can_throw_internal (stmt))
4692 {
4693 error ("statement marked for throw in middle of block");
4694 err2 |= true;
4695 }
4696 }
4697
4698 if (err2)
4699 debug_gimple_stmt (stmt);
4700 err |= err2;
4701 }
4702 }
4703
4704 eh_error_found = false;
4705 if (get_eh_throw_stmt_table (cfun))
4706 htab_traverse (get_eh_throw_stmt_table (cfun),
4707 verify_eh_throw_stmt_node,
4708 visited_stmts);
4709
4710 if (err || eh_error_found)
4711 internal_error ("verify_gimple failed");
4712
4713 pointer_set_destroy (visited);
4714 pointer_set_destroy (visited_stmts);
4715 pointer_set_destroy (blocks);
4716 verify_histograms ();
4717 timevar_pop (TV_TREE_STMT_VERIFY);
4718 }
4719
4720
4721 /* Verifies that the flow information is OK. */
4722
4723 static int
4724 gimple_verify_flow_info (void)
4725 {
4726 int err = 0;
4727 basic_block bb;
4728 gimple_stmt_iterator gsi;
4729 gimple stmt;
4730 edge e;
4731 edge_iterator ei;
4732
4733 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4734 {
4735 error ("ENTRY_BLOCK has IL associated with it");
4736 err = 1;
4737 }
4738
4739 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4740 {
4741 error ("EXIT_BLOCK has IL associated with it");
4742 err = 1;
4743 }
4744
4745 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4746 if (e->flags & EDGE_FALLTHRU)
4747 {
4748 error ("fallthru to exit from bb %d", e->src->index);
4749 err = 1;
4750 }
4751
4752 FOR_EACH_BB (bb)
4753 {
4754 bool found_ctrl_stmt = false;
4755
4756 stmt = NULL;
4757
4758 /* Skip labels at the start of the basic block. */
4759 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4760 {
4761 tree label;
4762 gimple prev_stmt = stmt;
4763
4764 stmt = gsi_stmt (gsi);
4765
4766 if (gimple_code (stmt) != GIMPLE_LABEL)
4767 break;
4768
4769 label = gimple_label_label (stmt);
4770 if (prev_stmt && DECL_NONLOCAL (label))
4771 {
4772 error ("nonlocal label ");
4773 print_generic_expr (stderr, label, 0);
4774 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4775 bb->index);
4776 err = 1;
4777 }
4778
4779 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4780 {
4781 error ("EH landing pad label ");
4782 print_generic_expr (stderr, label, 0);
4783 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4784 bb->index);
4785 err = 1;
4786 }
4787
4788 if (label_to_block (label) != bb)
4789 {
4790 error ("label ");
4791 print_generic_expr (stderr, label, 0);
4792 fprintf (stderr, " to block does not match in bb %d",
4793 bb->index);
4794 err = 1;
4795 }
4796
4797 if (decl_function_context (label) != current_function_decl)
4798 {
4799 error ("label ");
4800 print_generic_expr (stderr, label, 0);
4801 fprintf (stderr, " has incorrect context in bb %d",
4802 bb->index);
4803 err = 1;
4804 }
4805 }
4806
4807 /* Verify that the body of basic block BB is free of control flow. */
4808 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4809 {
4810 gimple stmt = gsi_stmt (gsi);
4811
4812 if (found_ctrl_stmt)
4813 {
4814 error ("control flow in the middle of basic block %d",
4815 bb->index);
4816 err = 1;
4817 }
4818
4819 if (stmt_ends_bb_p (stmt))
4820 found_ctrl_stmt = true;
4821
4822 if (gimple_code (stmt) == GIMPLE_LABEL)
4823 {
4824 error ("label ");
4825 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4826 fprintf (stderr, " in the middle of basic block %d", bb->index);
4827 err = 1;
4828 }
4829 }
4830
4831 gsi = gsi_last_bb (bb);
4832 if (gsi_end_p (gsi))
4833 continue;
4834
4835 stmt = gsi_stmt (gsi);
4836
4837 if (gimple_code (stmt) == GIMPLE_LABEL)
4838 continue;
4839
4840 err |= verify_eh_edges (stmt);
4841
4842 if (is_ctrl_stmt (stmt))
4843 {
4844 FOR_EACH_EDGE (e, ei, bb->succs)
4845 if (e->flags & EDGE_FALLTHRU)
4846 {
4847 error ("fallthru edge after a control statement in bb %d",
4848 bb->index);
4849 err = 1;
4850 }
4851 }
4852
4853 if (gimple_code (stmt) != GIMPLE_COND)
4854 {
4855 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
4856 after anything else but a GIMPLE_COND. */
4857 FOR_EACH_EDGE (e, ei, bb->succs)
4858 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4859 {
4860 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4861 bb->index);
4862 err = 1;
4863 }
4864 }
4865
4866 switch (gimple_code (stmt))
4867 {
4868 case GIMPLE_COND:
4869 {
4870 edge true_edge;
4871 edge false_edge;
4872
4873 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4874
4875 if (!true_edge
4876 || !false_edge
4877 || !(true_edge->flags & EDGE_TRUE_VALUE)
4878 || !(false_edge->flags & EDGE_FALSE_VALUE)
4879 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4880 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4881 || EDGE_COUNT (bb->succs) >= 3)
4882 {
4883 error ("wrong outgoing edge flags at end of bb %d",
4884 bb->index);
4885 err = 1;
4886 }
4887 }
4888 break;
4889
4890 case GIMPLE_GOTO:
4891 if (simple_goto_p (stmt))
4892 {
4893 error ("explicit goto at end of bb %d", bb->index);
4894 err = 1;
4895 }
4896 else
4897 {
4898 /* FIXME. We should double check that the labels in the
4899 destination blocks have their address taken. */
4900 FOR_EACH_EDGE (e, ei, bb->succs)
4901 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4902 | EDGE_FALSE_VALUE))
4903 || !(e->flags & EDGE_ABNORMAL))
4904 {
4905 error ("wrong outgoing edge flags at end of bb %d",
4906 bb->index);
4907 err = 1;
4908 }
4909 }
4910 break;
4911
4912 case GIMPLE_CALL:
4913 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4914 break;
4915 /* ... fallthru ... */
4916 case GIMPLE_RETURN:
4917 if (!single_succ_p (bb)
4918 || (single_succ_edge (bb)->flags
4919 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4920 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4921 {
4922 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4923 err = 1;
4924 }
4925 if (single_succ (bb) != EXIT_BLOCK_PTR)
4926 {
4927 error ("return edge does not point to exit in bb %d",
4928 bb->index);
4929 err = 1;
4930 }
4931 break;
4932
4933 case GIMPLE_SWITCH:
4934 {
4935 tree prev;
4936 edge e;
4937 size_t i, n;
4938
4939 n = gimple_switch_num_labels (stmt);
4940
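/* The aux field of each destination block serves as a small marker:
   (void *)1 for blocks referenced by a case label, (void *)2 once an
   outgoing edge to the block has been seen; the markers are cleared
   again below.  */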
4941 /* Mark all the destination basic blocks. */
4942 for (i = 0; i < n; ++i)
4943 {
4944 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4945 basic_block label_bb = label_to_block (lab);
4946 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4947 label_bb->aux = (void *)1;
4948 }
4949
4950 /* Verify that the case labels are sorted. */
4951 prev = gimple_switch_label (stmt, 0);
4952 for (i = 1; i < n; ++i)
4953 {
4954 tree c = gimple_switch_label (stmt, i);
4955 if (!CASE_LOW (c))
4956 {
4957 error ("found default case not at the start of "
4958 "case vector");
4959 err = 1;
4960 continue;
4961 }
4962 if (CASE_LOW (prev)
4963 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4964 {
4965 error ("case labels not sorted: ");
4966 print_generic_expr (stderr, prev, 0);
4967 fprintf (stderr," is greater than ");
4968 print_generic_expr (stderr, c, 0);
4969 fprintf (stderr," but comes before it.\n");
4970 err = 1;
4971 }
4972 prev = c;
4973 }
4974 /* VRP will remove the default case if it can prove it will
4975 never be executed. So do not verify there always exists
4976 a default case here. */
4977
4978 FOR_EACH_EDGE (e, ei, bb->succs)
4979 {
4980 if (!e->dest->aux)
4981 {
4982 error ("extra outgoing edge %d->%d",
4983 bb->index, e->dest->index);
4984 err = 1;
4985 }
4986
4987 e->dest->aux = (void *)2;
4988 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4989 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4990 {
4991 error ("wrong outgoing edge flags at end of bb %d",
4992 bb->index);
4993 err = 1;
4994 }
4995 }
4996
4997 /* Check that we have all of them. */
4998 for (i = 0; i < n; ++i)
4999 {
5000 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5001 basic_block label_bb = label_to_block (lab);
5002
5003 if (label_bb->aux != (void *)2)
5004 {
5005 error ("missing edge %i->%i", bb->index, label_bb->index);
5006 err = 1;
5007 }
5008 }
5009
5010 FOR_EACH_EDGE (e, ei, bb->succs)
5011 e->dest->aux = (void *)0;
5012 }
5013 break;
5014
5015 case GIMPLE_EH_DISPATCH:
5016 err |= verify_eh_dispatch_edge (stmt);
5017 break;
5018
5019 default:
5020 break;
5021 }
5022 }
5023
5024 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5025 verify_dominators (CDI_DOMINATORS);
5026
5027 return err;
5028 }
5029
5030
5031 /* Updates phi nodes after creating a forwarder block joined
5032 by edge FALLTHRU. */
5033
5034 static void
5035 gimple_make_forwarder_block (edge fallthru)
5036 {
5037 edge e;
5038 edge_iterator ei;
5039 basic_block dummy, bb;
5040 tree var;
5041 gimple_stmt_iterator gsi;
5042
5043 dummy = fallthru->src;
5044 bb = fallthru->dest;
5045
5046 if (single_pred_p (bb))
5047 return;
5048
5049 /* If we redirected a branch we must create new PHI nodes at the
5050 start of BB. */
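/* E.g. an existing x_1 = PHI <a_2(e1), a_3(e2)> in DUMMY is renamed
   to define a fresh name x_4, and BB receives a new
   x_1 = PHI <x_4(FALLTHRU), ...>; the arguments for the redirected
   edges are flushed from their pending lists below.  */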
5051 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5052 {
5053 gimple phi, new_phi;
5054
5055 phi = gsi_stmt (gsi);
5056 var = gimple_phi_result (phi);
5057 new_phi = create_phi_node (var, bb);
5058 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5059 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5060 UNKNOWN_LOCATION);
5061 }
5062
5063 /* Add the arguments we have stored on edges. */
5064 FOR_EACH_EDGE (e, ei, bb->preds)
5065 {
5066 if (e == fallthru)
5067 continue;
5068
5069 flush_pending_stmts (e);
5070 }
5071 }
5072
5073
5074 /* Return a non-special label in the head of basic block BB.
5075 Create one if it doesn't exist. */
5076
5077 tree
5078 gimple_block_label (basic_block bb)
5079 {
5080 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5081 bool first = true;
5082 tree label;
5083 gimple stmt;
5084
5085 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5086 {
5087 stmt = gsi_stmt (i);
5088 if (gimple_code (stmt) != GIMPLE_LABEL)
5089 break;
5090 label = gimple_label_label (stmt);
5091 if (!DECL_NONLOCAL (label))
5092 {
5093 if (!first)
5094 gsi_move_before (&i, &s);
5095 return label;
5096 }
5097 }
5098
5099 label = create_artificial_label (UNKNOWN_LOCATION);
5100 stmt = gimple_build_label (label);
5101 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5102 return label;
5103 }
5104
5105
5106 /* Attempt to perform edge redirection by replacing a possibly complex
5107 jump instruction by a goto or by removing the jump completely.
5108 This can apply only if all edges now point to the same block. The
5109 parameters and return values are equivalent to
5110 redirect_edge_and_branch. */
5111
5112 static edge
5113 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5114 {
5115 basic_block src = e->src;
5116 gimple_stmt_iterator i;
5117 gimple stmt;
5118
5119 /* We can replace or remove a complex jump only when we have exactly
5120 two edges. */
5121 if (EDGE_COUNT (src->succs) != 2
5122 /* Verify that all targets will be TARGET. Specifically, the
5123 edge that is not E must also go to TARGET. */
5124 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5125 return NULL;
5126
5127 i = gsi_last_bb (src);
5128 if (gsi_end_p (i))
5129 return NULL;
5130
5131 stmt = gsi_stmt (i);
5132
5133 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5134 {
5135 gsi_remove (&i, true);
5136 e = ssa_redirect_edge (e, target);
5137 e->flags = EDGE_FALLTHRU;
5138 return e;
5139 }
5140
5141 return NULL;
5142 }
5143
5144
5145 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5146 edge representing the redirected branch. */
5147
5148 static edge
5149 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5150 {
5151 basic_block bb = e->src;
5152 gimple_stmt_iterator gsi;
5153 edge ret;
5154 gimple stmt;
5155
5156 if (e->flags & EDGE_ABNORMAL)
5157 return NULL;
5158
5159 if (e->dest == dest)
5160 return NULL;
5161
5162 if (e->flags & EDGE_EH)
5163 return redirect_eh_edge (e, dest);
5164
5165 if (e->src != ENTRY_BLOCK_PTR)
5166 {
5167 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5168 if (ret)
5169 return ret;
5170 }
5171
5172 gsi = gsi_last_bb (bb);
5173 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5174
5175 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5176 {
5177 case GIMPLE_COND:
5178 /* For COND_EXPR, we only need to redirect the edge. */
5179 break;
5180
5181 case GIMPLE_GOTO:
5182 /* No non-abnormal edges should lead from a non-simple goto, and
5183 simple ones should be represented implicitly. */
5184 gcc_unreachable ();
5185
5186 case GIMPLE_SWITCH:
5187 {
5188 tree label = gimple_block_label (dest);
5189 tree cases = get_cases_for_edge (e, stmt);
5190
5191 /* If we have a list of cases associated with E, then use it
5192 as it's a lot faster than walking the entire case vector. */
5193 if (cases)
5194 {
5195 edge e2 = find_edge (e->src, dest);
5196 tree last, first;
5197
5198 first = cases;
5199 while (cases)
5200 {
5201 last = cases;
5202 CASE_LABEL (cases) = label;
5203 cases = CASE_CHAIN (cases);
5204 }
5205
5206 /* If there was already an edge in the CFG, then we need
5207 to move all the cases associated with E to E2. */
5208 if (e2)
5209 {
5210 tree cases2 = get_cases_for_edge (e2, stmt);
5211
5212 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5213 CASE_CHAIN (cases2) = first;
5214 }
5215 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5216 }
5217 else
5218 {
5219 size_t i, n = gimple_switch_num_labels (stmt);
5220
5221 for (i = 0; i < n; i++)
5222 {
5223 tree elt = gimple_switch_label (stmt, i);
5224 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5225 CASE_LABEL (elt) = label;
5226 }
5227 }
5228 }
5229 break;
5230
5231 case GIMPLE_ASM:
5232 {
5233 int i, n = gimple_asm_nlabels (stmt);
5234 tree label = NULL;
5235
5236 for (i = 0; i < n; ++i)
5237 {
5238 tree cons = gimple_asm_label_op (stmt, i);
5239 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5240 {
5241 if (!label)
5242 label = gimple_block_label (dest);
5243 TREE_VALUE (cons) = label;
5244 }
5245 }
5246
5247 /* If we didn't find any label matching the former edge in the
5248 asm labels, we must be redirecting the fallthrough
5249 edge. */
5250 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5251 }
5252 break;
5253
5254 case GIMPLE_RETURN:
5255 gsi_remove (&gsi, true);
5256 e->flags |= EDGE_FALLTHRU;
5257 break;
5258
5259 case GIMPLE_OMP_RETURN:
5260 case GIMPLE_OMP_CONTINUE:
5261 case GIMPLE_OMP_SECTIONS_SWITCH:
5262 case GIMPLE_OMP_FOR:
5263 /* The edges from OMP constructs can be simply redirected. */
5264 break;
5265
5266 case GIMPLE_EH_DISPATCH:
5267 if (!(e->flags & EDGE_FALLTHRU))
5268 redirect_eh_dispatch_edge (stmt, e, dest);
5269 break;
5270
5271 case GIMPLE_TRANSACTION:
5272 /* The ABORT edge has a stored label associated with it, otherwise
5273 the edges are simply redirectable. */
5274 if (e->flags == 0)
5275 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5276 break;
5277
5278 default:
5279 /* Otherwise it must be a fallthru edge, and we don't need to
5280 do anything besides redirecting it. */
5281 gcc_assert (e->flags & EDGE_FALLTHRU);
5282 break;
5283 }
5284
5285 /* Now update the edges in the CFG; ssa_redirect_edge queues the
5286 PHI node arguments on the edge's pending list, to be flushed
5287 by the caller. */
5288 e = ssa_redirect_edge (e, dest);
5289
5290 return e;
5291 }
5292
5293 /* Returns true if it is possible to remove edge E by redirecting
5294 it to the destination of the other edge from E->src. */
5295
5296 static bool
5297 gimple_can_remove_branch_p (const_edge e)
5298 {
5299 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5300 return false;
5301
5302 return true;
5303 }
5304
5305 /* Simple wrapper, as we can always redirect fallthru edges. */
5306
5307 static basic_block
5308 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5309 {
5310 e = gimple_redirect_edge_and_branch (e, dest);
5311 gcc_assert (e);
5312
5313 return NULL;
5314 }
5315
5316
5317 /* Splits basic block BB after statement STMT (but at least after the
5318 labels). If STMT is NULL, BB is split just after the labels. */
5319
5320 static basic_block
5321 gimple_split_block (basic_block bb, void *stmt)
5322 {
5323 gimple_stmt_iterator gsi;
5324 gimple_stmt_iterator gsi_tgt;
5325 gimple act;
5326 gimple_seq list;
5327 basic_block new_bb;
5328 edge e;
5329 edge_iterator ei;
5330
5331 new_bb = create_empty_bb (bb);
5332
5333 /* Redirect the outgoing edges. */
5334 new_bb->succs = bb->succs;
5335 bb->succs = NULL;
5336 FOR_EACH_EDGE (e, ei, new_bb->succs)
5337 e->src = new_bb;
5338
5339 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5340 stmt = NULL;
5341
5342 /* Move everything from GSI to the new basic block. */
5343 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5344 {
5345 act = gsi_stmt (gsi);
5346 if (gimple_code (act) == GIMPLE_LABEL)
5347 continue;
5348
5349 if (!stmt)
5350 break;
5351
5352 if (stmt == act)
5353 {
5354 gsi_next (&gsi);
5355 break;
5356 }
5357 }
5358
5359 if (gsi_end_p (gsi))
5360 return new_bb;
5361
5362 /* Split the statement list - avoid re-creating new containers as this
5363 brings ugly quadratic memory consumption in the inliner.
5364 (We are still quadratic since we need to update stmt BB pointers,
5365 sadly.) */
5366 gsi_split_seq_before (&gsi, &list);
5367 set_bb_seq (new_bb, list);
5368 for (gsi_tgt = gsi_start (list);
5369 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5370 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5371
5372 return new_bb;
5373 }
5374
5375
5376 /* Moves basic block BB after block AFTER. */
5377
5378 static bool
5379 gimple_move_block_after (basic_block bb, basic_block after)
5380 {
5381 if (bb->prev_bb == after)
5382 return true;
5383
5384 unlink_block (bb);
5385 link_block (bb, after);
5386
5387 return true;
5388 }
5389
5390
5391 /* Return TRUE if block BB has no executable statements, otherwise return
5392 FALSE. */
5393
5394 static bool
5395 gimple_empty_block_p (basic_block bb)
5396 {
5397 /* BB must have no executable statements. */
5398 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5399 if (phi_nodes (bb))
5400 return false;
5401 if (gsi_end_p (gsi))
5402 return true;
5403 if (is_gimple_debug (gsi_stmt (gsi)))
5404 gsi_next_nondebug (&gsi);
5405 return gsi_end_p (gsi);
5406 }
5407
5408
5409 /* Split a basic block if it ends with a conditional branch and if the
5410 other part of the block is not empty. */
5411
5412 static basic_block
5413 gimple_split_block_before_cond_jump (basic_block bb)
5414 {
5415 gimple last, split_point;
5416 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5417 if (gsi_end_p (gsi))
5418 return NULL;
5419 last = gsi_stmt (gsi);
5420 if (gimple_code (last) != GIMPLE_COND
5421 && gimple_code (last) != GIMPLE_SWITCH)
5422 return NULL;
5423 gsi_prev_nondebug (&gsi);
5424 split_point = gsi_stmt (gsi);
5425 return split_block (bb, split_point)->dest;
5426 }
5427
5428
5429 /* Return true if basic_block can be duplicated. */
5430
5431 static bool
5432 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5433 {
5434 return true;
5435 }
5436
5437 /* Create a duplicate of the basic block BB. NOTE: This does not
5438 preserve SSA form. */
5439
5440 static basic_block
5441 gimple_duplicate_bb (basic_block bb)
5442 {
5443 basic_block new_bb;
5444 gimple_stmt_iterator gsi, gsi_tgt;
5445 gimple_seq phis = phi_nodes (bb);
5446 gimple phi, stmt, copy;
5447
5448 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5449
5450 /* Copy the PHI nodes. We ignore PHI node arguments here because
5451 the incoming edges have not been set up yet. */
5452 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5453 {
5454 phi = gsi_stmt (gsi);
5455 copy = create_phi_node (NULL_TREE, new_bb);
5456 create_new_def_for (gimple_phi_result (phi), copy,
5457 gimple_phi_result_ptr (copy));
5458 gimple_set_uid (copy, gimple_uid (phi));
5459 }
5460
5461 gsi_tgt = gsi_start_bb (new_bb);
5462 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5463 {
5464 def_operand_p def_p;
5465 ssa_op_iter op_iter;
5466 tree lhs;
5467
5468 stmt = gsi_stmt (gsi);
5469 if (gimple_code (stmt) == GIMPLE_LABEL)
5470 continue;
5471
5472 /* Don't duplicate label debug stmts. */
5473 if (gimple_debug_bind_p (stmt)
5474 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5475 == LABEL_DECL)
5476 continue;
5477
5478 /* Create a new copy of STMT and duplicate STMT's virtual
5479 operands. */
5480 copy = gimple_copy (stmt);
5481 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5482
5483 maybe_duplicate_eh_stmt (copy, stmt);
5484 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5485
5486 /* When copying around a stmt writing into a local non-user
5487 aggregate, make sure it won't share a stack slot with other
5488 vars. */
5489 lhs = gimple_get_lhs (stmt);
5490 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5491 {
5492 tree base = get_base_address (lhs);
5493 if (base
5494 && (TREE_CODE (base) == VAR_DECL
5495 || TREE_CODE (base) == RESULT_DECL)
5496 && DECL_IGNORED_P (base)
5497 && !TREE_STATIC (base)
5498 && !DECL_EXTERNAL (base)
5499 && (TREE_CODE (base) != VAR_DECL
5500 || !DECL_HAS_VALUE_EXPR_P (base)))
5501 DECL_NONSHAREABLE (base) = 1;
5502 }
5503
5504 /* Create new names for all the definitions created by COPY and
5505 add replacement mappings for each new name. */
5506 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5507 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5508 }
5509
5510 return new_bb;
5511 }
5512
5513 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5514
5515 static void
5516 add_phi_args_after_copy_edge (edge e_copy)
5517 {
5518 basic_block bb, bb_copy = e_copy->src, dest;
5519 edge e;
5520 edge_iterator ei;
5521 gimple phi, phi_copy;
5522 tree def;
5523 gimple_stmt_iterator psi, psi_copy;
5524
5525 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5526 return;
5527
5528 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5529
5530 if (e_copy->dest->flags & BB_DUPLICATED)
5531 dest = get_bb_original (e_copy->dest);
5532 else
5533 dest = e_copy->dest;
5534
5535 e = find_edge (bb, dest);
5536 if (!e)
5537 {
5538 /* During loop unrolling the target of the latch edge is copied.
5539 In this case we are not looking for the edge to DEST, but for
5540 the edge to the duplicated block whose original was DEST. */
5541 FOR_EACH_EDGE (e, ei, bb->succs)
5542 {
5543 if ((e->dest->flags & BB_DUPLICATED)
5544 && get_bb_original (e->dest) == dest)
5545 break;
5546 }
5547
5548 gcc_assert (e != NULL);
5549 }
5550
5551 for (psi = gsi_start_phis (e->dest),
5552 psi_copy = gsi_start_phis (e_copy->dest);
5553 !gsi_end_p (psi);
5554 gsi_next (&psi), gsi_next (&psi_copy))
5555 {
5556 phi = gsi_stmt (psi);
5557 phi_copy = gsi_stmt (psi_copy);
5558 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5559 add_phi_arg (phi_copy, def, e_copy,
5560 gimple_phi_arg_location_from_edge (phi, e));
5561 }
5562 }
5563
5564
5565 /* Basic block BB_COPY was created by code duplication. Add phi node
5566 arguments for edges going out of BB_COPY. The blocks that were
5567 duplicated have BB_DUPLICATED set. */
5568
5569 void
5570 add_phi_args_after_copy_bb (basic_block bb_copy)
5571 {
5572 edge e_copy;
5573 edge_iterator ei;
5574
5575 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5576 {
5577 add_phi_args_after_copy_edge (e_copy);
5578 }
5579 }
5580
5581 /* Blocks in REGION_COPY array of length N_REGION were created by
5582 duplication of basic blocks. Add phi node arguments for edges
5583 going from these blocks. If E_COPY is not NULL, also add
5584 phi node arguments for its destination. */
5585
5586 void
5587 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5588 edge e_copy)
5589 {
5590 unsigned i;
5591
5592 for (i = 0; i < n_region; i++)
5593 region_copy[i]->flags |= BB_DUPLICATED;
5594
5595 for (i = 0; i < n_region; i++)
5596 add_phi_args_after_copy_bb (region_copy[i]);
5597 if (e_copy)
5598 add_phi_args_after_copy_edge (e_copy);
5599
5600 for (i = 0; i < n_region; i++)
5601 region_copy[i]->flags &= ~BB_DUPLICATED;
5602 }
5603
5604 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5605 important exit edge EXIT. By important we mean that no SSA name defined
5606 inside the region is live over the other exit edges of the region. All entry
5607 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5608 to the duplicate of the region. Dominance and loop information is
5609 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5610 UPDATE_DOMINANCE is false then we assume that the caller will update the
5611 dominance information after calling this function. The new basic
5612 blocks are stored to REGION_COPY in the same order as they had in REGION,
5613 provided that REGION_COPY is not NULL.
5614 The function returns false if it is unable to copy the region,
5615 true otherwise. */
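/* In the primary use, loop header copying, the region is the loop
   header and the effect is roughly to transform
   "while (cond) body;" into "if (cond) do body; while (cond);".  */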
5616
5617 bool
5618 gimple_duplicate_sese_region (edge entry, edge exit,
5619 basic_block *region, unsigned n_region,
5620 basic_block *region_copy,
5621 bool update_dominance)
5622 {
5623 unsigned i;
5624 bool free_region_copy = false, copying_header = false;
5625 struct loop *loop = entry->dest->loop_father;
5626 edge exit_copy;
5627 vec<basic_block> doms;
5628 edge redirected;
5629 int total_freq = 0, entry_freq = 0;
5630 gcov_type total_count = 0, entry_count = 0;
5631
5632 if (!can_copy_bbs_p (region, n_region))
5633 return false;
5634
5635 /* Some sanity checking. Note that we do not check for all possible
5636 misuses of the function. I.e. if you ask to copy something weird,
5637 it will work, but the state of the structures probably will not be
5638 correct. */
5639 for (i = 0; i < n_region; i++)
5640 {
5641 /* We do not handle subloops, i.e. all the blocks must belong to the
5642 same loop. */
5643 if (region[i]->loop_father != loop)
5644 return false;
5645
5646 if (region[i] != entry->dest
5647 && region[i] == loop->header)
5648 return false;
5649 }
5650
5651 set_loop_copy (loop, loop);
5652
5653 /* In case the function is used for loop header copying (which is the primary
5654 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5655 if (loop->header == entry->dest)
5656 {
5657 copying_header = true;
5658 set_loop_copy (loop, loop_outer (loop));
5659
5660 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5661 return false;
5662
5663 for (i = 0; i < n_region; i++)
5664 if (region[i] != exit->src
5665 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5666 return false;
5667 }
5668
5669 if (!region_copy)
5670 {
5671 region_copy = XNEWVEC (basic_block, n_region);
5672 free_region_copy = true;
5673 }
5674
5675 initialize_original_copy_tables ();
5676
5677 /* Record blocks outside the region that are dominated by something
5678 inside. */
5679 if (update_dominance)
5680 {
5681 doms.create (0);
5682 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5683 }
5684
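/* Split the profile between the original region and its copy: the
   copy receives the share that enters through ENTRY, the original
   keeps the rest.  */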
5685 if (entry->dest->count)
5686 {
5687 total_count = entry->dest->count;
5688 entry_count = entry->count;
5689 /* Fix up corner cases, to avoid division by zero or creation of negative
5690 frequencies. */
5691 if (entry_count > total_count)
5692 entry_count = total_count;
5693 }
5694 else
5695 {
5696 total_freq = entry->dest->frequency;
5697 entry_freq = EDGE_FREQUENCY (entry);
5698 /* Fix up corner cases, to avoid division by zero or creation of negative
5699 frequencies. */
5700 if (total_freq == 0)
5701 total_freq = 1;
5702 else if (entry_freq > total_freq)
5703 entry_freq = total_freq;
5704 }
5705
5706 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5707 split_edge_bb_loc (entry), update_dominance);
5708 if (total_count)
5709 {
5710 scale_bbs_frequencies_gcov_type (region, n_region,
5711 total_count - entry_count,
5712 total_count);
5713 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5714 total_count);
5715 }
5716 else
5717 {
5718 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5719 total_freq);
5720 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5721 }
5722
5723 if (copying_header)
5724 {
5725 loop->header = exit->dest;
5726 loop->latch = exit->src;
5727 }
5728
5729 /* Redirect the entry and add the phi node arguments. */
5730 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5731 gcc_assert (redirected != NULL);
5732 flush_pending_stmts (entry);
5733
5734 /* Concerning updating of dominators: We must recount dominators
5735 for entry block and its copy. Anything that is outside of the
5736 region, but was dominated by something inside needs recounting as
5737 well. */
5738 if (update_dominance)
5739 {
5740 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5741 doms.safe_push (get_bb_original (entry->dest));
5742 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5743 doms.release ();
5744 }
5745
5746 /* Add the other PHI node arguments. */
5747 add_phi_args_after_copy (region_copy, n_region, NULL);
5748
5749 if (free_region_copy)
5750 free (region_copy);
5751
5752 free_original_copy_tables ();
5753 return true;
5754 }
5755
5756 /* Checks if BB is part of the region defined by N_REGION BBS. */
5757 static bool
5758 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5759 {
5760 unsigned int n;
5761
5762 for (n = 0; n < n_region; n++)
5763 {
5764 if (bb == bbs[n])
5765 return true;
5766 }
5767 return false;
5768 }
5769
5770 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5771 are stored to REGION_COPY in the same order in which they appear
5772 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5773 the region, EXIT an exit from it. The condition guarding EXIT
5774 is moved to ENTRY. Returns true if duplication succeeds, false
5775 otherwise.
5776
5777 For example,
5778
5779 some_code;
5780 if (cond)
5781 A;
5782 else
5783 B;
5784
5785 is transformed to
5786
5787 if (cond)
5788 {
5789 some_code;
5790 A;
5791 }
5792 else
5793 {
5794 some_code;
5795 B;
5796 }
5797 */
5798
5799 bool
5800 gimple_duplicate_sese_tail (edge entry, edge exit,
5801 basic_block *region, unsigned n_region,
5802 basic_block *region_copy)
5803 {
5804 unsigned i;
5805 bool free_region_copy = false;
5806 struct loop *loop = exit->dest->loop_father;
5807 struct loop *orig_loop = entry->dest->loop_father;
5808 basic_block switch_bb, entry_bb, nentry_bb;
5809 vec<basic_block> doms;
5810 int total_freq = 0, exit_freq = 0;
5811 gcov_type total_count = 0, exit_count = 0;
5812 edge exits[2], nexits[2], e;
5813 gimple_stmt_iterator gsi;
5814 gimple cond_stmt;
5815 edge sorig, snew;
5816 basic_block exit_bb;
5817 gimple_stmt_iterator psi;
5818 gimple phi;
5819 tree def;
5820 struct loop *target, *aloop, *cloop;
5821
5822 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5823 exits[0] = exit;
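/* The second exit is the other successor edge of EXIT->src; the
   boolean index selects edge 1 exactly when edge 0 is EXIT itself.  */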
5824 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5825
5826 if (!can_copy_bbs_p (region, n_region))
5827 return false;
5828
5829 initialize_original_copy_tables ();
5830 set_loop_copy (orig_loop, loop);
5831
5832 target = loop;
5833 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5834 {
5835 if (bb_part_of_region_p (aloop->header, region, n_region))
5836 {
5837 cloop = duplicate_loop (aloop, target);
5838 duplicate_subloops (aloop, cloop);
5839 }
5840 }
5841
5842 if (!region_copy)
5843 {
5844 region_copy = XNEWVEC (basic_block, n_region);
5845 free_region_copy = true;
5846 }
5847
5848 gcc_assert (!need_ssa_update_p (cfun));
5849
5850 /* Record blocks outside the region that are dominated by something
5851 inside. */
5852 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5853
5854 if (exit->src->count)
5855 {
5856 total_count = exit->src->count;
5857 exit_count = exit->count;
5858 /* Fix up corner cases, to avoid division by zero or creation of negative
5859 frequencies. */
5860 if (exit_count > total_count)
5861 exit_count = total_count;
5862 }
5863 else
5864 {
5865 total_freq = exit->src->frequency;
5866 exit_freq = EDGE_FREQUENCY (exit);
5867 /* Fix up corner cases, to avoid division by zero or creation of negative
5868 frequencies. */
5869 if (total_freq == 0)
5870 total_freq = 1;
5871 if (exit_freq > total_freq)
5872 exit_freq = total_freq;
5873 }
5874
5875 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5876 split_edge_bb_loc (exit), true);
5877 if (total_count)
5878 {
5879 scale_bbs_frequencies_gcov_type (region, n_region,
5880 total_count - exit_count,
5881 total_count);
5882 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5883 total_count);
5884 }
5885 else
5886 {
5887 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5888 total_freq);
5889 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5890 }
5891
5892 /* Create the switch block, and put the exit condition into it. */
5893 entry_bb = entry->dest;
5894 nentry_bb = get_bb_copy (entry_bb);
5895 if (!last_stmt (entry->src)
5896 || !stmt_ends_bb_p (last_stmt (entry->src)))
5897 switch_bb = entry->src;
5898 else
5899 switch_bb = split_edge (entry);
5900 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5901
5902 gsi = gsi_last_bb (switch_bb);
5903 cond_stmt = last_stmt (exit->src);
5904 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5905 cond_stmt = gimple_copy (cond_stmt);
5906
5907 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5908
5909 sorig = single_succ_edge (switch_bb);
5910 sorig->flags = exits[1]->flags;
5911 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5912
5913 /* Register the new edge from SWITCH_BB in loop exit lists. */
5914 rescan_loop_exit (snew, true, false);
5915
5916 /* Add the PHI node arguments. */
5917 add_phi_args_after_copy (region_copy, n_region, snew);
5918
5919 /* Get rid of now superfluous conditions and associated edges (and phi node
5920 arguments). */
5921 exit_bb = exit->dest;
5922
5923 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5924 PENDING_STMT (e) = NULL;
5925
5926 /* The latch of ORIG_LOOP was copied, and so was the backedge
5927 to the original header. We redirect this backedge to EXIT_BB. */
5928 for (i = 0; i < n_region; i++)
5929 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5930 {
5931 gcc_assert (single_succ_edge (region_copy[i]));
5932 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5933 PENDING_STMT (e) = NULL;
5934 for (psi = gsi_start_phis (exit_bb);
5935 !gsi_end_p (psi);
5936 gsi_next (&psi))
5937 {
5938 phi = gsi_stmt (psi);
5939 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5940 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5941 }
5942 }
5943 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5944 PENDING_STMT (e) = NULL;
5945
5946 /* Anything that is outside of the region, but was dominated by something
5947 inside, needs its dominance info updated. */
5948 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5949 doms.release ();
5950 /* Update the SSA web. */
5951 update_ssa (TODO_update_ssa);
5952
5953 if (free_region_copy)
5954 free (region_copy);
5955
5956 free_original_copy_tables ();
5957 return true;
5958 }
5959
5960 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5961 adding blocks when the dominator traversal reaches EXIT. This
5962 function silently assumes that ENTRY strictly dominates EXIT. */
5963
5964 void
5965 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5966 vec<basic_block> *bbs_p)
5967 {
5968 basic_block son;
5969
5970 for (son = first_dom_son (CDI_DOMINATORS, entry);
5971 son;
5972 son = next_dom_son (CDI_DOMINATORS, son))
5973 {
5974 bbs_p->safe_push (son);
5975 if (son != exit)
5976 gather_blocks_in_sese_region (son, exit, bbs_p);
5977 }
5978 }
5979
5980 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5981 The duplicates are recorded in VARS_MAP. */
5982
5983 static void
5984 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5985 tree to_context)
5986 {
5987 tree t = *tp, new_t;
5988 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5989 void **loc;
5990
5991 if (DECL_CONTEXT (t) == to_context)
5992 return;
5993
5994 loc = pointer_map_contains (vars_map, t);
5995
5996 if (!loc)
5997 {
5998 loc = pointer_map_insert (vars_map, t);
5999
6000 if (SSA_VAR_P (t))
6001 {
6002 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6003 add_local_decl (f, new_t);
6004 }
6005 else
6006 {
6007 gcc_assert (TREE_CODE (t) == CONST_DECL);
6008 new_t = copy_node (t);
6009 }
6010 DECL_CONTEXT (new_t) = to_context;
6011
6012 *loc = new_t;
6013 }
6014 else
6015 new_t = (tree) *loc;
6016
6017 *tp = new_t;
6018 }
6019
6020
6021 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6022 VARS_MAP maps old ssa names and var_decls to the new ones. */
6023
6024 static tree
6025 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6026 tree to_context)
6027 {
6028 void **loc;
6029 tree new_name;
6030
6031 gcc_assert (!virtual_operand_p (name));
6032
6033 loc = pointer_map_contains (vars_map, name);
6034
6035 if (!loc)
6036 {
6037 tree decl = SSA_NAME_VAR (name);
6038 if (decl)
6039 {
6040 replace_by_duplicate_decl (&decl, vars_map, to_context);
6041 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6042 decl, SSA_NAME_DEF_STMT (name));
6043 if (SSA_NAME_IS_DEFAULT_DEF (name))
6044 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6045 decl, new_name);
6046 }
6047 else
6048 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6049 name, SSA_NAME_DEF_STMT (name));
6050
6051 loc = pointer_map_insert (vars_map, name);
6052 *loc = new_name;
6053 }
6054 else
6055 new_name = (tree) *loc;
6056
6057 return new_name;
6058 }
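
/* A minimal sketch of the calling pattern (OLD_NAME and DEST_FNDECL are
   hypothetical); the map caches duplicates, so it must stay live across
   all the calls belonging to one move:

     struct pointer_map_t *vars_map = pointer_map_create ();
     tree new_name = replace_ssa_name (old_name, vars_map, dest_fndecl);
     ...more replacements sharing VARS_MAP...
     pointer_map_destroy (vars_map);  */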
6059
6060 struct move_stmt_d
6061 {
6062 tree orig_block;
6063 tree new_block;
6064 tree from_context;
6065 tree to_context;
6066 struct pointer_map_t *vars_map;
6067 htab_t new_label_map;
6068 struct pointer_map_t *eh_map;
6069 bool remap_decls_p;
6070 };
6071
6072 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6073 contained in *TP if it was previously ORIG_BLOCK, and change the
6074 DECL_CONTEXT of every local variable referenced in *TP. */
6075
6076 static tree
6077 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6078 {
6079 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6080 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6081 tree t = *tp;
6082
6083 if (EXPR_P (t))
6084 {
6085 tree block = TREE_BLOCK (t);
6086 if (block == p->orig_block
6087 || (p->orig_block == NULL_TREE
6088 && block != NULL_TREE))
6089 TREE_SET_BLOCK (t, p->new_block);
6090 #ifdef ENABLE_CHECKING
6091 else if (block != NULL_TREE)
6092 {
6093 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6094 block = BLOCK_SUPERCONTEXT (block);
6095 gcc_assert (block == p->orig_block);
6096 }
6097 #endif
6098 }
6099 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6100 {
6101 if (TREE_CODE (t) == SSA_NAME)
6102 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6103 else if (TREE_CODE (t) == LABEL_DECL)
6104 {
6105 if (p->new_label_map)
6106 {
6107 struct tree_map in, *out;
6108 in.base.from = t;
6109 out = (struct tree_map *)
6110 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6111 if (out)
6112 *tp = t = out->to;
6113 }
6114
6115 DECL_CONTEXT (t) = p->to_context;
6116 }
6117 else if (p->remap_decls_p)
6118 {
6119 /* Replace T with its duplicate. T should no longer appear in the
6120 parent function, so this looks wasteful; however, it may appear
6121 in referenced_vars, and more importantly, as virtual operands of
6122 statements, and in alias lists of other variables. It would be
6123 quite difficult to expunge it from all those places. ??? It might
6124 suffice to do this for addressable variables. */
6125 if ((TREE_CODE (t) == VAR_DECL
6126 && !is_global_var (t))
6127 || TREE_CODE (t) == CONST_DECL)
6128 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6129 }
6130 *walk_subtrees = 0;
6131 }
6132 else if (TYPE_P (t))
6133 *walk_subtrees = 0;
6134
6135 return NULL_TREE;
6136 }
6137
6138 /* Helper for move_stmt_r. Given an EH region number for the source
6139 function, map that to the duplicate EH region number in the dest. */
6140
6141 static int
6142 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6143 {
6144 eh_region old_r, new_r;
6145 void **slot;
6146
6147 old_r = get_eh_region_from_number (old_nr);
6148 slot = pointer_map_contains (p->eh_map, old_r);
6149 new_r = (eh_region) *slot;
6150
6151 return new_r->index;
6152 }
6153
6154 /* Similar, but operate on INTEGER_CSTs. */
6155
6156 static tree
6157 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6158 {
6159 int old_nr, new_nr;
6160
6161 old_nr = tree_low_cst (old_t_nr, 0);
6162 new_nr = move_stmt_eh_region_nr (old_nr, p);
6163
6164 return build_int_cst (integer_type_node, new_nr);
6165 }
6166
6167 /* Like move_stmt_op, but for gimple statements.
6168
6169 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6170 contained in the current statement in *GSI_P and change the
6171 DECL_CONTEXT of every local variable referenced in the current
6172 statement. */
6173
6174 static tree
6175 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6176 struct walk_stmt_info *wi)
6177 {
6178 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6179 gimple stmt = gsi_stmt (*gsi_p);
6180 tree block = gimple_block (stmt);
6181
6182 if (block == p->orig_block
6183 || (p->orig_block == NULL_TREE
6184 && block != NULL_TREE))
6185 gimple_set_block (stmt, p->new_block);
6186
6187 switch (gimple_code (stmt))
6188 {
6189 case GIMPLE_CALL:
6190 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6191 {
6192 tree r, fndecl = gimple_call_fndecl (stmt);
6193 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6194 switch (DECL_FUNCTION_CODE (fndecl))
6195 {
6196 case BUILT_IN_EH_COPY_VALUES:
6197 r = gimple_call_arg (stmt, 1);
6198 r = move_stmt_eh_region_tree_nr (r, p);
6199 gimple_call_set_arg (stmt, 1, r);
6200 /* FALLTHRU */
6201
6202 case BUILT_IN_EH_POINTER:
6203 case BUILT_IN_EH_FILTER:
6204 r = gimple_call_arg (stmt, 0);
6205 r = move_stmt_eh_region_tree_nr (r, p);
6206 gimple_call_set_arg (stmt, 0, r);
6207 break;
6208
6209 default:
6210 break;
6211 }
6212 }
6213 break;
6214
6215 case GIMPLE_RESX:
6216 {
6217 int r = gimple_resx_region (stmt);
6218 r = move_stmt_eh_region_nr (r, p);
6219 gimple_resx_set_region (stmt, r);
6220 }
6221 break;
6222
6223 case GIMPLE_EH_DISPATCH:
6224 {
6225 int r = gimple_eh_dispatch_region (stmt);
6226 r = move_stmt_eh_region_nr (r, p);
6227 gimple_eh_dispatch_set_region (stmt, r);
6228 }
6229 break;
6230
6231 case GIMPLE_OMP_RETURN:
6232 case GIMPLE_OMP_CONTINUE:
6233 break;
6234 default:
6235 if (is_gimple_omp (stmt))
6236 {
6237 /* Do not remap variables inside OMP directives. Variables
6238 referenced in clauses and directive header belong to the
6239 parent function and should not be moved into the child
6240 function. */
6241 bool save_remap_decls_p = p->remap_decls_p;
6242 p->remap_decls_p = false;
6243 *handled_ops_p = true;
6244
6245 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6246 move_stmt_op, wi);
6247
6248 p->remap_decls_p = save_remap_decls_p;
6249 }
6250 break;
6251 }
6252
6253 return NULL_TREE;
6254 }
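
/* A minimal sketch of how the two callbacks are wired together, as done
   in move_block_to_fn below (D points to a filled-in struct move_stmt_d
   and SI is a statement iterator over the block being moved):

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = d;
     walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);  */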
6255
6256 /* Move basic block BB from function CFUN to function DEST_FN. The
6257 block is moved out of the original linked list and placed after
6258 block AFTER in the new list. Also, the block is removed from the
6259 original array of blocks and placed in DEST_FN's array of blocks.
6260 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6261 updated to reflect the moved edges.
6262
6263 The local variables are remapped to new instances, VARS_MAP is used
6264 to record the mapping. */
6265
6266 static void
6267 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6268 basic_block after, bool update_edge_count_p,
6269 struct move_stmt_d *d)
6270 {
6271 struct control_flow_graph *cfg;
6272 edge_iterator ei;
6273 edge e;
6274 gimple_stmt_iterator si;
6275 unsigned old_len, new_len;
6276
6277 /* Remove BB from dominance structures. */
6278 delete_from_dominance_info (CDI_DOMINATORS, bb);
6279
6280 /* Move BB from its current loop to the copy in the new function. */
6281 if (current_loops)
6282 {
6283 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6284 if (new_loop)
6285 bb->loop_father = new_loop;
6286 }
6287
6288 /* Link BB to the new linked list. */
6289 move_block_after (bb, after);
6290
6291 /* Update the edge count in the corresponding flowgraphs. */
6292 if (update_edge_count_p)
6293 FOR_EACH_EDGE (e, ei, bb->succs)
6294 {
6295 cfun->cfg->x_n_edges--;
6296 dest_cfun->cfg->x_n_edges++;
6297 }
6298
6299 /* Remove BB from the original basic block array. */
6300 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6301 cfun->cfg->x_n_basic_blocks--;
6302
6303 /* Grow DEST_CFUN's basic block array if needed. */
6304 cfg = dest_cfun->cfg;
6305 cfg->x_n_basic_blocks++;
6306 if (bb->index >= cfg->x_last_basic_block)
6307 cfg->x_last_basic_block = bb->index + 1;
6308
6309 old_len = vec_safe_length (cfg->x_basic_block_info);
6310 if ((unsigned) cfg->x_last_basic_block >= old_len)
6311 {
6312 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6313 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6314 }
6315
6316 (*cfg->x_basic_block_info)[bb->index] = bb;
6317
6318 /* Remap the variables in phi nodes. */
6319 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6320 {
6321 gimple phi = gsi_stmt (si);
6322 use_operand_p use;
6323 tree op = PHI_RESULT (phi);
6324 ssa_op_iter oi;
6325 unsigned i;
6326
6327 if (virtual_operand_p (op))
6328 {
6329 /* Remove the phi nodes for virtual operands (alias analysis will be
6330 run for the new function, anyway). */
6331 remove_phi_node (&si, true);
6332 continue;
6333 }
6334
6335 SET_PHI_RESULT (phi,
6336 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6337 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6338 {
6339 op = USE_FROM_PTR (use);
6340 if (TREE_CODE (op) == SSA_NAME)
6341 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6342 }
6343
6344 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6345 {
6346 location_t locus = gimple_phi_arg_location (phi, i);
6347 tree block = LOCATION_BLOCK (locus);
6348
6349 if (locus == UNKNOWN_LOCATION)
6350 continue;
6351 if (d->orig_block == NULL_TREE || block == d->orig_block)
6352 {
6353 if (d->new_block == NULL_TREE)
6354 locus = LOCATION_LOCUS (locus);
6355 else
6356 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6357 gimple_phi_arg_set_location (phi, i, locus);
6358 }
6359 }
6360
6361 gsi_next (&si);
6362 }
6363
6364 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6365 {
6366 gimple stmt = gsi_stmt (si);
6367 struct walk_stmt_info wi;
6368
6369 memset (&wi, 0, sizeof (wi));
6370 wi.info = d;
6371 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6372
6373 if (gimple_code (stmt) == GIMPLE_LABEL)
6374 {
6375 tree label = gimple_label_label (stmt);
6376 int uid = LABEL_DECL_UID (label);
6377
6378 gcc_assert (uid > -1);
6379
6380 old_len = vec_safe_length (cfg->x_label_to_block_map);
6381 if (old_len <= (unsigned) uid)
6382 {
6383 new_len = 3 * uid / 2 + 1;
6384 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6385 }
6386
6387 (*cfg->x_label_to_block_map)[uid] = bb;
6388 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6389
6390 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6391
6392 if (uid >= dest_cfun->cfg->last_label_uid)
6393 dest_cfun->cfg->last_label_uid = uid + 1;
6394 }
6395
6396 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6397 remove_stmt_from_eh_lp_fn (cfun, stmt);
6398
6399 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6400 gimple_remove_stmt_histograms (cfun, stmt);
6401
6402 /* We cannot leave any operands allocated from the operand caches of
6403 the current function. */
6404 free_stmt_operands (stmt);
6405 push_cfun (dest_cfun);
6406 update_stmt (stmt);
6407 pop_cfun ();
6408 }
6409
6410 FOR_EACH_EDGE (e, ei, bb->succs)
6411 if (e->goto_locus != UNKNOWN_LOCATION)
6412 {
6413 tree block = LOCATION_BLOCK (e->goto_locus);
6414 if (d->orig_block == NULL_TREE
6415 || block == d->orig_block)
6416 e->goto_locus = d->new_block ?
6417 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6418 LOCATION_LOCUS (e->goto_locus);
6419 }
6420 }
6421
6422 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6423 the outermost EH region. Use REGION as the incoming base EH region. */
6424
6425 static eh_region
6426 find_outermost_region_in_block (struct function *src_cfun,
6427 basic_block bb, eh_region region)
6428 {
6429 gimple_stmt_iterator si;
6430
6431 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6432 {
6433 gimple stmt = gsi_stmt (si);
6434 eh_region stmt_region;
6435 int lp_nr;
6436
6437 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6438 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6439 if (stmt_region)
6440 {
6441 if (region == NULL)
6442 region = stmt_region;
6443 else if (stmt_region != region)
6444 {
6445 region = eh_region_outermost (src_cfun, stmt_region, region);
6446 gcc_assert (region != NULL);
6447 }
6448 }
6449 }
6450
6451 return region;
6452 }
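
/* A minimal usage sketch, mirroring move_sese_region_to_fn below (BBS is
   a hypothetical vector holding the region's blocks): widen REGION until
   it encloses every EH region referenced from the region's statements.

     eh_region region = NULL;
     FOR_EACH_VEC_ELT (bbs, i, bb)
       region = find_outermost_region_in_block (src_cfun, bb, region);  */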
6453
6454 static tree
6455 new_label_mapper (tree decl, void *data)
6456 {
6457 htab_t hash = (htab_t) data;
6458 struct tree_map *m;
6459 void **slot;
6460
6461 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6462
6463 m = XNEW (struct tree_map);
6464 m->hash = DECL_UID (decl);
6465 m->base.from = decl;
6466 m->to = create_artificial_label (UNKNOWN_LOCATION);
6467 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6468 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6469 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6470
6471 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6472 gcc_assert (*slot == NULL);
6473
6474 *slot = m;
6475
6476 return m->to;
6477 }
6478
6479 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6480 subblocks. */
6481
6482 static void
6483 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6484 tree to_context)
6485 {
6486 tree *tp, t;
6487
6488 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6489 {
6490 t = *tp;
6491 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6492 continue;
6493 replace_by_duplicate_decl (&t, vars_map, to_context);
6494 if (t != *tp)
6495 {
6496 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6497 {
6498 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6499 DECL_HAS_VALUE_EXPR_P (t) = 1;
6500 }
6501 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6502 *tp = t;
6503 }
6504 }
6505
6506 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6507 replace_block_vars_by_duplicates (block, vars_map, to_context);
6508 }
6509
6510 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6511 from FN1 to FN2. */
6512
6513 static void
6514 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6515 struct loop *loop)
6516 {
6517 /* Discard it from the old loop array. */
6518 (*get_loops (fn1))[loop->num] = NULL;
6519
6520 /* Place it in the new loop array, assigning it a new number. */
6521 loop->num = number_of_loops (fn2);
6522 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6523
6524 /* Recurse to children. */
6525 for (loop = loop->inner; loop; loop = loop->next)
6526 fixup_loop_arrays_after_move (fn1, fn2, loop);
6527 }
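
/* A minimal sketch of the expected calling sequence when reparenting a
   loop into DEST_CFUN's loop tree, as done in move_sese_region_to_fn
   below:

     flow_loop_tree_node_remove (loop);
     flow_loop_tree_node_add (get_loop (dest_cfun, 0), loop);
     fixup_loop_arrays_after_move (saved_cfun, dest_cfun, loop);  */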
6528
6529 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6530 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6531 single basic block in the original CFG and the new basic block is
6532 returned. DEST_CFUN must not have a CFG yet.
6533
6534 Note that the region need not be a pure SESE region. Blocks inside
6535 the region may contain calls to abort/exit. The only restriction
6536 is that ENTRY_BB should be the only entry point and it must
6537 dominate EXIT_BB.
6538
6539 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6540 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6541 to the new function.
6542
6543 All local variables referenced in the region are assumed to be in
6544 the corresponding BLOCK_VARS and unexpanded variable lists
6545 associated with DEST_CFUN. */
6546
6547 basic_block
6548 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6549 basic_block exit_bb, tree orig_block)
6550 {
6551 vec<basic_block> bbs, dom_bbs;
6552 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6553 basic_block after, bb, *entry_pred, *exit_succ, abb;
6554 struct function *saved_cfun = cfun;
6555 int *entry_flag, *exit_flag;
6556 unsigned *entry_prob, *exit_prob;
6557 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6558 edge e;
6559 edge_iterator ei;
6560 htab_t new_label_map;
6561 struct pointer_map_t *vars_map, *eh_map;
6562 struct loop *loop = entry_bb->loop_father;
6563 struct loop *loop0 = get_loop (saved_cfun, 0);
6564 struct move_stmt_d d;
6565
6566 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6567 region. */
6568 gcc_assert (entry_bb != exit_bb
6569 && (!exit_bb
6570 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6571
6572 /* Collect all the blocks in the region. Manually add ENTRY_BB
6573 because it won't be added by dfs_enumerate_from. */
6574 bbs.create (0);
6575 bbs.safe_push (entry_bb);
6576 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6577
6578 /* The blocks that used to be dominated by something in BBS will now be
6579 dominated by the new block. */
6580 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6581 bbs.address (),
6582 bbs.length ());
6583
6584 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6585 the predecessor edges to ENTRY_BB and the successor edges to
6586 EXIT_BB so that we can re-attach them to the new basic block that
6587 will replace the region. */
6588 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6589 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6590 entry_flag = XNEWVEC (int, num_entry_edges);
6591 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6592 i = 0;
6593 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6594 {
6595 entry_prob[i] = e->probability;
6596 entry_flag[i] = e->flags;
6597 entry_pred[i++] = e->src;
6598 remove_edge (e);
6599 }
6600
6601 if (exit_bb)
6602 {
6603 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6604 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6605 exit_flag = XNEWVEC (int, num_exit_edges);
6606 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6607 i = 0;
6608 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6609 {
6610 exit_prob[i] = e->probability;
6611 exit_flag[i] = e->flags;
6612 exit_succ[i++] = e->dest;
6613 remove_edge (e);
6614 }
6615 }
6616 else
6617 {
6618 num_exit_edges = 0;
6619 exit_succ = NULL;
6620 exit_flag = NULL;
6621 exit_prob = NULL;
6622 }
6623
6624 /* Switch context to the child function to initialize DEST_FN's CFG. */
6625 gcc_assert (dest_cfun->cfg == NULL);
6626 push_cfun (dest_cfun);
6627
6628 init_empty_tree_cfg ();
6629
6630 /* Initialize EH information for the new function. */
6631 eh_map = NULL;
6632 new_label_map = NULL;
6633 if (saved_cfun->eh)
6634 {
6635 eh_region region = NULL;
6636
6637 FOR_EACH_VEC_ELT (bbs, i, bb)
6638 region = find_outermost_region_in_block (saved_cfun, bb, region);
6639
6640 init_eh_for_function ();
6641 if (region != NULL)
6642 {
6643 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6644 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6645 new_label_mapper, new_label_map);
6646 }
6647 }
6648
6649 /* Initialize an empty loop tree. */
6650 struct loops *loops = ggc_alloc_cleared_loops ();
6651 init_loops_structure (dest_cfun, loops, 1);
6652 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6653 set_loops_for_fn (dest_cfun, loops);
6654
6655 /* Move the outlined loop tree part. */
6656 num_nodes = bbs.length ();
6657 FOR_EACH_VEC_ELT (bbs, i, bb)
6658 {
6659 if (bb->loop_father->header == bb)
6660 {
6661 struct loop *this_loop = bb->loop_father;
6662 struct loop *outer = loop_outer (this_loop);
6663 if (outer == loop
6664 /* If the SESE region contains some bbs ending with
6665 a noreturn call, those are considered to belong
6666 to the outermost loop in saved_cfun, rather than
6667 the entry_bb's loop_father. */
6668 || outer == loop0)
6669 {
6670 if (outer != loop)
6671 num_nodes -= this_loop->num_nodes;
6672 flow_loop_tree_node_remove (bb->loop_father);
6673 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6674 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6675 }
6676 }
6677 else if (bb->loop_father == loop0 && loop0 != loop)
6678 num_nodes--;
6679
6680 /* Remove loop exits from the outlined region. */
6681 if (loops_for_fn (saved_cfun)->exits)
6682 FOR_EACH_EDGE (e, ei, bb->succs)
6683 {
6684 void **slot = htab_find_slot_with_hash
6685 (loops_for_fn (saved_cfun)->exits, e,
6686 htab_hash_pointer (e), NO_INSERT);
6687 if (slot)
6688 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6689 }
6690 }
6691
6692
6693 /* Adjust the number of blocks in the tree root of the outlined part. */
6694 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6695
6696 /* Setup a mapping to be used by move_block_to_fn. */
6697 loop->aux = current_loops->tree_root;
6698 loop0->aux = current_loops->tree_root;
6699
6700 pop_cfun ();
6701
6702 /* Move blocks from BBS into DEST_CFUN. */
6703 gcc_assert (bbs.length () >= 2);
6704 after = dest_cfun->cfg->x_entry_block_ptr;
6705 vars_map = pointer_map_create ();
6706
6707 memset (&d, 0, sizeof (d));
6708 d.orig_block = orig_block;
6709 d.new_block = DECL_INITIAL (dest_cfun->decl);
6710 d.from_context = cfun->decl;
6711 d.to_context = dest_cfun->decl;
6712 d.vars_map = vars_map;
6713 d.new_label_map = new_label_map;
6714 d.eh_map = eh_map;
6715 d.remap_decls_p = true;
6716
6717 FOR_EACH_VEC_ELT (bbs, i, bb)
6718 {
6719 /* No need to update edge counts on the last block. It has
6720 already been updated earlier when we detached the region from
6721 the original CFG. */
6722 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6723 after = bb;
6724 }
6725
6726 loop->aux = NULL;
6727 loop0->aux = NULL;
6728 /* Loop sizes are no longer correct, fix them up. */
6729 loop->num_nodes -= num_nodes;
6730 for (struct loop *outer = loop_outer (loop);
6731 outer; outer = loop_outer (outer))
6732 outer->num_nodes -= num_nodes;
6733 loop0->num_nodes -= bbs.length () - num_nodes;
6734
6735 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6736 {
6737 struct loop *aloop;
6738 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6739 if (aloop != NULL)
6740 {
6741 if (aloop->simduid)
6742 {
6743 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6744 d.to_context);
6745 dest_cfun->has_simduid_loops = true;
6746 }
6747 if (aloop->force_vect)
6748 dest_cfun->has_force_vect_loops = true;
6749 }
6750 }
6751
6752 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6753 if (orig_block)
6754 {
6755 tree block;
6756 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6757 == NULL_TREE);
6758 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6759 = BLOCK_SUBBLOCKS (orig_block);
6760 for (block = BLOCK_SUBBLOCKS (orig_block);
6761 block; block = BLOCK_CHAIN (block))
6762 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6763 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6764 }
6765
6766 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6767 vars_map, dest_cfun->decl);
6768
6769 if (new_label_map)
6770 htab_delete (new_label_map);
6771 if (eh_map)
6772 pointer_map_destroy (eh_map);
6773 pointer_map_destroy (vars_map);
6774
6775 /* Rewire the entry and exit blocks. The successor to the entry
6776 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6777 the child function. Similarly, the predecessor of DEST_FN's
6778 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6779 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6780 various CFG manipulation function get to the right CFG.
6781
6782 FIXME, this is silly. The CFG ought to become a parameter to
6783 these helpers. */
6784 push_cfun (dest_cfun);
6785 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6786 if (exit_bb)
6787 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6788 pop_cfun ();
6789
6790 /* Back in the original function, the SESE region has disappeared,
6791 create a new basic block in its place. */
6792 bb = create_empty_bb (entry_pred[0]);
6793 if (current_loops)
6794 add_bb_to_loop (bb, loop);
6795 for (i = 0; i < num_entry_edges; i++)
6796 {
6797 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6798 e->probability = entry_prob[i];
6799 }
6800
6801 for (i = 0; i < num_exit_edges; i++)
6802 {
6803 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6804 e->probability = exit_prob[i];
6805 }
6806
6807 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6808 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6809 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6810 dom_bbs.release ();
6811
6812 if (exit_bb)
6813 {
6814 free (exit_prob);
6815 free (exit_flag);
6816 free (exit_succ);
6817 }
6818 free (entry_prob);
6819 free (entry_flag);
6820 free (entry_pred);
6821 bbs.release ();
6822
6823 return bb;
6824 }
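
/* A minimal sketch of the calling pattern (CHILD_FN is a hypothetical
   FUNCTION_DECL that has no CFG yet, as in the OMP outlining code):

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   On return, NEW_BB replaces the whole region in the caller's CFG.  */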
6825
6826
6827 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
6828 dumpfile.h). */
6829
6830 void
6831 dump_function_to_file (tree fndecl, FILE *file, int flags)
6832 {
6833 tree arg, var, old_current_fndecl = current_function_decl;
6834 struct function *dsf;
6835 bool ignore_topmost_bind = false, any_var = false;
6836 basic_block bb;
6837 tree chain;
6838 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6839 && decl_is_tm_clone (fndecl));
6840 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6841
6842 current_function_decl = fndecl;
6843 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6844
6845 arg = DECL_ARGUMENTS (fndecl);
6846 while (arg)
6847 {
6848 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6849 fprintf (file, " ");
6850 print_generic_expr (file, arg, dump_flags);
6851 if (flags & TDF_VERBOSE)
6852 print_node (file, "", arg, 4);
6853 if (DECL_CHAIN (arg))
6854 fprintf (file, ", ");
6855 arg = DECL_CHAIN (arg);
6856 }
6857 fprintf (file, ")\n");
6858
6859 if (flags & TDF_VERBOSE)
6860 print_node (file, "", fndecl, 2);
6861
6862 dsf = DECL_STRUCT_FUNCTION (fndecl);
6863 if (dsf && (flags & TDF_EH))
6864 dump_eh_tree (file, dsf);
6865
6866 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
6867 {
6868 dump_node (fndecl, TDF_SLIM | flags, file);
6869 current_function_decl = old_current_fndecl;
6870 return;
6871 }
6872
6873 /* When GIMPLE is lowered, the variables are no longer available in
6874 BIND_EXPRs, so display them separately. */
6875 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
6876 {
6877 unsigned ix;
6878 ignore_topmost_bind = true;
6879
6880 fprintf (file, "{\n");
6881 if (!vec_safe_is_empty (fun->local_decls))
6882 FOR_EACH_LOCAL_DECL (fun, ix, var)
6883 {
6884 print_generic_decl (file, var, flags);
6885 if (flags & TDF_VERBOSE)
6886 print_node (file, "", var, 4);
6887 fprintf (file, "\n");
6888
6889 any_var = true;
6890 }
6891 if (gimple_in_ssa_p (cfun))
6892 for (ix = 1; ix < num_ssa_names; ++ix)
6893 {
6894 tree name = ssa_name (ix);
6895 if (name && !SSA_NAME_VAR (name))
6896 {
6897 fprintf (file, " ");
6898 print_generic_expr (file, TREE_TYPE (name), flags);
6899 fprintf (file, " ");
6900 print_generic_expr (file, name, flags);
6901 fprintf (file, ";\n");
6902
6903 any_var = true;
6904 }
6905 }
6906 }
6907
6908 if (fun && fun->decl == fndecl
6909 && fun->cfg
6910 && basic_block_info_for_function (fun))
6911 {
6912 /* If the CFG has been built, emit a CFG-based dump. */
6913 if (!ignore_topmost_bind)
6914 fprintf (file, "{\n");
6915
6916 if (any_var && n_basic_blocks_for_function (fun))
6917 fprintf (file, "\n");
6918
6919 FOR_EACH_BB_FN (bb, fun)
6920 dump_bb (file, bb, 2, flags | TDF_COMMENT);
6921
6922 fprintf (file, "}\n");
6923 }
6924 else if (DECL_SAVED_TREE (fndecl) == NULL)
6925 {
6926 /* The function is now in GIMPLE form but the CFG has not been
6927 built yet. Emit the single sequence of GIMPLE statements
6928 that make up its body. */
6929 gimple_seq body = gimple_body (fndecl);
6930
6931 if (gimple_seq_first_stmt (body)
6932 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6933 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6934 print_gimple_seq (file, body, 0, flags);
6935 else
6936 {
6937 if (!ignore_topmost_bind)
6938 fprintf (file, "{\n");
6939
6940 if (any_var)
6941 fprintf (file, "\n");
6942
6943 print_gimple_seq (file, body, 2, flags);
6944 fprintf (file, "}\n");
6945 }
6946 }
6947 else
6948 {
6949 int indent;
6950
6951 /* Make a tree based dump. */
6952 chain = DECL_SAVED_TREE (fndecl);
6953 if (chain && TREE_CODE (chain) == BIND_EXPR)
6954 {
6955 if (ignore_topmost_bind)
6956 {
6957 chain = BIND_EXPR_BODY (chain);
6958 indent = 2;
6959 }
6960 else
6961 indent = 0;
6962 }
6963 else
6964 {
6965 if (!ignore_topmost_bind)
6966 fprintf (file, "{\n");
6967 indent = 2;
6968 }
6969
6970 if (any_var)
6971 fprintf (file, "\n");
6972
6973 print_generic_stmt_indented (file, chain, flags, indent);
6974 if (ignore_topmost_bind)
6975 fprintf (file, "}\n");
6976 }
6977
6978 if (flags & TDF_ENUMERATE_LOCALS)
6979 dump_enumerated_decls (file, flags);
6980 fprintf (file, "\n\n");
6981
6982 current_function_decl = old_current_fndecl;
6983 }
6984
6985 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
6986
6987 DEBUG_FUNCTION void
6988 debug_function (tree fn, int flags)
6989 {
6990 dump_function_to_file (fn, stderr, flags);
6991 }
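
/* A minimal sketch: from a debugger, dump the current function with

     (gdb) call debug_function (current_function_decl, 0)

   or pass TDF_* flags (e.g. TDF_VOPS | TDF_DETAILS) for more detail.  */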
6992
6993
6994 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6995
6996 static void
6997 print_pred_bbs (FILE *file, basic_block bb)
6998 {
6999 edge e;
7000 edge_iterator ei;
7001
7002 FOR_EACH_EDGE (e, ei, bb->preds)
7003 fprintf (file, "bb_%d ", e->src->index);
7004 }
7005
7006
7007 /* Print on FILE the indexes for the successors of basic_block BB. */
7008
7009 static void
7010 print_succ_bbs (FILE *file, basic_block bb)
7011 {
7012 edge e;
7013 edge_iterator ei;
7014
7015 FOR_EACH_EDGE (e, ei, bb->succs)
7016 fprintf (file, "bb_%d ", e->dest->index);
7017 }
7018
7019 /* Print to FILE the basic block BB following the VERBOSITY level. */
7020
7021 void
7022 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7023 {
7024 char *s_indent = (char *) alloca ((size_t) indent + 1);
7025 memset ((void *) s_indent, ' ', (size_t) indent);
7026 s_indent[indent] = '\0';
7027
7028 /* Print basic_block's header. */
7029 if (verbosity >= 2)
7030 {
7031 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7032 print_pred_bbs (file, bb);
7033 fprintf (file, "}, succs = {");
7034 print_succ_bbs (file, bb);
7035 fprintf (file, "})\n");
7036 }
7037
7038 /* Print basic_block's body. */
7039 if (verbosity >= 3)
7040 {
7041 fprintf (file, "%s {\n", s_indent);
7042 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7043 fprintf (file, "%s }\n", s_indent);
7044 }
7045 }
7046
7047 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7048
7049 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7050 VERBOSITY level this outputs the contents of the loop, or just its
7051 structure. */
7052
7053 static void
7054 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7055 {
7056 char *s_indent;
7057 basic_block bb;
7058
7059 if (loop == NULL)
7060 return;
7061
7062 s_indent = (char *) alloca ((size_t) indent + 1);
7063 memset ((void *) s_indent, ' ', (size_t) indent);
7064 s_indent[indent] = '\0';
7065
7066 /* Print loop's header. */
7067 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7068 if (loop->header)
7069 fprintf (file, "header = %d", loop->header->index);
7070 else
7071 {
7072 fprintf (file, "deleted)\n");
7073 return;
7074 }
7075 if (loop->latch)
7076 fprintf (file, ", latch = %d", loop->latch->index);
7077 else
7078 fprintf (file, ", multiple latches");
7079 fprintf (file, ", niter = ");
7080 print_generic_expr (file, loop->nb_iterations, 0);
7081
7082 if (loop->any_upper_bound)
7083 {
7084 fprintf (file, ", upper_bound = ");
7085 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7086 }
7087
7088 if (loop->any_estimate)
7089 {
7090 fprintf (file, ", estimate = ");
7091 dump_double_int (file, loop->nb_iterations_estimate, true);
7092 }
7093 fprintf (file, ")\n");
7094
7095 /* Print loop's body. */
7096 if (verbosity >= 1)
7097 {
7098 fprintf (file, "%s{\n", s_indent);
7099 FOR_EACH_BB (bb)
7100 if (bb->loop_father == loop)
7101 print_loops_bb (file, bb, indent, verbosity);
7102
7103 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7104 fprintf (file, "%s}\n", s_indent);
7105 }
7106 }
7107
7108 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7109 spaces. Following VERBOSITY level this outputs the contents of the
7110 loop, or just its structure. */
7111
7112 static void
7113 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7114 int verbosity)
7115 {
7116 if (loop == NULL)
7117 return;
7118
7119 print_loop (file, loop, indent, verbosity);
7120 print_loop_and_siblings (file, loop->next, indent, verbosity);
7121 }
7122
7123 /* Pretty print on FILE the structure of all loops in the current
7124 function, starting from the root of the loop tree. */
7125
7126 void
7127 print_loops (FILE *file, int verbosity)
7128 {
7129 basic_block bb;
7130
7131 bb = ENTRY_BLOCK_PTR;
7132 if (bb && bb->loop_father)
7133 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7134 }
7135
7136 /* Dump a loop. */
7137
7138 DEBUG_FUNCTION void
7139 debug (struct loop &ref)
7140 {
7141 print_loop (stderr, &ref, 0, /*verbosity*/0);
7142 }
7143
7144 DEBUG_FUNCTION void
7145 debug (struct loop *ptr)
7146 {
7147 if (ptr)
7148 debug (*ptr);
7149 else
7150 fprintf (stderr, "<nil>\n");
7151 }
7152
7153 /* Dump a loop verbosely. */
7154
7155 DEBUG_FUNCTION void
7156 debug_verbose (struct loop &ref)
7157 {
7158 print_loop (stderr, &ref, 0, /*verbosity*/3);
7159 }
7160
7161 DEBUG_FUNCTION void
7162 debug_verbose (struct loop *ptr)
7163 {
7164 if (ptr)
7165 debug_verbose (*ptr);
7166 else
7167 fprintf (stderr, "<nil>\n");
7168 }
7169
7170
7171 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7172
7173 DEBUG_FUNCTION void
7174 debug_loops (int verbosity)
7175 {
7176 print_loops (stderr, verbosity);
7177 }
7178
7179 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7180
7181 DEBUG_FUNCTION void
7182 debug_loop (struct loop *loop, int verbosity)
7183 {
7184 print_loop (stderr, loop, 0, verbosity);
7185 }
7186
7187 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7188 level. */
7189
7190 DEBUG_FUNCTION void
7191 debug_loop_num (unsigned num, int verbosity)
7192 {
7193 debug_loop (get_loop (cfun, num), verbosity);
7194 }
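
/* A minimal sketch: from a debugger,

     (gdb) call debug_loops (3)
     (gdb) call debug_loop_num (1, 3)

   print the whole loop tree and then loop number 1, including the loop
   bodies (verbosity >= 3 dumps the basic blocks).  */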
7195
7196 /* Return true if BB ends with a call, possibly followed by some
7197 instructions that must stay with the call. Return false
7198 otherwise. */
7199
7200 static bool
7201 gimple_block_ends_with_call_p (basic_block bb)
7202 {
7203 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7204 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7205 }
7206
7207
7208 /* Return true if BB ends with a conditional branch. Return false
7209 otherwise. */
7210
7211 static bool
7212 gimple_block_ends_with_condjump_p (const_basic_block bb)
7213 {
7214 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7215 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7216 }
7217
7218
7219 /* Return true if we need to add fake edge to exit at statement T.
7220 Helper function for gimple_flow_call_edges_add. */
7221
7222 static bool
7223 need_fake_edge_p (gimple t)
7224 {
7225 tree fndecl = NULL_TREE;
7226 int call_flags = 0;
7227
7228 /* NORETURN and LONGJMP calls already have an edge to exit.
7229 CONST and PURE calls do not need one.
7230 We don't currently check for CONST and PURE here, although
7231 it would be a good idea, because those attributes are
7232 figured out from the RTL in mark_constant_function, and
7233 the counter incrementation code from -fprofile-arcs
7234 leads to different results from -fbranch-probabilities. */
7235 if (is_gimple_call (t))
7236 {
7237 fndecl = gimple_call_fndecl (t);
7238 call_flags = gimple_call_flags (t);
7239 }
7240
7241 if (is_gimple_call (t)
7242 && fndecl
7243 && DECL_BUILT_IN (fndecl)
7244 && (call_flags & ECF_NOTHROW)
7245 && !(call_flags & ECF_RETURNS_TWICE)
7246 /* fork() doesn't really return twice, but wrapping it
7247 in __gcov_fork(), which calls __gcov_flush() and
7248 clears the counters before forking, has the same
7249 effect as returning twice. Force a fake edge. */
7250 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7251 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7252 return false;
7253
7254 if (is_gimple_call (t))
7255 {
7256 edge_iterator ei;
7257 edge e;
7258 basic_block bb;
7259
7260 if (!(call_flags & ECF_NORETURN))
7261 return true;
7262
7263 bb = gimple_bb (t);
7264 FOR_EACH_EDGE (e, ei, bb->succs)
7265 if ((e->flags & EDGE_FAKE) == 0)
7266 return true;
7267 }
7268
7269 if (gimple_code (t) == GIMPLE_ASM
7270 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7271 return true;
7272
7273 return false;
7274 }
7275
7276
7277 /* Add fake edges to the function exit for any non-constant and
7278 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7279 for volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7280 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7281 that were split.
7282
7283 The goal is to expose cases in which entering a basic block does
7284 not imply that all subsequent instructions must be executed. */
7285
7286 static int
7287 gimple_flow_call_edges_add (sbitmap blocks)
7288 {
7289 int i;
7290 int blocks_split = 0;
7291 int last_bb = last_basic_block;
7292 bool check_last_block = false;
7293
7294 if (n_basic_blocks == NUM_FIXED_BLOCKS)
7295 return 0;
7296
7297 if (! blocks)
7298 check_last_block = true;
7299 else
7300 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7301
7302 /* In the last basic block, before epilogue generation, there will be
7303 a fallthru edge to EXIT. Special care is required if the last insn
7304 of the last basic block is a call because make_edge folds duplicate
7305 edges, which would result in the fallthru edge also being marked
7306 fake, which would result in the fallthru edge being removed by
7307 remove_fake_edges, which would result in an invalid CFG.
7308
7309 Moreover, we can't elide the outgoing fake edge, since the block
7310 profiler needs to take this into account in order to solve the minimal
7311 spanning tree in the case that the call doesn't return.
7312
7313 Handle this by adding a dummy instruction in a new last basic block. */
7314 if (check_last_block)
7315 {
7316 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7317 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7318 gimple t = NULL;
7319
7320 if (!gsi_end_p (gsi))
7321 t = gsi_stmt (gsi);
7322
7323 if (t && need_fake_edge_p (t))
7324 {
7325 edge e;
7326
7327 e = find_edge (bb, EXIT_BLOCK_PTR);
7328 if (e)
7329 {
7330 gsi_insert_on_edge (e, gimple_build_nop ());
7331 gsi_commit_edge_inserts ();
7332 }
7333 }
7334 }
7335
7336 /* Now add fake edges to the function exit for any non-constant
7337 calls, since there is no way we can determine whether they will
7338 return or not. */
7339 for (i = 0; i < last_bb; i++)
7340 {
7341 basic_block bb = BASIC_BLOCK (i);
7342 gimple_stmt_iterator gsi;
7343 gimple stmt, last_stmt;
7344
7345 if (!bb)
7346 continue;
7347
7348 if (blocks && !bitmap_bit_p (blocks, i))
7349 continue;
7350
7351 gsi = gsi_last_nondebug_bb (bb);
7352 if (!gsi_end_p (gsi))
7353 {
7354 last_stmt = gsi_stmt (gsi);
7355 do
7356 {
7357 stmt = gsi_stmt (gsi);
7358 if (need_fake_edge_p (stmt))
7359 {
7360 edge e;
7361
7362 /* The handling above of the final block before the
7363 epilogue should be enough to verify that there is
7364 no edge to the exit block in CFG already.
7365 Calling make_edge in such case would cause us to
7366 mark that edge as fake and remove it later. */
7367 #ifdef ENABLE_CHECKING
7368 if (stmt == last_stmt)
7369 {
7370 e = find_edge (bb, EXIT_BLOCK_PTR);
7371 gcc_assert (e == NULL);
7372 }
7373 #endif
7374
7375 /* Note that the following may create a new basic block
7376 and renumber the existing basic blocks. */
7377 if (stmt != last_stmt)
7378 {
7379 e = split_block (bb, stmt);
7380 if (e)
7381 blocks_split++;
7382 }
7383 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7384 }
7385 gsi_prev (&gsi);
7386 }
7387 while (!gsi_end_p (gsi));
7388 }
7389 }
7390
7391 if (blocks_split)
7392 verify_flow_info ();
7393
7394 return blocks_split;
7395 }
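
/* A minimal sketch of the intended protocol (a hypothetical client,
   reached via the flow_call_edges_add CFG hook): add fake edges for the
   whole CFG, run an analysis that must not assume calls return, then
   drop them again.

     int blocks_split = gimple_flow_call_edges_add (NULL);
     ...analysis...
     remove_fake_edges ();  */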
7396
7397 /* Removes edge E and all the blocks dominated by it, and updates dominance
7398 information. The IL in E->src needs to be updated separately.
7399 If dominance info is not available, only the edge E is removed. */
7400
7401 void
7402 remove_edge_and_dominated_blocks (edge e)
7403 {
7404 vec<basic_block> bbs_to_remove = vNULL;
7405 vec<basic_block> bbs_to_fix_dom = vNULL;
7406 bitmap df, df_idom;
7407 edge f;
7408 edge_iterator ei;
7409 bool none_removed = false;
7410 unsigned i;
7411 basic_block bb, dbb;
7412 bitmap_iterator bi;
7413
7414 if (!dom_info_available_p (CDI_DOMINATORS))
7415 {
7416 remove_edge (e);
7417 return;
7418 }
7419
7420 /* No updating is needed for edges to exit. */
7421 if (e->dest == EXIT_BLOCK_PTR)
7422 {
7423 if (cfgcleanup_altered_bbs)
7424 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7425 remove_edge (e);
7426 return;
7427 }
7428
7429 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7430 that is not dominated by E->dest, then this set is empty. Otherwise,
7431 all the basic blocks dominated by E->dest are removed.
7432
7433 Also, to DF_IDOM we store the immediate dominators of the blocks in
7434 the dominance frontier of E (i.e., of the successors of the
7435 removed blocks, if there are any, and of E->dest otherwise). */
7436 FOR_EACH_EDGE (f, ei, e->dest->preds)
7437 {
7438 if (f == e)
7439 continue;
7440
7441 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7442 {
7443 none_removed = true;
7444 break;
7445 }
7446 }
7447
7448 df = BITMAP_ALLOC (NULL);
7449 df_idom = BITMAP_ALLOC (NULL);
7450
7451 if (none_removed)
7452 bitmap_set_bit (df_idom,
7453 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7454 else
7455 {
7456 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7457 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7458 {
7459 FOR_EACH_EDGE (f, ei, bb->succs)
7460 {
7461 if (f->dest != EXIT_BLOCK_PTR)
7462 bitmap_set_bit (df, f->dest->index);
7463 }
7464 }
7465 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7466 bitmap_clear_bit (df, bb->index);
7467
7468 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7469 {
7470 bb = BASIC_BLOCK (i);
7471 bitmap_set_bit (df_idom,
7472 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7473 }
7474 }
7475
7476 if (cfgcleanup_altered_bbs)
7477 {
7478 /* Record the set of the altered basic blocks. */
7479 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7480 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7481 }
7482
7483 /* Remove E and the cancelled blocks. */
7484 if (none_removed)
7485 remove_edge (e);
7486 else
7487 {
7488 /* Walk backwards so as to get a chance to substitute all
7489 released DEFs into debug stmts. See
7490 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7491 details. */
7492 for (i = bbs_to_remove.length (); i-- > 0; )
7493 delete_basic_block (bbs_to_remove[i]);
7494 }
7495
7496 /* Update the dominance information. The immediate dominator may change only
7497 for blocks whose immediate dominator belongs to DF_IDOM:
7498
7499 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7500 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7501 Z dominates X after the removal. Before removal, there exists a path P
7502 from Y to X that avoids Z. Let F be the last edge on P that is
7503 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7504 dominates W, and because of P, Z does not dominate W), and W belongs to
7505 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7506 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7507 {
7508 bb = BASIC_BLOCK (i);
7509 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7510 dbb;
7511 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7512 bbs_to_fix_dom.safe_push (dbb);
7513 }
7514
7515 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7516
7517 BITMAP_FREE (df);
7518 BITMAP_FREE (df_idom);
7519 bbs_to_remove.release ();
7520 bbs_to_fix_dom.release ();
7521 }
7522
7523 /* Purge dead EH edges from basic block BB. */
7524
7525 bool
7526 gimple_purge_dead_eh_edges (basic_block bb)
7527 {
7528 bool changed = false;
7529 edge e;
7530 edge_iterator ei;
7531 gimple stmt = last_stmt (bb);
7532
7533 if (stmt && stmt_can_throw_internal (stmt))
7534 return false;
7535
7536 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7537 {
7538 if (e->flags & EDGE_EH)
7539 {
7540 remove_edge_and_dominated_blocks (e);
7541 changed = true;
7542 }
7543 else
7544 ei_next (&ei);
7545 }
7546
7547 return changed;
7548 }
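
/* A minimal sketch of the typical use (OLD_STMT and NEW_STMT are
   hypothetical): after replacing a potentially-throwing statement with
   one that cannot throw, remove the now-dead EH edges.

     gimple_stmt_iterator gsi = gsi_for_stmt (old_stmt);
     gsi_replace (&gsi, new_stmt, true);
     if (maybe_clean_eh_stmt (new_stmt))
       gimple_purge_dead_eh_edges (gimple_bb (new_stmt));  */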
7549
7550 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7551
7552 bool
7553 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7554 {
7555 bool changed = false;
7556 unsigned i;
7557 bitmap_iterator bi;
7558
7559 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7560 {
7561 basic_block bb = BASIC_BLOCK (i);
7562
7563 /* Earlier gimple_purge_dead_eh_edges could have removed
7564 this basic block already. */
7565 gcc_assert (bb || changed);
7566 if (bb != NULL)
7567 changed |= gimple_purge_dead_eh_edges (bb);
7568 }
7569
7570 return changed;
7571 }
7572
7573 /* Purge dead abnormal call edges from basic block BB. */
7574
7575 bool
7576 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7577 {
7578 bool changed = false;
7579 edge e;
7580 edge_iterator ei;
7581 gimple stmt = last_stmt (bb);
7582
7583 if (!cfun->has_nonlocal_label
7584 && !cfun->calls_setjmp)
7585 return false;
7586
7587 if (stmt && stmt_can_make_abnormal_goto (stmt))
7588 return false;
7589
7590 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7591 {
7592 if (e->flags & EDGE_ABNORMAL)
7593 {
7594 if (e->flags & EDGE_FALLTHRU)
7595 e->flags &= ~EDGE_ABNORMAL;
7596 else
7597 remove_edge_and_dominated_blocks (e);
7598 changed = true;
7599 }
7600 else
7601 ei_next (&ei);
7602 }
7603
7604 return changed;
7605 }
7606
7607 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7608
7609 bool
7610 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7611 {
7612 bool changed = false;
7613 unsigned i;
7614 bitmap_iterator bi;
7615
7616 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7617 {
7618 basic_block bb = BASIC_BLOCK (i);
7619
7620 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7621 this basic block already. */
7622 gcc_assert (bb || changed);
7623 if (bb != NULL)
7624 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7625 }
7626
7627 return changed;
7628 }
7629
7630 /* This function is called whenever a new edge is created or
7631 redirected. */
7632
7633 static void
7634 gimple_execute_on_growing_pred (edge e)
7635 {
7636 basic_block bb = e->dest;
7637
7638 if (!gimple_seq_empty_p (phi_nodes (bb)))
7639 reserve_phi_args_for_new_edge (bb);
7640 }
7641
7642 /* This function is called immediately before edge E is removed from
7643 the edge vector E->dest->preds. */
7644
7645 static void
7646 gimple_execute_on_shrinking_pred (edge e)
7647 {
7648 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7649 remove_phi_args (e);
7650 }
7651
7652 /*---------------------------------------------------------------------------
7653 Helper functions for Loop versioning
7654 ---------------------------------------------------------------------------*/
7655
7656 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7657 of 'first'. Both of them are dominated by 'new_head' basic block. When
7658 'new_head' was created by splitting 'second's incoming edge, split_edge()
7659 added phi arguments on that edge. Later, an additional edge 'e' was
7660 created to connect 'new_head' and 'first'. This routine now adds, on the
7661 additional edge 'e', the same phi args that the edge from 'new_head' to
7662 'second' received as part of the edge splitting. */
7663
7664 static void
7665 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7666 basic_block new_head, edge e)
7667 {
7668 gimple phi1, phi2;
7669 gimple_stmt_iterator psi1, psi2;
7670 tree def;
7671 edge e2 = find_edge (new_head, second);
7672
7673 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7674 edge, we should always have an edge from NEW_HEAD to SECOND. */
7675 gcc_assert (e2 != NULL);
7676
7677 /* Browse all 'second' basic block phi nodes and add phi args to
7678 edge 'e' for 'first' head. PHI args are always in correct order. */
7679
7680 for (psi2 = gsi_start_phis (second),
7681 psi1 = gsi_start_phis (first);
7682 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7683 gsi_next (&psi2), gsi_next (&psi1))
7684 {
7685 phi1 = gsi_stmt (psi1);
7686 phi2 = gsi_stmt (psi2);
7687 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7688 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7689 }
7690 }
7691
7692
7693 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7694 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7695 the destination of the ELSE part. */
7696
7697 static void
7698 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7699 basic_block second_head ATTRIBUTE_UNUSED,
7700 basic_block cond_bb, void *cond_e)
7701 {
7702 gimple_stmt_iterator gsi;
7703 gimple new_cond_expr;
7704 tree cond_expr = (tree) cond_e;
7705 edge e0;
7706
7707 /* Build the new conditional expression. */
7708 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7709 NULL_TREE, NULL_TREE);
7710
7711 /* Add new cond in cond_bb. */
7712 gsi = gsi_last_bb (cond_bb);
7713 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7714
7715 /* Adjust edges appropriately to connect new head with first head
7716 as well as second head. */
7717 e0 = single_succ_edge (cond_bb);
7718 e0->flags &= ~EDGE_FALLTHRU;
7719 e0->flags |= EDGE_FALSE_VALUE;
7720 }
7721
7722
7723 /* Do book-keeping of basic block BB for the profile consistency checker.
7724 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
7725 do post-pass accounting. Store the counts in RECORD. */
7726 static void
7727 gimple_account_profile_record (basic_block bb, int after_pass,
7728 struct profile_record *record)
7729 {
7730 gimple_stmt_iterator i;
7731 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7732 {
7733 record->size[after_pass]
7734 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7735 if (profile_status == PROFILE_READ)
7736 record->time[after_pass]
7737 += estimate_num_insns (gsi_stmt (i),
7738 &eni_time_weights) * bb->count;
7739 else if (profile_status == PROFILE_GUESSED)
7740 record->time[after_pass]
7741 += estimate_num_insns (gsi_stmt (i),
7742 &eni_time_weights) * bb->frequency;
7743 }
7744 }
7745
7746 struct cfg_hooks gimple_cfg_hooks = {
7747 "gimple",
7748 gimple_verify_flow_info,
7749 gimple_dump_bb, /* dump_bb */
7750 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7751 create_bb, /* create_basic_block */
7752 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7753 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7754 gimple_can_remove_branch_p, /* can_remove_branch_p */
7755 remove_bb, /* delete_basic_block */
7756 gimple_split_block, /* split_block */
7757 gimple_move_block_after, /* move_block_after */
7758 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7759 gimple_merge_blocks, /* merge_blocks */
7760 gimple_predict_edge, /* predict_edge */
7761 gimple_predicted_by_p, /* predicted_by_p */
7762 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7763 gimple_duplicate_bb, /* duplicate_block */
7764 gimple_split_edge, /* split_edge */
7765 gimple_make_forwarder_block, /* make_forward_block */
7766 NULL, /* tidy_fallthru_edge */
7767 NULL, /* force_nonfallthru */
7768 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7769 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7770 gimple_flow_call_edges_add, /* flow_call_edges_add */
7771 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7772 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7773 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7774 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7775 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7776 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7777 flush_pending_stmts, /* flush_pending_stmts */
7778 gimple_empty_block_p, /* block_empty_p */
7779 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7780 gimple_account_profile_record,
7781 };
7782
7783
7784 /* Split all critical edges. */
7785
7786 static unsigned int
7787 split_critical_edges (void)
7788 {
7789 basic_block bb;
7790 edge e;
7791 edge_iterator ei;
7792
7793 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7794 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7795 mappings around the calls to split_edge. */
7796 start_recording_case_labels ();
7797 FOR_ALL_BB (bb)
7798 {
7799 FOR_EACH_EDGE (e, ei, bb->succs)
7800 {
7801 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7802 split_edge (e);
7803 /* PRE inserts statements on edges and expects that,
7804 since split_critical_edges was run beforehand, committing edge
7805 insertions will not split more edges. In addition to critical
7806 edges we must split edges whose source block ends in a control
7807 flow statement, such as RESX, and whose destination cannot take
7808 the insertion. Go ahead and split them too. This matches the
7809 logic in gimple_find_edge_insert_loc. */
7810 else if ((!single_pred_p (e->dest)
7811 || !gimple_seq_empty_p (phi_nodes (e->dest))
7812 || e->dest == EXIT_BLOCK_PTR)
7813 && e->src != ENTRY_BLOCK_PTR
7814 && !(e->flags & EDGE_ABNORMAL))
7815 {
7816 gimple_stmt_iterator gsi;
7817
7818 gsi = gsi_last_bb (e->src);
7819 if (!gsi_end_p (gsi)
7820 && stmt_ends_bb_p (gsi_stmt (gsi))
7821 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7822 && !gimple_call_builtin_p (gsi_stmt (gsi),
7823 BUILT_IN_RETURN)))
7824 split_edge (e);
7825 }
7826 }
7827 }
7828 end_recording_case_labels ();
7829 return 0;
7830 }
7831
7832 namespace {
7833
7834 const pass_data pass_data_split_crit_edges =
7835 {
7836 GIMPLE_PASS, /* type */
7837 "crited", /* name */
7838 OPTGROUP_NONE, /* optinfo_flags */
7839 false, /* has_gate */
7840 true, /* has_execute */
7841 TV_TREE_SPLIT_EDGES, /* tv_id */
7842 PROP_cfg, /* properties_required */
7843 PROP_no_crit_edges, /* properties_provided */
7844 0, /* properties_destroyed */
7845 0, /* todo_flags_start */
7846 TODO_verify_flow, /* todo_flags_finish */
7847 };
7848
7849 class pass_split_crit_edges : public gimple_opt_pass
7850 {
7851 public:
7852 pass_split_crit_edges (gcc::context *ctxt)
7853 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
7854 {}
7855
7856 /* opt_pass methods: */
7857 unsigned int execute () { return split_critical_edges (); }
7858
7859 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
7860 }; // class pass_split_crit_edges
7861
7862 } // anon namespace
7863
7864 gimple_opt_pass *
7865 make_pass_split_crit_edges (gcc::context *ctxt)
7866 {
7867 return new pass_split_crit_edges (ctxt);
7868 }
7869
7870
7871 /* Build a ternary operation and gimplify it. Emit code before GSI.
7872 Return the gimple_val holding the result. */
7873
7874 tree
7875 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7876 tree type, tree a, tree b, tree c)
7877 {
7878 tree ret;
7879 location_t loc = gimple_location (gsi_stmt (*gsi));
7880
7881 ret = fold_build3_loc (loc, code, type, a, b, c);
7882 STRIP_NOPS (ret);
7883
7884 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7885 GSI_SAME_STMT);
7886 }
7887
7888 /* Build a binary operation and gimplify it. Emit code before GSI.
7889 Return the gimple_val holding the result. */
7890
7891 tree
7892 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7893 tree type, tree a, tree b)
7894 {
7895 tree ret;
7896
7897 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7898 STRIP_NOPS (ret);
7899
7900 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7901 GSI_SAME_STMT);
7902 }
7903
7904 /* Build a unary operation and gimplify it. Emit code before GSI.
7905 Return the gimple_val holding the result. */
7906
7907 tree
7908 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7909 tree a)
7910 {
7911 tree ret;
7912
7913 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7914 STRIP_NOPS (ret);
7915
7916 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7917 GSI_SAME_STMT);
7918 }
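
/* A minimal sketch (A, B, C and TYPE are hypothetical trees): build and
   gimplify (A + B) * C before GSI; each call returns a gimple_val that
   can feed the next one.

     tree t = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     t = gimplify_build2 (gsi, MULT_EXPR, type, t, c);  */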
7919
7920
7921 \f
7922 /* Emit return warnings. */
7923
7924 static unsigned int
7925 execute_warn_function_return (void)
7926 {
7927 source_location location;
7928 gimple last;
7929 edge e;
7930 edge_iterator ei;
7931
7932 if (!targetm.warn_func_return (cfun->decl))
7933 return 0;
7934
7935 /* If we have a path to EXIT, then we do return. */
7936 if (TREE_THIS_VOLATILE (cfun->decl)
7937 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7938 {
7939 location = UNKNOWN_LOCATION;
7940 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7941 {
7942 last = last_stmt (e->src);
7943 if ((gimple_code (last) == GIMPLE_RETURN
7944 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7945 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7946 break;
7947 }
7948 if (location == UNKNOWN_LOCATION)
7949 location = cfun->function_end_locus;
7950 warning_at (location, 0, "%<noreturn%> function does return");
7951 }
7952
7953 /* If we see "return;" in some basic block, then we do reach the end
7954 without returning a value. */
7955 else if (warn_return_type
7956 && !TREE_NO_WARNING (cfun->decl)
7957 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7958 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7959 {
7960 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7961 {
7962 gimple last = last_stmt (e->src);
7963 if (gimple_code (last) == GIMPLE_RETURN
7964 && gimple_return_retval (last) == NULL
7965 && !gimple_no_warning_p (last))
7966 {
7967 location = gimple_location (last);
7968 if (location == UNKNOWN_LOCATION)
7969 location = cfun->function_end_locus;
7970 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7971 TREE_NO_WARNING (cfun->decl) = 1;
7972 break;
7973 }
7974 }
7975 }
7976 return 0;
7977 }
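
/* For instance (an illustrative sketch, not code from this file), given

     int f (int x) { if (x) return 1; }

   the path that falls off the end reaches EXIT through a value-less
   GIMPLE_RETURN, so the -Wreturn-type warning above triggers; and given

     __attribute__ ((noreturn)) void g (void) { }

   the decl is marked TREE_THIS_VOLATILE yet EXIT has a predecessor, so
   the "%<noreturn%> function does return" warning triggers.  */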
7978
7979
7980 /* Given a basic block B which ends with a conditional and has
7981 precisely two successors, determine which of the edges is taken if
7982 the conditional is true and which is taken if the conditional is
7983 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7984
7985 void
7986 extract_true_false_edges_from_block (basic_block b,
7987 edge *true_edge,
7988 edge *false_edge)
7989 {
7990 edge e = EDGE_SUCC (b, 0);
7991
7992 if (e->flags & EDGE_TRUE_VALUE)
7993 {
7994 *true_edge = e;
7995 *false_edge = EDGE_SUCC (b, 1);
7996 }
7997 else
7998 {
7999 *false_edge = e;
8000 *true_edge = EDGE_SUCC (b, 1);
8001 }
8002 }
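
/* Usage sketch (illustrative): for a block BB known to end in a
   GIMPLE_COND,

     edge e_true, e_false;
     extract_true_false_edges_from_block (bb, &e_true, &e_false);

   after which e_true->dest is the successor taken when the condition is
   true.  This relies on the CFG builder marking the two successor edges
   of a GIMPLE_COND with EDGE_TRUE_VALUE and EDGE_FALSE_VALUE.  */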
8003
8004 namespace {
8005
8006 const pass_data pass_data_warn_function_return =
8007 {
8008 GIMPLE_PASS, /* type */
8009 "*warn_function_return", /* name */
8010 OPTGROUP_NONE, /* optinfo_flags */
8011 false, /* has_gate */
8012 true, /* has_execute */
8013 TV_NONE, /* tv_id */
8014 PROP_cfg, /* properties_required */
8015 0, /* properties_provided */
8016 0, /* properties_destroyed */
8017 0, /* todo_flags_start */
8018 0, /* todo_flags_finish */
8019 };
8020
8021 class pass_warn_function_return : public gimple_opt_pass
8022 {
8023 public:
8024 pass_warn_function_return (gcc::context *ctxt)
8025 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8026 {}
8027
8028 /* opt_pass methods: */
8029 unsigned int execute () { return execute_warn_function_return (); }
8030
8031 }; // class pass_warn_function_return
8032
8033 } // anon namespace
8034
8035 gimple_opt_pass *
8036 make_pass_warn_function_return (gcc::context *ctxt)
8037 {
8038 return new pass_warn_function_return (ctxt);
8039 }
8040
8041 /* Walk a gimplified function and warn about calls whose return value is
8042 ignored and whose callee has attribute ((warn_unused_result)) set. This
8043 is done before inlining, so we don't have to worry about that. */
8044
8045 static void
8046 do_warn_unused_result (gimple_seq seq)
8047 {
8048 tree fdecl, ftype;
8049 gimple_stmt_iterator i;
8050
8051 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8052 {
8053 gimple g = gsi_stmt (i);
8054
8055 switch (gimple_code (g))
8056 {
8057 case GIMPLE_BIND:
8058 do_warn_unused_result (gimple_bind_body (g));
8059 break;
8060 case GIMPLE_TRY:
8061 do_warn_unused_result (gimple_try_eval (g));
8062 do_warn_unused_result (gimple_try_cleanup (g));
8063 break;
8064 case GIMPLE_CATCH:
8065 do_warn_unused_result (gimple_catch_handler (g));
8066 break;
8067 case GIMPLE_EH_FILTER:
8068 do_warn_unused_result (gimple_eh_filter_failure (g));
8069 break;
8070
8071 case GIMPLE_CALL:
8072 if (gimple_call_lhs (g))
8073 break;
8074 if (gimple_call_internal_p (g))
8075 break;
8076
8077 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8078 LHS. All calls whose value is ignored should be
8079 represented like this. Look for the attribute. */
8080 fdecl = gimple_call_fndecl (g);
8081 ftype = gimple_call_fntype (g);
8082
8083 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8084 {
8085 location_t loc = gimple_location (g);
8086
8087 if (fdecl)
8088 warning_at (loc, OPT_Wunused_result,
8089 "ignoring return value of %qD, "
8090 "declared with attribute warn_unused_result",
8091 fdecl);
8092 else
8093 warning_at (loc, OPT_Wunused_result,
8094 "ignoring return value of function "
8095 "declared with attribute warn_unused_result");
8096 }
8097 break;
8098
8099 default:
8100 /* Not a container, not a call, or a call whose value is used. */
8101 break;
8102 }
8103 }
8104 }
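
/* As an illustrative example (not code from this file), compiling

     __attribute__ ((warn_unused_result)) int try_lock (void);
     void f (void) { try_lock (); }

   leaves the call in f without an LHS, so the walk above finds the
   attribute on the call's function type and warns that the return value
   of 'try_lock' is ignored.  */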
8105
8106 static unsigned int
8107 run_warn_unused_result (void)
8108 {
8109 do_warn_unused_result (gimple_body (current_function_decl));
8110 return 0;
8111 }
8112
8113 static bool
8114 gate_warn_unused_result (void)
8115 {
8116 return flag_warn_unused_result;
8117 }
8118
8119 namespace {
8120
8121 const pass_data pass_data_warn_unused_result =
8122 {
8123 GIMPLE_PASS, /* type */
8124 "*warn_unused_result", /* name */
8125 OPTGROUP_NONE, /* optinfo_flags */
8126 true, /* has_gate */
8127 true, /* has_execute */
8128 TV_NONE, /* tv_id */
8129 PROP_gimple_any, /* properties_required */
8130 0, /* properties_provided */
8131 0, /* properties_destroyed */
8132 0, /* todo_flags_start */
8133 0, /* todo_flags_finish */
8134 };
8135
8136 class pass_warn_unused_result : public gimple_opt_pass
8137 {
8138 public:
8139 pass_warn_unused_result (gcc::context *ctxt)
8140 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8141 {}
8142
8143 /* opt_pass methods: */
8144 bool gate () { return gate_warn_unused_result (); }
8145 unsigned int execute () { return run_warn_unused_result (); }
8146
8147 }; // class pass_warn_unused_result
8148
8149 } // anon namespace
8150
8151 gimple_opt_pass *
8152 make_pass_warn_unused_result (gcc::context *ctxt)
8153 {
8154 return new pass_warn_unused_result (ctxt);
8155 }
8156
8157 /* IPA passes, compilation of earlier functions or inlining
8158 might have changed some properties, such as marking functions nothrow,
8159 pure, const or noreturn.
8160 Remove redundant edges and basic blocks, and create new ones if necessary.
8161
8162 This pass can't be executed as a standalone pass from the pass manager,
8163 because between inlining and this fixup verify_flow_info would fail. */
8164
8165 unsigned int
8166 execute_fixup_cfg (void)
8167 {
8168 basic_block bb;
8169 gimple_stmt_iterator gsi;
8170 int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
8171 gcov_type count_scale;
8172 edge e;
8173 edge_iterator ei;
8174
8175 count_scale
8176 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
8177 ENTRY_BLOCK_PTR->count);
8178
8179 ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
8180 EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
8181 count_scale);
8182
8183 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
8184 e->count = apply_scale (e->count, count_scale);
8185
8186 FOR_EACH_BB (bb)
8187 {
8188 bb->count = apply_scale (bb->count, count_scale);
8189 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8190 {
8191 gimple stmt = gsi_stmt (gsi);
8192 tree decl = is_gimple_call (stmt)
8193 ? gimple_call_fndecl (stmt)
8194 : NULL;
8195 if (decl)
8196 {
8197 int flags = gimple_call_flags (stmt);
8198 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8199 {
8200 if (gimple_purge_dead_abnormal_call_edges (bb))
8201 todo |= TODO_cleanup_cfg;
8202
8203 if (gimple_in_ssa_p (cfun))
8204 {
8205 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8206 update_stmt (stmt);
8207 }
8208 }
8209
8210 if (flags & ECF_NORETURN
8211 && fixup_noreturn_call (stmt))
8212 todo |= TODO_cleanup_cfg;
8213 }
8214
8215 if (maybe_clean_eh_stmt (stmt)
8216 && gimple_purge_dead_eh_edges (bb))
8217 todo |= TODO_cleanup_cfg;
8218 }
8219
8220 FOR_EACH_EDGE (e, ei, bb->succs)
8221 e->count = apply_scale (e->count, count_scale);
8222
8223 /* If we have a basic block with no successors that does not
8224 end with a control statement or a noreturn call, end it with
8225 a call to __builtin_unreachable. This situation can occur
8226 when inlining a noreturn call that does in fact return. */
8227 if (EDGE_COUNT (bb->succs) == 0)
8228 {
8229 gimple stmt = last_stmt (bb);
8230 if (!stmt
8231 || (!is_ctrl_stmt (stmt)
8232 && (!is_gimple_call (stmt)
8233 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8234 {
8235 stmt = gimple_build_call
8236 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8237 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8238 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8239 }
8240 }
8241 }
8242 if (count_scale != REG_BR_PROB_BASE)
8243 compute_function_frequency ();
8244
8245 /* We just processed all calls. */
8246 if (cfun->gimple_df)
8247 vec_free (MODIFIED_NORETURN_CALLS (cfun));
8248
8249 /* Dump a textual representation of the flowgraph. */
8250 if (dump_file)
8251 gimple_dump_cfg (dump_file, dump_flags);
8252
8253 if (current_loops
8254 && (todo & TODO_cleanup_cfg))
8255 loops_state_set (LOOPS_NEED_FIXUP);
8256
8257 return todo;
8258 }
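
/* Worked example of the count scaling above (illustrative numbers): if
   the cgraph node records a count of 50 while ENTRY_BLOCK_PTR->count is
   100, count_scale is GCOV_COMPUTE_SCALE (50, 100), i.e. half of
   REG_BR_PROB_BASE, and apply_scale then halves every block and edge
   count, keeping the profile consistent with the updated entry count.

   As for the __builtin_unreachable case: inlining a call to a buggy
   noreturn function that does return leaves a block with no successors
   and no control statement at its end; capping it with
   __builtin_unreachable () keeps later passes from seeing an apparent
   fall-through out of the function.  */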
8259
8260 namespace {
8261
8262 const pass_data pass_data_fixup_cfg =
8263 {
8264 GIMPLE_PASS, /* type */
8265 "*free_cfg_annotations", /* name */
8266 OPTGROUP_NONE, /* optinfo_flags */
8267 false, /* has_gate */
8268 true, /* has_execute */
8269 TV_NONE, /* tv_id */
8270 PROP_cfg, /* properties_required */
8271 0, /* properties_provided */
8272 0, /* properties_destroyed */
8273 0, /* todo_flags_start */
8274 0, /* todo_flags_finish */
8275 };
8276
8277 class pass_fixup_cfg : public gimple_opt_pass
8278 {
8279 public:
8280 pass_fixup_cfg (gcc::context *ctxt)
8281 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8282 {}
8283
8284 /* opt_pass methods: */
8285 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8286 unsigned int execute () { return execute_fixup_cfg (); }
8287
8288 }; // class pass_fixup_cfg
8289
8290 } // anon namespace
8291
8292 gimple_opt_pass *
8293 make_pass_fixup_cfg (gcc::context *ctxt)
8294 {
8295 return new pass_fixup_cfg (ctxt);
8296 }
8297
8298 /* Garbage collection support for edge_def. */
8299
8300 extern void gt_ggc_mx (tree&);
8301 extern void gt_ggc_mx (gimple&);
8302 extern void gt_ggc_mx (rtx&);
8303 extern void gt_ggc_mx (basic_block&);
8304
8305 void
8306 gt_ggc_mx (edge_def *e)
8307 {
8308 tree block = LOCATION_BLOCK (e->goto_locus);
8309 gt_ggc_mx (e->src);
8310 gt_ggc_mx (e->dest);
8311 if (current_ir_type () == IR_GIMPLE)
8312 gt_ggc_mx (e->insns.g);
8313 else
8314 gt_ggc_mx (e->insns.r);
8315 gt_ggc_mx (block);
8316 }
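
/* Note: E->insns is a union holding either a pending gimple sequence or
   a pending rtl sequence; only the member matching the current IR is
   meaningful, hence the current_ir_type () dispatch here and in the PCH
   walkers below.  */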
8317
8318 /* PCH support for edge_def. */
8319
8320 extern void gt_pch_nx (tree&);
8321 extern void gt_pch_nx (gimple&);
8322 extern void gt_pch_nx (rtx&);
8323 extern void gt_pch_nx (basic_block&);
8324
8325 void
8326 gt_pch_nx (edge_def *e)
8327 {
8328 tree block = LOCATION_BLOCK (e->goto_locus);
8329 gt_pch_nx (e->src);
8330 gt_pch_nx (e->dest);
8331 if (current_ir_type () == IR_GIMPLE)
8332 gt_pch_nx (e->insns.g);
8333 else
8334 gt_pch_nx (e->insns.r);
8335 gt_pch_nx (block);
8336 }
8337
8338 void
8339 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8340 {
8341 tree block = LOCATION_BLOCK (e->goto_locus);
8342 op (&(e->src), cookie);
8343 op (&(e->dest), cookie);
8344 if (current_ir_type () == IR_GIMPLE)
8345 op (&(e->insns.g), cookie);
8346 else
8347 op (&(e->insns.r), cookie);
8348 op (&(block), cookie);
8349 }