/* Control flow functions for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "gimple-pretty-print.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

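/* Note that both the hash and the equality function above look only at
   the line number, so all locations on the same source line deliberately
   share a single entry in this table, and hence a single discriminator
   counter.  */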
static hash_table <locus_discrim_hasher> discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (basic_block_info->length () < (size_t) n_basic_blocks)
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */
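/* As a rough sketch, the factoring below rewrites each original

     goto *DEST;

   into

     gotovar = DEST;
     goto <factored_label>;

   where a single shared block holds

     <factored_label>:
     goto *gotovar;  */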

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
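          /* For example (a sketch), for a setjmp-style call

               LHS = setjmp (BUF);

             the LHS is redirected to a fresh temporary and a copy is
             emitted after the call:

               tmp = setjmp (BUF);
               LHS = tmp;

             so the abnormal edge out of the call still sees the old
             value of LHS.  */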
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */
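/* Note that the kind of outgoing edges a block receives is decided
   purely by the code of its last statement; a block whose last
   statement cannot transfer control simply falls through to the next
   block in layout order.  */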

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            CASE_GIMPLE_OMP:
              fallthru = make_gimple_omp_edges (bb, &cur_region);
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
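/* The counter for a given line starts at zero and is incremented before
   being returned, so the first block needing a discriminator on a line
   gets 1, the next gets 2, and so on.  */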

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */
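/* For each block whose last statement has a known location, every
   successor that starts or ends on the same source line must be
   distinguishable from it: if the successor already has a discriminator
   and the block does not, the block receives a fresh one; otherwise the
   successor does.  */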

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings reasonably accurate.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */
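/* Note that this walks every basic block in the function looking for
   candidate targets: FORCED_LABELs when the source is a computed goto,
   DECL_NONLOCAL labels when it is a call; a block starting with a
   setjmp-like (ECF_RETURNS_TWICE) call also receives an abnormal
   edge.  */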

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple label_stmt = gsi_stmt (gsi);
          tree target;

          if (gimple_code (label_stmt) != GIMPLE_LABEL)
            break;

          target = gimple_label_label (label_stmt);

          /* Make an edge to every label block that has been marked as a
             potential target for a computed goto or a non-local goto.  */
          if ((FORCED_LABEL (target) && !for_call)
              || (DECL_NONLOCAL (target) && for_call))
            {
              make_edge (bb, target_bb, EDGE_ABNORMAL);
              break;
            }
        }
      if (!gsi_end_p (gsi)
          && is_gimple_debug (gsi_stmt (gsi)))
        gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
        {
          /* Make an edge to every setjmp-like call.  */
          gimple call_stmt = gsi_stmt (gsi);
          if (is_gimple_call (call_stmt)
              && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
            make_edge (bb, target_bb, EDGE_ABNORMAL);
        }
    }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
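/* Cases whose destination block is the same as the default case's are
   also discarded here, since the default edge already covers them.  */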

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */
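/* Merging requires, among the checks below, that B is A's only
   successor, A is B's only predecessor, the edge between them is not
   complex, A does not end in a control statement, and B's labels and
   PHI nodes can be disposed of safely.  */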

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change something from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a GIMPLE_CALL.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (dump_file, bb, 0, dump_flags);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label; if the argument is
         not a label, then we cannot construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}

/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
   statement, determine which of the outgoing edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_computed_goto (basic_block bb, tree val)
{
  basic_block dest;
  edge e = NULL;

  dest = label_to_block (val);
  if (dest)
    {
      e = find_edge (bb, dest);
      gcc_assert (e != NULL);
    }

  return e;
}

/* Given a constant value VAL and the entry block BB to a COND_EXPR
   statement, determine which of the two edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_cond_expr (basic_block bb, tree val)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

  gcc_assert (TREE_CODE (val) == INTEGER_CST);
  return (integer_zerop (val) ? false_edge : true_edge);
}

/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
   statement, determine which edge will be taken out of the block.  Return
   NULL if any edge may be taken.  */

static edge
find_taken_edge_switch_expr (basic_block bb, tree val)
{
  basic_block dest_bb;
  edge e;
  gimple switch_stmt;
  tree taken_case;

  switch_stmt = last_stmt (bb);
  taken_case = find_case_label_for_value (switch_stmt, val);
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (bb, dest_bb);
  gcc_assert (e);
  return e;
}


/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */
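/* Throughout the search below, LOW and HIGH bracket the candidate
   range: every label at an index at or above HIGH has a CASE_LOW
   greater than VAL, while the label at LOW (once LOW is nonzero) has a
   CASE_LOW less than or equal to VAL.  The loop therefore homes in on
   the only label whose range can contain VAL, falling back to the
   default case when none matches.  */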

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
        high = i;
      else
        low = i;

      if (CASE_HIGH (t) == NULL)
        {
          /* A single-valued case label.  */
1922 if (cmp == 0)
1923 return t;
1924 }
1925 else
1926 {
1927 /* A case range. We can only handle integer ranges. */
1928 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
1929 return t;
1930 }
1931 }
1932
1933 return default_case;
1934 }
1935
1936
1937 /* Dump a basic block on stderr. */
1938
1939 void
1940 gimple_debug_bb (basic_block bb)
1941 {
1942 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
1943 }
1944
1945
1946 /* Dump basic block with index N on stderr. */
1947
1948 basic_block
1949 gimple_debug_bb_n (int n)
1950 {
1951 gimple_debug_bb (BASIC_BLOCK (n));
1952 return BASIC_BLOCK (n);
1953 }
1954
1955
1956 /* Dump the CFG on stderr.
1957
1958 FLAGS are the same used by the tree dumping functions
1959 (see TDF_* in dumpfile.h). */
1960
1961 void
1962 gimple_debug_cfg (int flags)
1963 {
1964 gimple_dump_cfg (stderr, flags);
1965 }
1966
1967
1968 /* Dump the program showing basic block boundaries on the given FILE.
1969
1970 FLAGS are the same as those used by the tree dumping functions
1971 (see TDF_* in dumpfile.h). */
1972
1973 void
1974 gimple_dump_cfg (FILE *file, int flags)
1975 {
1976 if (flags & TDF_DETAILS)
1977 {
1978 dump_function_header (file, current_function_decl, flags);
1979 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
1980 n_basic_blocks, n_edges, last_basic_block);
1981
1982 brief_dump_cfg (file, flags | TDF_COMMENT);
1983 fprintf (file, "\n");
1984 }
1985
1986 if (flags & TDF_STATS)
1987 dump_cfg_stats (file);
1988
1989 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
1990 }
1991
1992
1993 /* Dump CFG statistics on FILE. */
1994
1995 void
1996 dump_cfg_stats (FILE *file)
1997 {
1998 static long max_num_merged_labels = 0;
1999 unsigned long size, total = 0;
2000 long num_edges;
2001 basic_block bb;
2002 const char * const fmt_str = "%-30s%-13s%12s\n";
2003 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2004 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2005 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2006 const char *funcname = current_function_name ();
2007
2008 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2009
2010 fprintf (file, "---------------------------------------------------------\n");
2011 fprintf (file, fmt_str, "", " Number of ", "Memory");
2012 fprintf (file, fmt_str, "", " instances ", "used ");
2013 fprintf (file, "---------------------------------------------------------\n");
2014
2015 size = n_basic_blocks * sizeof (struct basic_block_def);
2016 total += size;
2017 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2018 SCALE (size), LABEL (size));
2019
2020 num_edges = 0;
2021 FOR_EACH_BB (bb)
2022 num_edges += EDGE_COUNT (bb->succs);
2023 size = num_edges * sizeof (struct edge_def);
2024 total += size;
2025 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2026
2027 fprintf (file, "---------------------------------------------------------\n");
2028 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2029 LABEL (total));
2030 fprintf (file, "---------------------------------------------------------\n");
2031 fprintf (file, "\n");
2032
2033 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2034 max_num_merged_labels = cfg_stats.num_merged_labels;
2035
2036 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2037 cfg_stats.num_merged_labels, max_num_merged_labels);
2038
2039 fprintf (file, "\n");
2040 }
2041
2042
2043 /* Dump CFG statistics on stderr. Keep extern so that it's always
2044 linked in the final executable. */
2045
2046 DEBUG_FUNCTION void
2047 debug_cfg_stats (void)
2048 {
2049 dump_cfg_stats (stderr);
2050 }
2051
2052 /*---------------------------------------------------------------------------
2053 Miscellaneous helpers
2054 ---------------------------------------------------------------------------*/
2055
2056 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2057 flow. Transfers of control flow associated with EH are excluded. */
2058
2059 static bool
2060 call_can_make_abnormal_goto (gimple t)
2061 {
2062 /* If the function has no non-local labels, then a call cannot make an
2063 abnormal transfer of control. */
2064 if (!cfun->has_nonlocal_label
2065 && !cfun->calls_setjmp)
2066 return false;
2067
2068 /* Likewise if the call has no side effects. */
2069 if (!gimple_has_side_effects (t))
2070 return false;
2071
2072 /* Likewise if the called function is leaf. */
2073 if (gimple_call_flags (t) & ECF_LEAF)
2074 return false;
2075
2076 return true;
2077 }
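/* A hedged example (hypothetical user code, nothing from this file):

       if (setjmp (buf))
         return;
       g ();

   Here cfun->calls_setjmp is set, so the call to g () may "return"
   abnormally via longjmp back to the setjmp receiver.  Unless g () is
   free of side effects or declared ECF_LEAF, the predicate above
   returns true and the CFG builder adds an abnormal edge out of the
   call's block.  */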
2078
2079
2080 /* Return true if T can make an abnormal transfer of control flow.
2081 Transfers of control flow associated with EH are excluded. */
2082
2083 bool
2084 stmt_can_make_abnormal_goto (gimple t)
2085 {
2086 if (computed_goto_p (t))
2087 return true;
2088 if (is_gimple_call (t))
2089 return call_can_make_abnormal_goto (t);
2090 return false;
2091 }
2092
2093
2094 /* Return true if T represents a stmt that always transfers control. */
2095
2096 bool
2097 is_ctrl_stmt (gimple t)
2098 {
2099 switch (gimple_code (t))
2100 {
2101 case GIMPLE_COND:
2102 case GIMPLE_SWITCH:
2103 case GIMPLE_GOTO:
2104 case GIMPLE_RETURN:
2105 case GIMPLE_RESX:
2106 return true;
2107 default:
2108 return false;
2109 }
2110 }
2111
2112
2113 /* Return true if T is a statement that may alter the flow of control
2114 (e.g., a call to a non-returning function). */
2115
2116 bool
2117 is_ctrl_altering_stmt (gimple t)
2118 {
2119 gcc_assert (t);
2120
2121 switch (gimple_code (t))
2122 {
2123 case GIMPLE_CALL:
2124 {
2125 int flags = gimple_call_flags (t);
2126
2127 /* A call alters control flow if it can make an abnormal goto. */
2128 if (call_can_make_abnormal_goto (t))
2129 return true;
2130
2131 /* A call also alters control flow if it does not return. */
2132 if (flags & ECF_NORETURN)
2133 return true;
2134
2135 /* TM ending statements have backedges out of the transaction.
2136 Return true so we split the basic block containing them.
2137 Note that the TM_BUILTIN test is merely an optimization. */
2138 if ((flags & ECF_TM_BUILTIN)
2139 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2140 return true;
2141
2142 /* A BUILT_IN_RETURN call is the same as a return statement. */
2143 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2144 return true;
2145 }
2146 break;
2147
2148 case GIMPLE_EH_DISPATCH:
2149 /* EH_DISPATCH branches to the individual catch handlers at
2150 this level of a try or allowed-exceptions region. It can
2151 fallthru to the next statement as well. */
2152 return true;
2153
2154 case GIMPLE_ASM:
2155 if (gimple_asm_nlabels (t) > 0)
2156 return true;
2157 break;
2158
2159 CASE_GIMPLE_OMP:
2160 /* OpenMP directives alter control flow. */
2161 return true;
2162
2163 case GIMPLE_TRANSACTION:
2164 /* A transaction start alters control flow. */
2165 return true;
2166
2167 default:
2168 break;
2169 }
2170
2171 /* If a statement can throw, it alters control flow. */
2172 return stmt_can_throw_internal (t);
2173 }
2174
2175
2176 /* Return true if T is a simple local goto. */
2177
2178 bool
2179 simple_goto_p (gimple t)
2180 {
2181 return (gimple_code (t) == GIMPLE_GOTO
2182 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2183 }
2184
2185
2186 /* Return true if STMT should start a new basic block. PREV_STMT is
2187 the statement preceding STMT. It is used when STMT is a label or a
2188 case label. Labels should only start a new basic block if their
2189 previous statement wasn't a label. Otherwise, sequences of labels
2190 would generate unnecessary basic blocks that contain only a single
2191 label. */
2192
2193 static inline bool
2194 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2195 {
2196 if (stmt == NULL)
2197 return false;
2198
2199 /* Labels start a new basic block only if the preceding statement
2200 wasn't a label of the same type. This prevents the creation of
2201 consecutive blocks that have nothing but a single label. */
2202 if (gimple_code (stmt) == GIMPLE_LABEL)
2203 {
2204 /* Nonlocal and computed GOTO targets always start a new block. */
2205 if (DECL_NONLOCAL (gimple_label_label (stmt))
2206 || FORCED_LABEL (gimple_label_label (stmt)))
2207 return true;
2208
2209 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2210 {
2211 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2212 return true;
2213
2214 cfg_stats.num_merged_labels++;
2215 return false;
2216 }
2217 else
2218 return true;
2219 }
2220 else if (gimple_code (stmt) == GIMPLE_CALL
2221 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2222 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2223 start a new block. */
2224 return true;
2225
2226 return false;
2227 }
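/* Example (a sketch over hypothetical IL): in the sequence

       L1:
       L2:
       x = 1;

   only L1 opens a new basic block; L2 follows an ordinary label, so it
   is merged into L1's block and num_merged_labels is bumped.  Had L2
   been DECL_NONLOCAL or a FORCED_LABEL, it would have started a block
   of its own.  */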
2228
2229
2230 /* Return true if T should end a basic block. */
2231
2232 bool
2233 stmt_ends_bb_p (gimple t)
2234 {
2235 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2236 }
2237
2238 /* Remove block annotations and other data structures. */
2239
2240 void
2241 delete_tree_cfg_annotations (void)
2242 {
2243 vec_free (label_to_block_map);
2244 }
2245
2246
2247 /* Return the first statement in basic block BB. */
2248
2249 gimple
2250 first_stmt (basic_block bb)
2251 {
2252 gimple_stmt_iterator i = gsi_start_bb (bb);
2253 gimple stmt = NULL;
2254
2255 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2256 {
2257 gsi_next (&i);
2258 stmt = NULL;
2259 }
2260 return stmt;
2261 }
2262
2263 /* Return the first non-label statement in basic block BB. */
2264
2265 static gimple
2266 first_non_label_stmt (basic_block bb)
2267 {
2268 gimple_stmt_iterator i = gsi_start_bb (bb);
2269 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2270 gsi_next (&i);
2271 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2272 }
2273
2274 /* Return the last statement in basic block BB. */
2275
2276 gimple
2277 last_stmt (basic_block bb)
2278 {
2279 gimple_stmt_iterator i = gsi_last_bb (bb);
2280 gimple stmt = NULL;
2281
2282 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2283 {
2284 gsi_prev (&i);
2285 stmt = NULL;
2286 }
2287 return stmt;
2288 }
2289
2290 /* Return the last statement of an otherwise empty block. Return NULL
2291 if the block is totally empty, or if it contains more than one
2292 statement. */
2293
2294 gimple
2295 last_and_only_stmt (basic_block bb)
2296 {
2297 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2298 gimple last, prev;
2299
2300 if (gsi_end_p (i))
2301 return NULL;
2302
2303 last = gsi_stmt (i);
2304 gsi_prev_nondebug (&i);
2305 if (gsi_end_p (i))
2306 return last;
2307
2308 /* Empty statements should no longer appear in the instruction stream.
2309 Everything that might have appeared before should be deleted by
2310 remove_useless_stmts, and the optimizers should just gsi_remove
2311 instead of smashing with build_empty_stmt.
2312
2313 Thus the only thing that should appear here in a block containing
2314 one executable statement is a label. */
2315 prev = gsi_stmt (i);
2316 if (gimple_code (prev) == GIMPLE_LABEL)
2317 return last;
2318 else
2319 return NULL;
2320 }
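/* Example block shapes (hypothetical, debug stmts ignored):

       { x = 1; }          -> returns the assignment
       { L: x = 1; }       -> returns the assignment (a label may precede)
       { }                 -> returns NULL
       { x = 1; y = 2; }   -> returns NULL (two executable stmts)  */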
2321
2322 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2323
2324 static void
2325 reinstall_phi_args (edge new_edge, edge old_edge)
2326 {
2327 edge_var_map_vector *v;
2328 edge_var_map *vm;
2329 int i;
2330 gimple_stmt_iterator phis;
2331
2332 v = redirect_edge_var_map_vector (old_edge);
2333 if (!v)
2334 return;
2335
2336 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2337 v->iterate (i, &vm) && !gsi_end_p (phis);
2338 i++, gsi_next (&phis))
2339 {
2340 gimple phi = gsi_stmt (phis);
2341 tree result = redirect_edge_var_map_result (vm);
2342 tree arg = redirect_edge_var_map_def (vm);
2343
2344 gcc_assert (result == gimple_phi_result (phi));
2345
2346 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2347 }
2348
2349 redirect_edge_var_map_clear (old_edge);
2350 }
2351
2352 /* Returns the basic block after which the new basic block created
2353 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2354 near its "logical" location. This is of most help to humans looking
2355 at debugging dumps. */
2356
2357 static basic_block
2358 split_edge_bb_loc (edge edge_in)
2359 {
2360 basic_block dest = edge_in->dest;
2361 basic_block dest_prev = dest->prev_bb;
2362
2363 if (dest_prev)
2364 {
2365 edge e = find_edge (dest_prev, dest);
2366 if (e && !(e->flags & EDGE_COMPLEX))
2367 return edge_in->src;
2368 }
2369 return dest_prev;
2370 }
2371
2372 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2373 Abort on abnormal edges. */
2374
2375 static basic_block
2376 gimple_split_edge (edge edge_in)
2377 {
2378 basic_block new_bb, after_bb, dest;
2379 edge new_edge, e;
2380
2381 /* Abnormal edges cannot be split. */
2382 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2383
2384 dest = edge_in->dest;
2385
2386 after_bb = split_edge_bb_loc (edge_in);
2387
2388 new_bb = create_empty_bb (after_bb);
2389 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2390 new_bb->count = edge_in->count;
2391 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2392 new_edge->probability = REG_BR_PROB_BASE;
2393 new_edge->count = edge_in->count;
2394
2395 e = redirect_edge_and_branch (edge_in, new_bb);
2396 gcc_assert (e == edge_in);
2397 reinstall_phi_args (new_edge, e);
2398
2399 return new_bb;
2400 }
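/* A sketch of the transformation (block numbers are hypothetical):
   splitting the critical edge bb2 -> bb5 turns

       bb2 -> bb5      into      bb2 -> bbN -> bb5

   where the new bbN is empty, inherits the edge's count and frequency,
   and exits through a single fallthru edge; PHI arguments queued on the
   old edge are reinstalled on that fallthru edge.  */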
2401
2402
2403 /* Verify properties of the address expression T with base object BASE. */
2404
2405 static tree
2406 verify_address (tree t, tree base)
2407 {
2408 bool old_constant;
2409 bool old_side_effects;
2410 bool new_constant;
2411 bool new_side_effects;
2412
2413 old_constant = TREE_CONSTANT (t);
2414 old_side_effects = TREE_SIDE_EFFECTS (t);
2415
2416 recompute_tree_invariant_for_addr_expr (t);
2417 new_side_effects = TREE_SIDE_EFFECTS (t);
2418 new_constant = TREE_CONSTANT (t);
2419
2420 if (old_constant != new_constant)
2421 {
2422 error ("constant not recomputed when ADDR_EXPR changed");
2423 return t;
2424 }
2425 if (old_side_effects != new_side_effects)
2426 {
2427 error ("side effects not recomputed when ADDR_EXPR changed");
2428 return t;
2429 }
2430
2431 if (!(TREE_CODE (base) == VAR_DECL
2432 || TREE_CODE (base) == PARM_DECL
2433 || TREE_CODE (base) == RESULT_DECL))
2434 return NULL_TREE;
2435
2436 if (DECL_GIMPLE_REG_P (base))
2437 {
2438 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2439 return base;
2440 }
2441
2442 return NULL_TREE;
2443 }
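/* E.g. (hypothetical scenario): if a pass rewrites &a[i_1] into &a[0]
   but forgets to call recompute_tree_invariant_for_addr_expr, the
   stale TREE_CONSTANT flag will differ from the freshly recomputed one
   and the first check above reports "constant not recomputed when
   ADDR_EXPR changed".  */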
2444
2445 /* Callback for walk_tree, check that all elements with address taken are
2446 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2447 inside a PHI node. */
2448
2449 static tree
2450 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2451 {
2452 tree t = *tp, x;
2453
2454 if (TYPE_P (t))
2455 *walk_subtrees = 0;
2456
2457 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2458 #define CHECK_OP(N, MSG) \
2459 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2460 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2461
2462 switch (TREE_CODE (t))
2463 {
2464 case SSA_NAME:
2465 if (SSA_NAME_IN_FREE_LIST (t))
2466 {
2467 error ("SSA name in freelist but still referenced");
2468 return *tp;
2469 }
2470 break;
2471
2472 case INDIRECT_REF:
2473 error ("INDIRECT_REF in gimple IL");
2474 return t;
2475
2476 case MEM_REF:
2477 x = TREE_OPERAND (t, 0);
2478 if (!POINTER_TYPE_P (TREE_TYPE (x))
2479 || !is_gimple_mem_ref_addr (x))
2480 {
2481 error ("invalid first operand of MEM_REF");
2482 return x;
2483 }
2484 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2485 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2486 {
2487 error ("invalid offset operand of MEM_REF");
2488 return TREE_OPERAND (t, 1);
2489 }
2490 if (TREE_CODE (x) == ADDR_EXPR
2491 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2492 return x;
2493 *walk_subtrees = 0;
2494 break;
2495
2496 case ASSERT_EXPR:
2497 x = fold (ASSERT_EXPR_COND (t));
2498 if (x == boolean_false_node)
2499 {
2500 error ("ASSERT_EXPR with an always-false condition");
2501 return *tp;
2502 }
2503 break;
2504
2505 case MODIFY_EXPR:
2506 error ("MODIFY_EXPR not expected while having tuples");
2507 return *tp;
2508
2509 case ADDR_EXPR:
2510 {
2511 tree tem;
2512
2513 gcc_assert (is_gimple_address (t));
2514
2515 /* Skip any references (they will be checked when we recurse down the
2516 tree) and ensure that any variable used as a prefix is marked
2517 addressable. */
2518 for (x = TREE_OPERAND (t, 0);
2519 handled_component_p (x);
2520 x = TREE_OPERAND (x, 0))
2521 ;
2522
2523 if ((tem = verify_address (t, x)))
2524 return tem;
2525
2526 if (!(TREE_CODE (x) == VAR_DECL
2527 || TREE_CODE (x) == PARM_DECL
2528 || TREE_CODE (x) == RESULT_DECL))
2529 return NULL;
2530
2531 if (!TREE_ADDRESSABLE (x))
2532 {
2533 error ("address taken, but ADDRESSABLE bit not set");
2534 return x;
2535 }
2536
2537 break;
2538 }
2539
2540 case COND_EXPR:
2541 x = COND_EXPR_COND (t);
2542 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2543 {
2544 error ("non-integral used in condition");
2545 return x;
2546 }
2547 if (!is_gimple_condexpr (x))
2548 {
2549 error ("invalid conditional operand");
2550 return x;
2551 }
2552 break;
2553
2554 case NON_LVALUE_EXPR:
2555 case TRUTH_NOT_EXPR:
2556 gcc_unreachable ();
2557
2558 CASE_CONVERT:
2559 case FIX_TRUNC_EXPR:
2560 case FLOAT_EXPR:
2561 case NEGATE_EXPR:
2562 case ABS_EXPR:
2563 case BIT_NOT_EXPR:
2564 CHECK_OP (0, "invalid operand to unary operator");
2565 break;
2566
2567 case REALPART_EXPR:
2568 case IMAGPART_EXPR:
2569 case BIT_FIELD_REF:
2570 if (!is_gimple_reg_type (TREE_TYPE (t)))
2571 {
2572 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2573 return t;
2574 }
2575
2576 if (TREE_CODE (t) == BIT_FIELD_REF)
2577 {
2578 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2579 || !host_integerp (TREE_OPERAND (t, 2), 1))
2580 {
2581 error ("invalid position or size operand to BIT_FIELD_REF");
2582 return t;
2583 }
2584 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2585 && (TYPE_PRECISION (TREE_TYPE (t))
2586 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2587 {
2588 error ("integral result type precision does not match "
2589 "field size of BIT_FIELD_REF");
2590 return t;
2591 }
2592 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2593 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2594 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2595 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2596 {
2597 error ("mode precision of non-integral result does not "
2598 "match field size of BIT_FIELD_REF");
2599 return t;
2600 }
2601 }
2602 t = TREE_OPERAND (t, 0);
2603
2604 /* Fall-through. */
2605 case COMPONENT_REF:
2606 case ARRAY_REF:
2607 case ARRAY_RANGE_REF:
2608 case VIEW_CONVERT_EXPR:
2609 /* We have a nest of references. Verify that each of the operands
2610 that determine where to reference is either a constant or a variable,
2611 verify that the base is valid, and then show we've already checked
2612 the subtrees. */
2613 while (handled_component_p (t))
2614 {
2615 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2616 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2617 else if (TREE_CODE (t) == ARRAY_REF
2618 || TREE_CODE (t) == ARRAY_RANGE_REF)
2619 {
2620 CHECK_OP (1, "invalid array index");
2621 if (TREE_OPERAND (t, 2))
2622 CHECK_OP (2, "invalid array lower bound");
2623 if (TREE_OPERAND (t, 3))
2624 CHECK_OP (3, "invalid array stride");
2625 }
2626 else if (TREE_CODE (t) == BIT_FIELD_REF
2627 || TREE_CODE (t) == REALPART_EXPR
2628 || TREE_CODE (t) == IMAGPART_EXPR)
2629 {
2630 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2631 "REALPART_EXPR");
2632 return t;
2633 }
2634
2635 t = TREE_OPERAND (t, 0);
2636 }
2637
2638 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2639 {
2640 error ("invalid reference prefix");
2641 return t;
2642 }
2643 *walk_subtrees = 0;
2644 break;
2645 case PLUS_EXPR:
2646 case MINUS_EXPR:
2647 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2648 must be done using POINTER_PLUS_EXPR. */
2649 if (POINTER_TYPE_P (TREE_TYPE (t)))
2650 {
2651 error ("invalid operand to plus/minus, type is a pointer");
2652 return t;
2653 }
2654 CHECK_OP (0, "invalid operand to binary operator");
2655 CHECK_OP (1, "invalid operand to binary operator");
2656 break;
2657
2658 case POINTER_PLUS_EXPR:
2659 /* Check to make sure the first operand is a pointer or reference type. */
2660 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2661 {
2662 error ("invalid operand to pointer plus, first operand is not a pointer");
2663 return t;
2664 }
2665 /* Check to make sure the second operand is a ptrofftype. */
2666 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2667 {
2668 error ("invalid operand to pointer plus, second operand is not an "
2669 "integer type of appropriate width");
2670 return t;
2671 }
2672 /* FALLTHROUGH */
2673 case LT_EXPR:
2674 case LE_EXPR:
2675 case GT_EXPR:
2676 case GE_EXPR:
2677 case EQ_EXPR:
2678 case NE_EXPR:
2679 case UNORDERED_EXPR:
2680 case ORDERED_EXPR:
2681 case UNLT_EXPR:
2682 case UNLE_EXPR:
2683 case UNGT_EXPR:
2684 case UNGE_EXPR:
2685 case UNEQ_EXPR:
2686 case LTGT_EXPR:
2687 case MULT_EXPR:
2688 case TRUNC_DIV_EXPR:
2689 case CEIL_DIV_EXPR:
2690 case FLOOR_DIV_EXPR:
2691 case ROUND_DIV_EXPR:
2692 case TRUNC_MOD_EXPR:
2693 case CEIL_MOD_EXPR:
2694 case FLOOR_MOD_EXPR:
2695 case ROUND_MOD_EXPR:
2696 case RDIV_EXPR:
2697 case EXACT_DIV_EXPR:
2698 case MIN_EXPR:
2699 case MAX_EXPR:
2700 case LSHIFT_EXPR:
2701 case RSHIFT_EXPR:
2702 case LROTATE_EXPR:
2703 case RROTATE_EXPR:
2704 case BIT_IOR_EXPR:
2705 case BIT_XOR_EXPR:
2706 case BIT_AND_EXPR:
2707 CHECK_OP (0, "invalid operand to binary operator");
2708 CHECK_OP (1, "invalid operand to binary operator");
2709 break;
2710
2711 case CONSTRUCTOR:
2712 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2713 *walk_subtrees = 0;
2714 break;
2715
2716 case CASE_LABEL_EXPR:
2717 if (CASE_CHAIN (t))
2718 {
2719 error ("invalid CASE_CHAIN");
2720 return t;
2721 }
2722 break;
2723
2724 default:
2725 break;
2726 }
2727 return NULL;
2728
2729 #undef CHECK_OP
2730 }
2731
2732
2733 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2734 Returns true if there is an error, otherwise false. */
2735
2736 static bool
2737 verify_types_in_gimple_min_lval (tree expr)
2738 {
2739 tree op;
2740
2741 if (is_gimple_id (expr))
2742 return false;
2743
2744 if (TREE_CODE (expr) != TARGET_MEM_REF
2745 && TREE_CODE (expr) != MEM_REF)
2746 {
2747 error ("invalid expression for min lvalue");
2748 return true;
2749 }
2750
2751 /* TARGET_MEM_REFs are strange beasts. */
2752 if (TREE_CODE (expr) == TARGET_MEM_REF)
2753 return false;
2754
2755 op = TREE_OPERAND (expr, 0);
2756 if (!is_gimple_val (op))
2757 {
2758 error ("invalid operand in indirect reference");
2759 debug_generic_stmt (op);
2760 return true;
2761 }
2762 /* Memory references now generally can involve a value conversion. */
2763
2764 return false;
2765 }
2766
2767 /* Verify if EXPR is a valid GIMPLE reference expression. If
2768 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2769 if there is an error, otherwise false. */
2770
2771 static bool
2772 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2773 {
2774 while (handled_component_p (expr))
2775 {
2776 tree op = TREE_OPERAND (expr, 0);
2777
2778 if (TREE_CODE (expr) == ARRAY_REF
2779 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2780 {
2781 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2782 || (TREE_OPERAND (expr, 2)
2783 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2784 || (TREE_OPERAND (expr, 3)
2785 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2786 {
2787 error ("invalid operands to array reference");
2788 debug_generic_stmt (expr);
2789 return true;
2790 }
2791 }
2792
2793 /* Verify if the reference array element types are compatible. */
2794 if (TREE_CODE (expr) == ARRAY_REF
2795 && !useless_type_conversion_p (TREE_TYPE (expr),
2796 TREE_TYPE (TREE_TYPE (op))))
2797 {
2798 error ("type mismatch in array reference");
2799 debug_generic_stmt (TREE_TYPE (expr));
2800 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2801 return true;
2802 }
2803 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2804 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2805 TREE_TYPE (TREE_TYPE (op))))
2806 {
2807 error ("type mismatch in array range reference");
2808 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2809 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2810 return true;
2811 }
2812
2813 if ((TREE_CODE (expr) == REALPART_EXPR
2814 || TREE_CODE (expr) == IMAGPART_EXPR)
2815 && !useless_type_conversion_p (TREE_TYPE (expr),
2816 TREE_TYPE (TREE_TYPE (op))))
2817 {
2818 error ("type mismatch in real/imagpart reference");
2819 debug_generic_stmt (TREE_TYPE (expr));
2820 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2821 return true;
2822 }
2823
2824 if (TREE_CODE (expr) == COMPONENT_REF
2825 && !useless_type_conversion_p (TREE_TYPE (expr),
2826 TREE_TYPE (TREE_OPERAND (expr, 1))))
2827 {
2828 error ("type mismatch in component reference");
2829 debug_generic_stmt (TREE_TYPE (expr));
2830 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2831 return true;
2832 }
2833
2834 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2835 {
2836 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2837 that their operand is not an SSA name or an invariant when
2838 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2839 bug). Otherwise there is nothing to verify; gross mismatches at
2840 most invoke undefined behavior. */
2841 if (require_lvalue
2842 && (TREE_CODE (op) == SSA_NAME
2843 || is_gimple_min_invariant (op)))
2844 {
2845 error ("conversion of an SSA_NAME on the left hand side");
2846 debug_generic_stmt (expr);
2847 return true;
2848 }
2849 else if (TREE_CODE (op) == SSA_NAME
2850 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2851 {
2852 error ("conversion of register to a different size");
2853 debug_generic_stmt (expr);
2854 return true;
2855 }
2856 else if (!handled_component_p (op))
2857 return false;
2858 }
2859
2860 expr = op;
2861 }
2862
2863 if (TREE_CODE (expr) == MEM_REF)
2864 {
2865 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
2866 {
2867 error ("invalid address operand in MEM_REF");
2868 debug_generic_stmt (expr);
2869 return true;
2870 }
2871 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
2872 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
2873 {
2874 error ("invalid offset operand in MEM_REF");
2875 debug_generic_stmt (expr);
2876 return true;
2877 }
2878 }
2879 else if (TREE_CODE (expr) == TARGET_MEM_REF)
2880 {
2881 if (!TMR_BASE (expr)
2882 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
2883 {
2884 error ("invalid address operand in TARGET_MEM_REF");
2885 return true;
2886 }
2887 if (!TMR_OFFSET (expr)
2888 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
2889 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
2890 {
2891 error ("invalid offset operand in TARGET_MEM_REF");
2892 debug_generic_stmt (expr);
2893 return true;
2894 }
2895 }
2896
2897 return ((require_lvalue || !is_gimple_min_invariant (expr))
2898 && verify_types_in_gimple_min_lval (expr));
2899 }
2900
2901 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
2902 list of pointer-to types that is trivially convertible to DEST. */
2903
2904 static bool
2905 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2906 {
2907 tree src;
2908
2909 if (!TYPE_POINTER_TO (src_obj))
2910 return true;
2911
2912 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2913 if (useless_type_conversion_p (dest, src))
2914 return true;
2915
2916 return false;
2917 }
2918
2919 /* Return true if TYPE1 is a fixed-point type and if conversions to and
2920 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
2921
2922 static bool
2923 valid_fixed_convert_types_p (tree type1, tree type2)
2924 {
2925 return (FIXED_POINT_TYPE_P (type1)
2926 && (INTEGRAL_TYPE_P (type2)
2927 || SCALAR_FLOAT_TYPE_P (type2)
2928 || FIXED_POINT_TYPE_P (type2)));
2929 }
2930
2931 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
2932 is a problem, otherwise false. */
2933
2934 static bool
2935 verify_gimple_call (gimple stmt)
2936 {
2937 tree fn = gimple_call_fn (stmt);
2938 tree fntype, fndecl;
2939 unsigned i;
2940
2941 if (gimple_call_internal_p (stmt))
2942 {
2943 if (fn)
2944 {
2945 error ("gimple call has two targets");
2946 debug_generic_stmt (fn);
2947 return true;
2948 }
2949 }
2950 else
2951 {
2952 if (!fn)
2953 {
2954 error ("gimple call has no target");
2955 return true;
2956 }
2957 }
2958
2959 if (fn && !is_gimple_call_addr (fn))
2960 {
2961 error ("invalid function in gimple call");
2962 debug_generic_stmt (fn);
2963 return true;
2964 }
2965
2966 if (fn
2967 && (!POINTER_TYPE_P (TREE_TYPE (fn))
2968 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
2969 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
2970 {
2971 error ("non-function in gimple call");
2972 return true;
2973 }
2974
2975 fndecl = gimple_call_fndecl (stmt);
2976 if (fndecl
2977 && TREE_CODE (fndecl) == FUNCTION_DECL
2978 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
2979 && !DECL_PURE_P (fndecl)
2980 && !TREE_READONLY (fndecl))
2981 {
2982 error ("invalid pure const state for function");
2983 return true;
2984 }
2985
2986 if (gimple_call_lhs (stmt)
2987 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
2988 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
2989 {
2990 error ("invalid LHS in gimple call");
2991 return true;
2992 }
2993
2994 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
2995 {
2996 error ("LHS in noreturn call");
2997 return true;
2998 }
2999
3000 fntype = gimple_call_fntype (stmt);
3001 if (fntype
3002 && gimple_call_lhs (stmt)
3003 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3004 TREE_TYPE (fntype))
3005 /* ??? At least C++ misses conversions at assignments from
3006 void * call results.
3007 ??? Java is completely off. Especially with functions
3008 returning java.lang.Object.
3009 For now simply allow arbitrary pointer type conversions. */
3010 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3011 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3012 {
3013 error ("invalid conversion in gimple call");
3014 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3015 debug_generic_stmt (TREE_TYPE (fntype));
3016 return true;
3017 }
3018
3019 if (gimple_call_chain (stmt)
3020 && !is_gimple_val (gimple_call_chain (stmt)))
3021 {
3022 error ("invalid static chain in gimple call");
3023 debug_generic_stmt (gimple_call_chain (stmt));
3024 return true;
3025 }
3026
3027 /* If there is a static chain argument, this should not be an indirect
3028 call, and the decl should have DECL_STATIC_CHAIN set. */
3029 if (gimple_call_chain (stmt))
3030 {
3031 if (!gimple_call_fndecl (stmt))
3032 {
3033 error ("static chain in indirect gimple call");
3034 return true;
3035 }
3036 fn = TREE_OPERAND (fn, 0);
3037
3038 if (!DECL_STATIC_CHAIN (fn))
3039 {
3040 error ("static chain with function that doesn%'t use one");
3041 return true;
3042 }
3043 }
3044
3045 /* ??? The C frontend passes unpromoted arguments in case it
3046 didn't see a function declaration before the call. So for now
3047 leave the call arguments mostly unverified. Once we gimplify
3048 unit-at-a-time we have a chance to fix this. */
3049
3050 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3051 {
3052 tree arg = gimple_call_arg (stmt, i);
3053 if ((is_gimple_reg_type (TREE_TYPE (arg))
3054 && !is_gimple_val (arg))
3055 || (!is_gimple_reg_type (TREE_TYPE (arg))
3056 && !is_gimple_lvalue (arg)))
3057 {
3058 error ("invalid argument to gimple call");
3059 debug_generic_expr (arg);
3060 return true;
3061 }
3062 }
3063
3064 return false;
3065 }
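/* Two hypothetical statements the verifier above rejects, for
   illustration only:

       x_1 = foo ();     with foo declared noreturn
                         -> "LHS in noreturn call"
       x_2 = bar ();     where bar's declared return type does not
                         trivially convert to the type of x_2 (and the
                         pointer-to-pointer exception does not apply)
                         -> "invalid conversion in gimple call"  */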
3066
3067 /* Verifies the gimple comparison with the result type TYPE and
3068 the operands OP0 and OP1. */
3069
3070 static bool
3071 verify_gimple_comparison (tree type, tree op0, tree op1)
3072 {
3073 tree op0_type = TREE_TYPE (op0);
3074 tree op1_type = TREE_TYPE (op1);
3075
3076 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3077 {
3078 error ("invalid operands in gimple comparison");
3079 return true;
3080 }
3081
3082 /* For comparisons we do not record the effective type the
3083 comparison is carried out in. Instead
3084 we require that either the first operand is trivially
3085 convertible into the second, or the other way around.
3086 Because we special-case pointers to void we allow
3087 comparisons of pointers with the same mode as well. */
3088 if (!useless_type_conversion_p (op0_type, op1_type)
3089 && !useless_type_conversion_p (op1_type, op0_type)
3090 && (!POINTER_TYPE_P (op0_type)
3091 || !POINTER_TYPE_P (op1_type)
3092 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3093 {
3094 error ("mismatching comparison operand types");
3095 debug_generic_expr (op0_type);
3096 debug_generic_expr (op1_type);
3097 return true;
3098 }
3099
3100 /* The resulting type of a comparison may be an effective boolean type. */
3101 if (INTEGRAL_TYPE_P (type)
3102 && (TREE_CODE (type) == BOOLEAN_TYPE
3103 || TYPE_PRECISION (type) == 1))
3104 {
3105 if (TREE_CODE (op0_type) == VECTOR_TYPE
3106 || TREE_CODE (op1_type) == VECTOR_TYPE)
3107 {
3108 error ("vector comparison returning a boolean");
3109 debug_generic_expr (op0_type);
3110 debug_generic_expr (op1_type);
3111 return true;
3112 }
3113 }
3114 /* Or an integer vector type with the same size and element count
3115 as the comparison operand types. */
3116 else if (TREE_CODE (type) == VECTOR_TYPE
3117 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3118 {
3119 if (TREE_CODE (op0_type) != VECTOR_TYPE
3120 || TREE_CODE (op1_type) != VECTOR_TYPE)
3121 {
3122 error ("non-vector operands in vector comparison");
3123 debug_generic_expr (op0_type);
3124 debug_generic_expr (op1_type);
3125 return true;
3126 }
3127
3128 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3129 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3130 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3131 /* The result of a vector comparison is of signed
3132 integral type. */
3133 || TYPE_UNSIGNED (TREE_TYPE (type)))
3134 {
3135 error ("invalid vector comparison resulting type");
3136 debug_generic_expr (type);
3137 return true;
3138 }
3139 }
3140 else
3141 {
3142 error ("bogus comparison result type");
3143 debug_generic_expr (type);
3144 return true;
3145 }
3146
3147 return false;
3148 }
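/* Illustrative cases (hypothetical operand and result types):

       _Bool = int < int       OK: effective boolean scalar result
       v4si  = v4sf < v4sf     OK: signed integer vector result with
                               matching subparts and element size
       _Bool = v4sf < v4sf     -> "vector comparison returning a boolean"
       v4si  = int < int       -> "non-vector operands in vector
                               comparison"  */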
3149
3150 /* Verify a gimple assignment statement STMT with an unary rhs.
3151 Returns true if anything is wrong. */
3152
3153 static bool
3154 verify_gimple_assign_unary (gimple stmt)
3155 {
3156 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3157 tree lhs = gimple_assign_lhs (stmt);
3158 tree lhs_type = TREE_TYPE (lhs);
3159 tree rhs1 = gimple_assign_rhs1 (stmt);
3160 tree rhs1_type = TREE_TYPE (rhs1);
3161
3162 if (!is_gimple_reg (lhs))
3163 {
3164 error ("non-register as LHS of unary operation");
3165 return true;
3166 }
3167
3168 if (!is_gimple_val (rhs1))
3169 {
3170 error ("invalid operand in unary operation");
3171 return true;
3172 }
3173
3174 /* First handle conversions. */
3175 switch (rhs_code)
3176 {
3177 CASE_CONVERT:
3178 {
3179 /* Allow conversions from pointer type to integral type only if
3180 there is no sign or zero extension involved.
3181 For targets where the precision of ptrofftype doesn't match that
3182 of pointers we need to allow arbitrary conversions to ptrofftype. */
3183 if ((POINTER_TYPE_P (lhs_type)
3184 && INTEGRAL_TYPE_P (rhs1_type))
3185 || (POINTER_TYPE_P (rhs1_type)
3186 && INTEGRAL_TYPE_P (lhs_type)
3187 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3188 || ptrofftype_p (sizetype))))
3189 return false;
3190
3191 /* Allow conversion from integral to offset type and vice versa. */
3192 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3193 && INTEGRAL_TYPE_P (rhs1_type))
3194 || (INTEGRAL_TYPE_P (lhs_type)
3195 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3196 return false;
3197
3198 /* Otherwise assert we are converting between types of the
3199 same kind. */
3200 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3201 {
3202 error ("invalid types in nop conversion");
3203 debug_generic_expr (lhs_type);
3204 debug_generic_expr (rhs1_type);
3205 return true;
3206 }
3207
3208 return false;
3209 }
3210
3211 case ADDR_SPACE_CONVERT_EXPR:
3212 {
3213 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3214 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3215 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3216 {
3217 error ("invalid types in address space conversion");
3218 debug_generic_expr (lhs_type);
3219 debug_generic_expr (rhs1_type);
3220 return true;
3221 }
3222
3223 return false;
3224 }
3225
3226 case FIXED_CONVERT_EXPR:
3227 {
3228 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3229 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3230 {
3231 error ("invalid types in fixed-point conversion");
3232 debug_generic_expr (lhs_type);
3233 debug_generic_expr (rhs1_type);
3234 return true;
3235 }
3236
3237 return false;
3238 }
3239
3240 case FLOAT_EXPR:
3241 {
3242 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3243 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3244 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3245 {
3246 error ("invalid types in conversion to floating point");
3247 debug_generic_expr (lhs_type);
3248 debug_generic_expr (rhs1_type);
3249 return true;
3250 }
3251
3252 return false;
3253 }
3254
3255 case FIX_TRUNC_EXPR:
3256 {
3257 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3258 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3259 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3260 {
3261 error ("invalid types in conversion to integer");
3262 debug_generic_expr (lhs_type);
3263 debug_generic_expr (rhs1_type);
3264 return true;
3265 }
3266
3267 return false;
3268 }
3269
3270 case VEC_UNPACK_HI_EXPR:
3271 case VEC_UNPACK_LO_EXPR:
3272 case REDUC_MAX_EXPR:
3273 case REDUC_MIN_EXPR:
3274 case REDUC_PLUS_EXPR:
3275 case VEC_UNPACK_FLOAT_HI_EXPR:
3276 case VEC_UNPACK_FLOAT_LO_EXPR:
3277 /* FIXME. */
3278 return false;
3279
3280 case NEGATE_EXPR:
3281 case ABS_EXPR:
3282 case BIT_NOT_EXPR:
3283 case PAREN_EXPR:
3284 case NON_LVALUE_EXPR:
3285 case CONJ_EXPR:
3286 break;
3287
3288 default:
3289 gcc_unreachable ();
3290 }
3291
3292 /* For the remaining codes assert there is no conversion involved. */
3293 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3294 {
3295 error ("non-trivial conversion in unary operation");
3296 debug_generic_expr (lhs_type);
3297 debug_generic_expr (rhs1_type);
3298 return true;
3299 }
3300
3301 return false;
3302 }
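/* A few conversion shapes and how the CASE_CONVERT logic above treats
   them (types are hypothetical but representative, assuming 64-bit
   pointers):

       long  = (long) ptr      OK: no extension is hidden since the
                               pointer precision covers the result
       char  = (char) ptr      OK: truncating pointer-to-integer
       ptr   = (ptr) int       OK: integer-to-pointer is always allowed
       float = (float) int     rejected as "invalid types in nop
                               conversion"; this must be a FLOAT_EXPR  */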
3303
3304 /* Verify a gimple assignment statement STMT with a binary rhs.
3305 Returns true if anything is wrong. */
3306
3307 static bool
3308 verify_gimple_assign_binary (gimple stmt)
3309 {
3310 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3311 tree lhs = gimple_assign_lhs (stmt);
3312 tree lhs_type = TREE_TYPE (lhs);
3313 tree rhs1 = gimple_assign_rhs1 (stmt);
3314 tree rhs1_type = TREE_TYPE (rhs1);
3315 tree rhs2 = gimple_assign_rhs2 (stmt);
3316 tree rhs2_type = TREE_TYPE (rhs2);
3317
3318 if (!is_gimple_reg (lhs))
3319 {
3320 error ("non-register as LHS of binary operation");
3321 return true;
3322 }
3323
3324 if (!is_gimple_val (rhs1)
3325 || !is_gimple_val (rhs2))
3326 {
3327 error ("invalid operands in binary operation");
3328 return true;
3329 }
3330
3331 /* First handle operations that involve different types. */
3332 switch (rhs_code)
3333 {
3334 case COMPLEX_EXPR:
3335 {
3336 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3337 || !(INTEGRAL_TYPE_P (rhs1_type)
3338 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3339 || !(INTEGRAL_TYPE_P (rhs2_type)
3340 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3341 {
3342 error ("type mismatch in complex expression");
3343 debug_generic_expr (lhs_type);
3344 debug_generic_expr (rhs1_type);
3345 debug_generic_expr (rhs2_type);
3346 return true;
3347 }
3348
3349 return false;
3350 }
3351
3352 case LSHIFT_EXPR:
3353 case RSHIFT_EXPR:
3354 case LROTATE_EXPR:
3355 case RROTATE_EXPR:
3356 {
3357 /* Shifts and rotates are ok on integral types, fixed-point
3358 types and integer vector types. */
3359 if ((!INTEGRAL_TYPE_P (rhs1_type)
3360 && !FIXED_POINT_TYPE_P (rhs1_type)
3361 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3362 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3363 || (!INTEGRAL_TYPE_P (rhs2_type)
3364 /* Vector shifts of vectors are also ok. */
3365 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3366 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3367 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3368 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3369 || !useless_type_conversion_p (lhs_type, rhs1_type))
3370 {
3371 error ("type mismatch in shift expression");
3372 debug_generic_expr (lhs_type);
3373 debug_generic_expr (rhs1_type);
3374 debug_generic_expr (rhs2_type);
3375 return true;
3376 }
3377
3378 return false;
3379 }
3380
3381 case VEC_LSHIFT_EXPR:
3382 case VEC_RSHIFT_EXPR:
3383 {
3384 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3385 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3386 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3387 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3388 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3389 || (!INTEGRAL_TYPE_P (rhs2_type)
3390 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3391 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3392 || !useless_type_conversion_p (lhs_type, rhs1_type))
3393 {
3394 error ("type mismatch in vector shift expression");
3395 debug_generic_expr (lhs_type);
3396 debug_generic_expr (rhs1_type);
3397 debug_generic_expr (rhs2_type);
3398 return true;
3399 }
3400 /* For shifting a vector of non-integral components we
3401 only allow shifting by a constant multiple of the element size. */
3402 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3403 && (TREE_CODE (rhs2) != INTEGER_CST
3404 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3405 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3406 {
3407 error ("non-element sized vector shift of floating point vector");
3408 return true;
3409 }
3410
3411 return false;
3412 }
3413
3414 case WIDEN_LSHIFT_EXPR:
3415 {
3416 if (!INTEGRAL_TYPE_P (lhs_type)
3417 || !INTEGRAL_TYPE_P (rhs1_type)
3418 || TREE_CODE (rhs2) != INTEGER_CST
3419 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3420 {
3421 error ("type mismatch in widening vector shift expression");
3422 debug_generic_expr (lhs_type);
3423 debug_generic_expr (rhs1_type);
3424 debug_generic_expr (rhs2_type);
3425 return true;
3426 }
3427
3428 return false;
3429 }
3430
3431 case VEC_WIDEN_LSHIFT_HI_EXPR:
3432 case VEC_WIDEN_LSHIFT_LO_EXPR:
3433 {
3434 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3435 || TREE_CODE (lhs_type) != VECTOR_TYPE
3436 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3437 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3438 || TREE_CODE (rhs2) != INTEGER_CST
3439 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3440 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3441 {
3442 error ("type mismatch in widening vector shift expression");
3443 debug_generic_expr (lhs_type);
3444 debug_generic_expr (rhs1_type);
3445 debug_generic_expr (rhs2_type);
3446 return true;
3447 }
3448
3449 return false;
3450 }
3451
3452 case PLUS_EXPR:
3453 case MINUS_EXPR:
3454 {
3455 tree lhs_etype = lhs_type;
3456 tree rhs1_etype = rhs1_type;
3457 tree rhs2_etype = rhs2_type;
3458 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3459 {
3460 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3461 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3462 {
3463 error ("invalid non-vector operands to vector valued plus");
3464 return true;
3465 }
3466 lhs_etype = TREE_TYPE (lhs_type);
3467 rhs1_etype = TREE_TYPE (rhs1_type);
3468 rhs2_etype = TREE_TYPE (rhs2_type);
3469 }
3470 if (POINTER_TYPE_P (lhs_etype)
3471 || POINTER_TYPE_P (rhs1_etype)
3472 || POINTER_TYPE_P (rhs2_etype))
3473 {
3474 error ("invalid (pointer) operands to plus/minus");
3475 return true;
3476 }
3477
3478 /* Continue with generic binary expression handling. */
3479 break;
3480 }
3481
3482 case POINTER_PLUS_EXPR:
3483 {
3484 if (!POINTER_TYPE_P (rhs1_type)
3485 || !useless_type_conversion_p (lhs_type, rhs1_type)
3486 || !ptrofftype_p (rhs2_type))
3487 {
3488 error ("type mismatch in pointer plus expression");
3489 debug_generic_stmt (lhs_type);
3490 debug_generic_stmt (rhs1_type);
3491 debug_generic_stmt (rhs2_type);
3492 return true;
3493 }
3494
3495 return false;
3496 }
3497
3498 case TRUTH_ANDIF_EXPR:
3499 case TRUTH_ORIF_EXPR:
3500 case TRUTH_AND_EXPR:
3501 case TRUTH_OR_EXPR:
3502 case TRUTH_XOR_EXPR:
3503
3504 gcc_unreachable ();
3505
3506 case LT_EXPR:
3507 case LE_EXPR:
3508 case GT_EXPR:
3509 case GE_EXPR:
3510 case EQ_EXPR:
3511 case NE_EXPR:
3512 case UNORDERED_EXPR:
3513 case ORDERED_EXPR:
3514 case UNLT_EXPR:
3515 case UNLE_EXPR:
3516 case UNGT_EXPR:
3517 case UNGE_EXPR:
3518 case UNEQ_EXPR:
3519 case LTGT_EXPR:
3520 /* Comparisons are also binary, but the result type is not
3521 connected to the operand types. */
3522 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3523
3524 case WIDEN_MULT_EXPR:
3525 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3526 return true;
3527 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3528 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3529
3530 case WIDEN_SUM_EXPR:
3531 case VEC_WIDEN_MULT_HI_EXPR:
3532 case VEC_WIDEN_MULT_LO_EXPR:
3533 case VEC_WIDEN_MULT_EVEN_EXPR:
3534 case VEC_WIDEN_MULT_ODD_EXPR:
3535 case VEC_PACK_TRUNC_EXPR:
3536 case VEC_PACK_SAT_EXPR:
3537 case VEC_PACK_FIX_TRUNC_EXPR:
3538 /* FIXME. */
3539 return false;
3540
3541 case MULT_EXPR:
3542 case MULT_HIGHPART_EXPR:
3543 case TRUNC_DIV_EXPR:
3544 case CEIL_DIV_EXPR:
3545 case FLOOR_DIV_EXPR:
3546 case ROUND_DIV_EXPR:
3547 case TRUNC_MOD_EXPR:
3548 case CEIL_MOD_EXPR:
3549 case FLOOR_MOD_EXPR:
3550 case ROUND_MOD_EXPR:
3551 case RDIV_EXPR:
3552 case EXACT_DIV_EXPR:
3553 case MIN_EXPR:
3554 case MAX_EXPR:
3555 case BIT_IOR_EXPR:
3556 case BIT_XOR_EXPR:
3557 case BIT_AND_EXPR:
3558 /* Continue with generic binary expression handling. */
3559 break;
3560
3561 default:
3562 gcc_unreachable ();
3563 }
3564
3565 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3566 || !useless_type_conversion_p (lhs_type, rhs2_type))
3567 {
3568 error ("type mismatch in binary expression");
3569 debug_generic_stmt (lhs_type);
3570 debug_generic_stmt (rhs1_type);
3571 debug_generic_stmt (rhs2_type);
3572 return true;
3573 }
3574
3575 return false;
3576 }
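/* A short illustration of the POINTER_PLUS_EXPR rule above
   (hypothetical SSA names, using the "p+" notation of the gimple
   pretty-printer):

       p_2 = p_1 p+ 4;         OK: pointer result, ptrofftype offset
       p_2 = p_1 + 4;          a PLUS_EXPR of pointer type is rejected
                               as "invalid (pointer) operands to
                               plus/minus"  */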
3577
3578 /* Verify a gimple assignment statement STMT with a ternary rhs.
3579 Returns true if anything is wrong. */
3580
3581 static bool
3582 verify_gimple_assign_ternary (gimple stmt)
3583 {
3584 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3585 tree lhs = gimple_assign_lhs (stmt);
3586 tree lhs_type = TREE_TYPE (lhs);
3587 tree rhs1 = gimple_assign_rhs1 (stmt);
3588 tree rhs1_type = TREE_TYPE (rhs1);
3589 tree rhs2 = gimple_assign_rhs2 (stmt);
3590 tree rhs2_type = TREE_TYPE (rhs2);
3591 tree rhs3 = gimple_assign_rhs3 (stmt);
3592 tree rhs3_type = TREE_TYPE (rhs3);
3593
3594 if (!is_gimple_reg (lhs))
3595 {
3596 error ("non-register as LHS of ternary operation");
3597 return true;
3598 }
3599
3600 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3601 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3602 || !is_gimple_val (rhs2)
3603 || !is_gimple_val (rhs3))
3604 {
3605 error ("invalid operands in ternary operation");
3606 return true;
3607 }
3608
3609 /* First handle operations that involve different types. */
3610 switch (rhs_code)
3611 {
3612 case WIDEN_MULT_PLUS_EXPR:
3613 case WIDEN_MULT_MINUS_EXPR:
3614 if ((!INTEGRAL_TYPE_P (rhs1_type)
3615 && !FIXED_POINT_TYPE_P (rhs1_type))
3616 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3617 || !useless_type_conversion_p (lhs_type, rhs3_type)
3618 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3619 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3620 {
3621 error ("type mismatch in widening multiply-accumulate expression");
3622 debug_generic_expr (lhs_type);
3623 debug_generic_expr (rhs1_type);
3624 debug_generic_expr (rhs2_type);
3625 debug_generic_expr (rhs3_type);
3626 return true;
3627 }
3628 break;
3629
3630 case FMA_EXPR:
3631 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3632 || !useless_type_conversion_p (lhs_type, rhs2_type)
3633 || !useless_type_conversion_p (lhs_type, rhs3_type))
3634 {
3635 error ("type mismatch in fused multiply-add expression");
3636 debug_generic_expr (lhs_type);
3637 debug_generic_expr (rhs1_type);
3638 debug_generic_expr (rhs2_type);
3639 debug_generic_expr (rhs3_type);
3640 return true;
3641 }
3642 break;
3643
3644 case COND_EXPR:
3645 case VEC_COND_EXPR:
3646 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3647 || !useless_type_conversion_p (lhs_type, rhs3_type))
3648 {
3649 error ("type mismatch in conditional expression");
3650 debug_generic_expr (lhs_type);
3651 debug_generic_expr (rhs2_type);
3652 debug_generic_expr (rhs3_type);
3653 return true;
3654 }
3655 break;
3656
3657 case VEC_PERM_EXPR:
3658 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3659 || !useless_type_conversion_p (lhs_type, rhs2_type))
3660 {
3661 error ("type mismatch in vector permute expression");
3662 debug_generic_expr (lhs_type);
3663 debug_generic_expr (rhs1_type);
3664 debug_generic_expr (rhs2_type);
3665 debug_generic_expr (rhs3_type);
3666 return true;
3667 }
3668
3669 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3670 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3671 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3672 {
3673 error ("vector types expected in vector permute expression");
3674 debug_generic_expr (lhs_type);
3675 debug_generic_expr (rhs1_type);
3676 debug_generic_expr (rhs2_type);
3677 debug_generic_expr (rhs3_type);
3678 return true;
3679 }
3680
3681 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3682 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3683 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3684 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3685 != TYPE_VECTOR_SUBPARTS (lhs_type))
3686 {
3687 error ("vectors with different element number found "
3688 "in vector permute expression");
3689 debug_generic_expr (lhs_type);
3690 debug_generic_expr (rhs1_type);
3691 debug_generic_expr (rhs2_type);
3692 debug_generic_expr (rhs3_type);
3693 return true;
3694 }
3695
3696 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3697 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3698 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3699 {
3700 error ("invalid mask type in vector permute expression");
3701 debug_generic_expr (lhs_type);
3702 debug_generic_expr (rhs1_type);
3703 debug_generic_expr (rhs2_type);
3704 debug_generic_expr (rhs3_type);
3705 return true;
3706 }
3707
3708 return false;
3709
3710 case DOT_PROD_EXPR:
3711 case REALIGN_LOAD_EXPR:
3712 /* FIXME. */
3713 return false;
3714
3715 default:
3716 gcc_unreachable ();
3717 }
3718 return false;
3719 }
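/* For VEC_PERM_EXPR the checks above amount to requiring, e.g. with
   hypothetical vector types,

       v4sf_1 = VEC_PERM_EXPR <v4sf_2, v4sf_3, v4si_4>;

   that the two inputs and the result share a type, all four vectors
   have the same number of subparts, and the mask is an integer vector
   whose element width matches that of the data elements.  */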
3720
3721 /* Verify a gimple assignment statement STMT with a single rhs.
3722 Returns true if anything is wrong. */
3723
3724 static bool
3725 verify_gimple_assign_single (gimple stmt)
3726 {
3727 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3728 tree lhs = gimple_assign_lhs (stmt);
3729 tree lhs_type = TREE_TYPE (lhs);
3730 tree rhs1 = gimple_assign_rhs1 (stmt);
3731 tree rhs1_type = TREE_TYPE (rhs1);
3732 bool res = false;
3733
3734 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3735 {
3736 error ("non-trivial conversion at assignment");
3737 debug_generic_expr (lhs_type);
3738 debug_generic_expr (rhs1_type);
3739 return true;
3740 }
3741
3742 if (gimple_clobber_p (stmt)
3743 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3744 {
3745 error ("non-decl/MEM_REF LHS in clobber statement");
3746 debug_generic_expr (lhs);
3747 return true;
3748 }
3749
3750 if (handled_component_p (lhs))
3751 res |= verify_types_in_gimple_reference (lhs, true);
3752
3753 /* Special codes we cannot handle via their class. */
3754 switch (rhs_code)
3755 {
3756 case ADDR_EXPR:
3757 {
3758 tree op = TREE_OPERAND (rhs1, 0);
3759 if (!is_gimple_addressable (op))
3760 {
3761 error ("invalid operand in unary expression");
3762 return true;
3763 }
3764
3765 /* Technically there is no longer a need for matching types, but
3766 gimple hygiene asks for this check. In LTO we can end up
3767 combining incompatible units and thus end up with addresses
3768 of globals that change their type to a common one. */
3769 if (!in_lto_p
3770 && !types_compatible_p (TREE_TYPE (op),
3771 TREE_TYPE (TREE_TYPE (rhs1)))
3772 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3773 TREE_TYPE (op)))
3774 {
3775 error ("type mismatch in address expression");
3776 debug_generic_stmt (TREE_TYPE (rhs1));
3777 debug_generic_stmt (TREE_TYPE (op));
3778 return true;
3779 }
3780
3781 return verify_types_in_gimple_reference (op, true);
3782 }
3783
3784 /* tcc_reference */
3785 case INDIRECT_REF:
3786 error ("INDIRECT_REF in gimple IL");
3787 return true;
3788
3789 case COMPONENT_REF:
3790 case BIT_FIELD_REF:
3791 case ARRAY_REF:
3792 case ARRAY_RANGE_REF:
3793 case VIEW_CONVERT_EXPR:
3794 case REALPART_EXPR:
3795 case IMAGPART_EXPR:
3796 case TARGET_MEM_REF:
3797 case MEM_REF:
3798 if (!is_gimple_reg (lhs)
3799 && is_gimple_reg_type (TREE_TYPE (lhs)))
3800 {
3801 error ("invalid rhs for gimple memory store");
3802 debug_generic_stmt (lhs);
3803 debug_generic_stmt (rhs1);
3804 return true;
3805 }
3806 return res || verify_types_in_gimple_reference (rhs1, false);
3807
3808 /* tcc_constant */
3809 case SSA_NAME:
3810 case INTEGER_CST:
3811 case REAL_CST:
3812 case FIXED_CST:
3813 case COMPLEX_CST:
3814 case VECTOR_CST:
3815 case STRING_CST:
3816 return res;
3817
3818 /* tcc_declaration */
3819 case CONST_DECL:
3820 return res;
3821 case VAR_DECL:
3822 case PARM_DECL:
3823 if (!is_gimple_reg (lhs)
3824 && !is_gimple_reg (rhs1)
3825 && is_gimple_reg_type (TREE_TYPE (lhs)))
3826 {
3827 error ("invalid rhs for gimple memory store");
3828 debug_generic_stmt (lhs);
3829 debug_generic_stmt (rhs1);
3830 return true;
3831 }
3832 return res;
3833
3834 case CONSTRUCTOR:
3835 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3836 {
3837 unsigned int i;
3838 tree elt_i, elt_v, elt_t = NULL_TREE;
3839
3840 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3841 return res;
3842 /* For vector CONSTRUCTORs we require that either it is an empty
3843 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3844 (then the element count must be correct to cover the whole
3845 outer vector and the index must be NULL on all elements), or it
3846 is a CONSTRUCTOR of scalar elements, where as an exception we
3847 allow a smaller number of elements (assuming zero filling) and
3848 consecutive indexes as compared to NULL indexes (such
3849 CONSTRUCTORs can appear in the IL from FEs). */
3850 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3851 {
3852 if (elt_t == NULL_TREE)
3853 {
3854 elt_t = TREE_TYPE (elt_v);
3855 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3856 {
3858 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3859 TREE_TYPE (elt_t)))
3860 {
3861 error ("incorrect type of vector CONSTRUCTOR"
3862 " elements");
3863 debug_generic_stmt (rhs1);
3864 return true;
3865 }
3866 else if (CONSTRUCTOR_NELTS (rhs1)
3867 * TYPE_VECTOR_SUBPARTS (elt_t)
3868 != TYPE_VECTOR_SUBPARTS (rhs1_type))
3869 {
3870 error ("incorrect number of vector CONSTRUCTOR"
3871 " elements");
3872 debug_generic_stmt (rhs1);
3873 return true;
3874 }
3875 }
3876 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3877 elt_t))
3878 {
3879 error ("incorrect type of vector CONSTRUCTOR elements");
3880 debug_generic_stmt (rhs1);
3881 return true;
3882 }
3883 else if (CONSTRUCTOR_NELTS (rhs1)
3884 > TYPE_VECTOR_SUBPARTS (rhs1_type))
3885 {
3886 error ("incorrect number of vector CONSTRUCTOR elements");
3887 debug_generic_stmt (rhs1);
3888 return true;
3889 }
3890 }
3891 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
3892 {
3893 error ("incorrect type of vector CONSTRUCTOR elements");
3894 debug_generic_stmt (rhs1);
3895 return true;
3896 }
3897 if (elt_i != NULL_TREE
3898 && (TREE_CODE (elt_t) == VECTOR_TYPE
3899 || TREE_CODE (elt_i) != INTEGER_CST
3900 || compare_tree_int (elt_i, i) != 0))
3901 {
3902 error ("vector CONSTRUCTOR with non-NULL element index");
3903 debug_generic_stmt (rhs1);
3904 return true;
3905 }
3906 }
3907 }
3908 return res;
3909 case OBJ_TYPE_REF:
3910 case ASSERT_EXPR:
3911 case WITH_SIZE_EXPR:
3912 /* FIXME. */
3913 return res;
3914
3915 default:;
3916 }
3917
3918 return res;
3919 }
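/* Examples of the vector CONSTRUCTOR shapes accepted above
   (hypothetical 4-element vectors):

       v4si_1 = {};                     empty, implicitly zero filled
       v4si_1 = {i_2, i_3};             scalar elements with NULL indexes;
                                        fewer than four is allowed
       v4si_1 = {v2si_2, v2si_3};       smaller vectors; the count must
                                        cover all four subparts exactly
       v4si_1 = {[1] = i_2};            rejected: the index does not match
                                        the element's position  */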
3920
3921 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3922 is a problem, otherwise false. */
3923
3924 static bool
3925 verify_gimple_assign (gimple stmt)
3926 {
3927 switch (gimple_assign_rhs_class (stmt))
3928 {
3929 case GIMPLE_SINGLE_RHS:
3930 return verify_gimple_assign_single (stmt);
3931
3932 case GIMPLE_UNARY_RHS:
3933 return verify_gimple_assign_unary (stmt);
3934
3935 case GIMPLE_BINARY_RHS:
3936 return verify_gimple_assign_binary (stmt);
3937
3938 case GIMPLE_TERNARY_RHS:
3939 return verify_gimple_assign_ternary (stmt);
3940
3941 default:
3942 gcc_unreachable ();
3943 }
3944 }
3945
3946 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3947 is a problem, otherwise false. */
3948
3949 static bool
3950 verify_gimple_return (gimple stmt)
3951 {
3952 tree op = gimple_return_retval (stmt);
3953 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3954
3955 /* We cannot test for present return values as we do not fix up missing
3956 return values from the original source. */
3957 if (op == NULL)
3958 return false;
3959
3960 if (!is_gimple_val (op)
3961 && TREE_CODE (op) != RESULT_DECL)
3962 {
3963 error ("invalid operand in return statement");
3964 debug_generic_stmt (op);
3965 return true;
3966 }
3967
3968 if ((TREE_CODE (op) == RESULT_DECL
3969 && DECL_BY_REFERENCE (op))
3970 || (TREE_CODE (op) == SSA_NAME
3971 && SSA_NAME_VAR (op)
3972 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
3973 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
3974 op = TREE_TYPE (op);
3975
3976 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
3977 {
3978 error ("invalid conversion in return statement");
3979 debug_generic_stmt (restype);
3980 debug_generic_stmt (TREE_TYPE (op));
3981 return true;
3982 }
3983
3984 return false;
3985 }
3986
3987
3988 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3989 is a problem, otherwise false. */
3990
3991 static bool
3992 verify_gimple_goto (gimple stmt)
3993 {
3994 tree dest = gimple_goto_dest (stmt);
3995
3996 /* ??? We have two canonical forms of direct goto destinations, a
3997 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3998 if (TREE_CODE (dest) != LABEL_DECL
3999 && (!is_gimple_val (dest)
4000 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4001 {
4002 error ("goto destination is neither a label nor a pointer");
4003 return true;
4004 }
4005
4006 return false;
4007 }
4008
4009 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4010 is a problem, otherwise false. */
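
/* As an illustration (hypothetical GIMPLE), the checks below accept

     switch (i_1) <default: L3, case 1: L0, case 3 ... 5: L1, case 9: L2>

   where the default label comes first in the case vector, every other
   label has a CASE_LOW, all case labels share one type whose precision
   does not exceed that of the index, and the labels are sorted with
   non-overlapping ranges.  */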
4011
4012 static bool
4013 verify_gimple_switch (gimple stmt)
4014 {
4015 unsigned int i, n;
4016 tree elt, prev_upper_bound = NULL_TREE;
4017 tree index_type, elt_type = NULL_TREE;
4018
4019 if (!is_gimple_val (gimple_switch_index (stmt)))
4020 {
4021 error ("invalid operand to switch statement");
4022 debug_generic_stmt (gimple_switch_index (stmt));
4023 return true;
4024 }
4025
4026 index_type = TREE_TYPE (gimple_switch_index (stmt));
4027 if (! INTEGRAL_TYPE_P (index_type))
4028 {
4029 error ("non-integral type switch statement");
4030 debug_generic_expr (index_type);
4031 return true;
4032 }
4033
4034 elt = gimple_switch_label (stmt, 0);
4035 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4036 {
4037 error ("invalid default case label in switch statement");
4038 debug_generic_expr (elt);
4039 return true;
4040 }
4041
4042 n = gimple_switch_num_labels (stmt);
4043 for (i = 1; i < n; i++)
4044 {
4045 elt = gimple_switch_label (stmt, i);
4046
4047 if (! CASE_LOW (elt))
4048 {
4049 error ("invalid case label in switch statement");
4050 debug_generic_expr (elt);
4051 return true;
4052 }
4053 if (CASE_HIGH (elt)
4054 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4055 {
4056 error ("invalid case range in switch statement");
4057 debug_generic_expr (elt);
4058 return true;
4059 }
4060
4061 if (elt_type)
4062 {
4063 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4064 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4065 {
4066 error ("type mismatch for case label in switch statement");
4067 debug_generic_expr (elt);
4068 return true;
4069 }
4070 }
4071 else
4072 {
4073 elt_type = TREE_TYPE (CASE_LOW (elt));
4074 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4075 {
4076 error ("type precision mismatch in switch statement");
4077 return true;
4078 }
4079 }
4080
4081 if (prev_upper_bound)
4082 {
4083 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4084 {
4085 error ("case labels not sorted in switch statement");
4086 return true;
4087 }
4088 }
4089
4090 prev_upper_bound = CASE_HIGH (elt);
4091 if (! prev_upper_bound)
4092 prev_upper_bound = CASE_LOW (elt);
4093 }
4094
4095 return false;
4096 }
4097
4098 /* Verify a gimple debug statement STMT.
4099 Returns true if anything is wrong. */
4100
4101 static bool
4102 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4103 {
4104 /* There isn't much that could be wrong in a gimple debug stmt. A
4105 gimple debug bind stmt, for example, maps a tree (usually a
4106 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4107 or member of an aggregate type) to another tree that can be an
4108 arbitrary expression. These stmts expand into debug insns, and
4109 are converted to debug notes by var-tracking.c. */
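/* For example (hypothetical GIMPLE), a bind such as
     # DEBUG x => y_1 + 1
   tracks user variable x without constraining the shape of the
   bound value.  */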
4110 return false;
4111 }
4112
4113 /* Verify a gimple label statement STMT.
4114 Returns true if anything is wrong. */
4115
4116 static bool
4117 verify_gimple_label (gimple stmt)
4118 {
4119 tree decl = gimple_label_label (stmt);
4120 int uid;
4121 bool err = false;
4122
4123 if (TREE_CODE (decl) != LABEL_DECL)
4124 return true;
4125 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4126 && DECL_CONTEXT (decl) != current_function_decl)
4127 {
4128 error ("label's context is not the current function decl");
4129 err |= true;
4130 }
4131
4132 uid = LABEL_DECL_UID (decl);
4133 if (cfun->cfg
4134 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4135 {
4136 error ("incorrect entry in label_to_block_map");
4137 err |= true;
4138 }
4139
4140 uid = EH_LANDING_PAD_NR (decl);
4141 if (uid)
4142 {
4143 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4144 if (decl != lp->post_landing_pad)
4145 {
4146 error ("incorrect setting of landing pad number");
4147 err |= true;
4148 }
4149 }
4150
4151 return err;
4152 }
4153
4154 /* Verify the GIMPLE statement STMT. Returns true if there is an
4155 error, otherwise false. */
4156
4157 static bool
4158 verify_gimple_stmt (gimple stmt)
4159 {
4160 switch (gimple_code (stmt))
4161 {
4162 case GIMPLE_ASSIGN:
4163 return verify_gimple_assign (stmt);
4164
4165 case GIMPLE_LABEL:
4166 return verify_gimple_label (stmt);
4167
4168 case GIMPLE_CALL:
4169 return verify_gimple_call (stmt);
4170
4171 case GIMPLE_COND:
4172 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4173 {
4174 error ("invalid comparison code in gimple cond");
4175 return true;
4176 }
4177 if (!(!gimple_cond_true_label (stmt)
4178 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4179 || !(!gimple_cond_false_label (stmt)
4180 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4181 {
4182 error ("invalid labels in gimple cond");
4183 return true;
4184 }
4185
4186 return verify_gimple_comparison (boolean_type_node,
4187 gimple_cond_lhs (stmt),
4188 gimple_cond_rhs (stmt));
4189
4190 case GIMPLE_GOTO:
4191 return verify_gimple_goto (stmt);
4192
4193 case GIMPLE_SWITCH:
4194 return verify_gimple_switch (stmt);
4195
4196 case GIMPLE_RETURN:
4197 return verify_gimple_return (stmt);
4198
4199 case GIMPLE_ASM:
4200 return false;
4201
4202 case GIMPLE_TRANSACTION:
4203 return verify_gimple_transaction (stmt);
4204
4205 /* Tuples that do not have tree operands. */
4206 case GIMPLE_NOP:
4207 case GIMPLE_PREDICT:
4208 case GIMPLE_RESX:
4209 case GIMPLE_EH_DISPATCH:
4210 case GIMPLE_EH_MUST_NOT_THROW:
4211 return false;
4212
4213 CASE_GIMPLE_OMP:
4214 /* OpenMP directives are validated by the FE and never operated
4215 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4216 non-gimple expressions when the main index variable has had
4217 its address taken. This does not affect the loop itself
4218 because the header of a GIMPLE_OMP_FOR is merely used to determine
4219 how to set up the parallel iteration. */
4220 return false;
4221
4222 case GIMPLE_DEBUG:
4223 return verify_gimple_debug (stmt);
4224
4225 default:
4226 gcc_unreachable ();
4227 }
4228 }
4229
4230 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4231 and false otherwise. */
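
/* For instance (hypothetical GIMPLE), given

     # x_3 = PHI <x_1(2), 0(3)>

   the checks below require that x_3 is an SSA_NAME, that every argument
   is present and is a gimple value, that virtual and non-virtual
   operands are never mixed within one PHI, and, with
   ENABLE_TYPES_CHECKING, that each argument type is compatible with the
   result type.  */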
4232
4233 static bool
4234 verify_gimple_phi (gimple phi)
4235 {
4236 bool err = false;
4237 unsigned i;
4238 tree phi_result = gimple_phi_result (phi);
4239 bool virtual_p;
4240
4241 if (!phi_result)
4242 {
4243 error ("invalid PHI result");
4244 return true;
4245 }
4246
4247 virtual_p = virtual_operand_p (phi_result);
4248 if (TREE_CODE (phi_result) != SSA_NAME
4249 || (virtual_p
4250 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4251 {
4252 error ("invalid PHI result");
4253 err = true;
4254 }
4255
4256 for (i = 0; i < gimple_phi_num_args (phi); i++)
4257 {
4258 tree t = gimple_phi_arg_def (phi, i);
4259
4260 if (!t)
4261 {
4262 error ("missing PHI def");
4263 err |= true;
4264 continue;
4265 }
4266 /* Addressable variables do have SSA_NAMEs but they
4267 are not considered gimple values. */
4268 else if ((TREE_CODE (t) == SSA_NAME
4269 && virtual_p != virtual_operand_p (t))
4270 || (virtual_p
4271 && (TREE_CODE (t) != SSA_NAME
4272 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4273 || (!virtual_p
4274 && !is_gimple_val (t)))
4275 {
4276 error ("invalid PHI argument");
4277 debug_generic_expr (t);
4278 err |= true;
4279 }
4280 #ifdef ENABLE_TYPES_CHECKING
4281 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4282 {
4283 error ("incompatible types in PHI argument %u", i);
4284 debug_generic_stmt (TREE_TYPE (phi_result));
4285 debug_generic_stmt (TREE_TYPE (t));
4286 err |= true;
4287 }
4288 #endif
4289 }
4290
4291 return err;
4292 }
4293
4294 /* Verify the GIMPLE statements inside the sequence STMTS. */
4295
4296 static bool
4297 verify_gimple_in_seq_2 (gimple_seq stmts)
4298 {
4299 gimple_stmt_iterator ittr;
4300 bool err = false;
4301
4302 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4303 {
4304 gimple stmt = gsi_stmt (ittr);
4305
4306 switch (gimple_code (stmt))
4307 {
4308 case GIMPLE_BIND:
4309 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4310 break;
4311
4312 case GIMPLE_TRY:
4313 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4314 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4315 break;
4316
4317 case GIMPLE_EH_FILTER:
4318 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4319 break;
4320
4321 case GIMPLE_EH_ELSE:
4322 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4323 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4324 break;
4325
4326 case GIMPLE_CATCH:
4327 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4328 break;
4329
4330 case GIMPLE_TRANSACTION:
4331 err |= verify_gimple_transaction (stmt);
4332 break;
4333
4334 default:
4335 {
4336 bool err2 = verify_gimple_stmt (stmt);
4337 if (err2)
4338 debug_gimple_stmt (stmt);
4339 err |= err2;
4340 }
4341 }
4342 }
4343
4344 return err;
4345 }
4346
4347 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4348 is a problem, otherwise false. */
4349
4350 static bool
4351 verify_gimple_transaction (gimple stmt)
4352 {
4353 tree lab = gimple_transaction_label (stmt);
4354 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4355 return true;
4356 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4357 }
4358
4359
4360 /* Verify the GIMPLE statements inside the statement list STMTS. */
4361
4362 DEBUG_FUNCTION void
4363 verify_gimple_in_seq (gimple_seq stmts)
4364 {
4365 timevar_push (TV_TREE_STMT_VERIFY);
4366 if (verify_gimple_in_seq_2 (stmts))
4367 internal_error ("verify_gimple failed");
4368 timevar_pop (TV_TREE_STMT_VERIFY);
4369 }
4370
4371 /* Return true when T can be shared. */
4372
4373 static bool
4374 tree_node_can_be_shared (tree t)
4375 {
4376 if (IS_TYPE_OR_DECL_P (t)
4377 || is_gimple_min_invariant (t)
4378 || TREE_CODE (t) == SSA_NAME
4379 || t == error_mark_node
4380 || TREE_CODE (t) == IDENTIFIER_NODE)
4381 return true;
4382
4383 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4384 return true;
4385
4386 if (DECL_P (t))
4387 return true;
4388
4389 return false;
4390 }
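
/* For example, SSA names, types, decls and invariants such as 0 or &a
   may appear in any number of statements, whereas other expression
   nodes must have a single owner; verify_node_sharing_1 below reports
   such a node when the walk reaches it a second time.  */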
4391
4392 /* Called via walk_tree. Verify tree sharing. */
4393
4394 static tree
4395 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4396 {
4397 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4398
4399 if (tree_node_can_be_shared (*tp))
4400 {
4401 *walk_subtrees = false;
4402 return NULL;
4403 }
4404
4405 if (pointer_set_insert (visited, *tp))
4406 return *tp;
4407
4408 return NULL;
4409 }
4410
4411 /* Called via walk_gimple_stmt. Verify tree sharing. */
4412
4413 static tree
4414 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4415 {
4416 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4417 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4418 }
4419
4420 static bool eh_error_found;
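
/* Called via htab_traverse from verify_gimple_in_cfg.  Report statements
   recorded in the EH throw table that were not visited during the IL
   walk; DATA is the pointer set of visited statements.  */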
4421 static int
4422 verify_eh_throw_stmt_node (void **slot, void *data)
4423 {
4424 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4425 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4426
4427 if (!pointer_set_contains (visited, node->stmt))
4428 {
4429 error ("dead STMT in EH table");
4430 debug_gimple_stmt (node->stmt);
4431 eh_error_found = true;
4432 }
4433 return 1;
4434 }
4435
4436 /* Verify that the block of location LOC is in BLOCKS. */
4437
4438 static bool
4439 verify_location (pointer_set_t *blocks, location_t loc)
4440 {
4441 tree block = LOCATION_BLOCK (loc);
4442 if (block != NULL_TREE
4443 && !pointer_set_contains (blocks, block))
4444 {
4445 error ("location references block not in block tree");
4446 return true;
4447 }
4448 if (block != NULL_TREE)
4449 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4450 return false;
4451 }
4452
4453 /* Called via walk_tree. Verify that expressions have no blocks. */
4454
4455 static tree
4456 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4457 {
4458 if (!EXPR_P (*tp))
4459 {
4460 *walk_subtrees = false;
4461 return NULL;
4462 }
4463
4464 location_t loc = EXPR_LOCATION (*tp);
4465 if (LOCATION_BLOCK (loc) != NULL)
4466 return *tp;
4467
4468 return NULL;
4469 }
4470
4471 /* Called via walk_tree. Verify locations of expressions. */
4472
4473 static tree
4474 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4475 {
4476 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4477
4478 if (TREE_CODE (*tp) == VAR_DECL
4479 && DECL_HAS_DEBUG_EXPR_P (*tp))
4480 {
4481 tree t = DECL_DEBUG_EXPR (*tp);
4482 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4483 if (addr)
4484 return addr;
4485 }
4486 if ((TREE_CODE (*tp) == VAR_DECL
4487 || TREE_CODE (*tp) == PARM_DECL
4488 || TREE_CODE (*tp) == RESULT_DECL)
4489 && DECL_HAS_VALUE_EXPR_P (*tp))
4490 {
4491 tree t = DECL_VALUE_EXPR (*tp);
4492 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4493 if (addr)
4494 return addr;
4495 }
4496
4497 if (!EXPR_P (*tp))
4498 {
4499 *walk_subtrees = false;
4500 return NULL;
4501 }
4502
4503 location_t loc = EXPR_LOCATION (*tp);
4504 if (verify_location (blocks, loc))
4505 return *tp;
4506
4507 return NULL;
4508 }
4509
4510 /* Called via walk_gimple_op. Verify locations of expressions. */
4511
4512 static tree
4513 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4514 {
4515 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4516 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4517 }
4518
4519 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4520
4521 static void
4522 collect_subblocks (pointer_set_t *blocks, tree block)
4523 {
4524 tree t;
4525 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4526 {
4527 pointer_set_insert (blocks, t);
4528 collect_subblocks (blocks, t);
4529 }
4530 }
4531
4532 /* Verify the GIMPLE statements in the CFG of FN. */
4533
4534 DEBUG_FUNCTION void
4535 verify_gimple_in_cfg (struct function *fn)
4536 {
4537 basic_block bb;
4538 bool err = false;
4539 struct pointer_set_t *visited, *visited_stmts, *blocks;
4540
4541 timevar_push (TV_TREE_STMT_VERIFY);
4542 visited = pointer_set_create ();
4543 visited_stmts = pointer_set_create ();
4544
4545 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4546 blocks = pointer_set_create ();
4547 if (DECL_INITIAL (fn->decl))
4548 {
4549 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4550 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4551 }
4552
4553 FOR_EACH_BB_FN (bb, fn)
4554 {
4555 gimple_stmt_iterator gsi;
4556
4557 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4558 {
4559 gimple phi = gsi_stmt (gsi);
4560 bool err2 = false;
4561 unsigned i;
4562
4563 pointer_set_insert (visited_stmts, phi);
4564
4565 if (gimple_bb (phi) != bb)
4566 {
4567 error ("gimple_bb (phi) is set to a wrong basic block");
4568 err2 = true;
4569 }
4570
4571 err2 |= verify_gimple_phi (phi);
4572
4573 /* Only PHI arguments have locations. */
4574 if (gimple_location (phi) != UNKNOWN_LOCATION)
4575 {
4576 error ("PHI node with location");
4577 err2 = true;
4578 }
4579
4580 for (i = 0; i < gimple_phi_num_args (phi); i++)
4581 {
4582 tree arg = gimple_phi_arg_def (phi, i);
4583 tree addr = walk_tree (&arg, verify_node_sharing_1,
4584 visited, NULL);
4585 if (addr)
4586 {
4587 error ("incorrect sharing of tree nodes");
4588 debug_generic_expr (addr);
4589 err2 |= true;
4590 }
4591 location_t loc = gimple_phi_arg_location (phi, i);
4592 if (virtual_operand_p (gimple_phi_result (phi))
4593 && loc != UNKNOWN_LOCATION)
4594 {
4595 error ("virtual PHI with argument locations");
4596 err2 = true;
4597 }
4598 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4599 if (addr)
4600 {
4601 debug_generic_expr (addr);
4602 err2 = true;
4603 }
4604 err2 |= verify_location (blocks, loc);
4605 }
4606
4607 if (err2)
4608 debug_gimple_stmt (phi);
4609 err |= err2;
4610 }
4611
4612 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4613 {
4614 gimple stmt = gsi_stmt (gsi);
4615 bool err2 = false;
4616 struct walk_stmt_info wi;
4617 tree addr;
4618 int lp_nr;
4619
4620 pointer_set_insert (visited_stmts, stmt);
4621
4622 if (gimple_bb (stmt) != bb)
4623 {
4624 error ("gimple_bb (stmt) is set to a wrong basic block");
4625 err2 = true;
4626 }
4627
4628 err2 |= verify_gimple_stmt (stmt);
4629 err2 |= verify_location (blocks, gimple_location (stmt));
4630
4631 memset (&wi, 0, sizeof (wi));
4632 wi.info = (void *) visited;
4633 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4634 if (addr)
4635 {
4636 error ("incorrect sharing of tree nodes");
4637 debug_generic_expr (addr);
4638 err2 |= true;
4639 }
4640
4641 memset (&wi, 0, sizeof (wi));
4642 wi.info = (void *) blocks;
4643 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4644 if (addr)
4645 {
4646 debug_generic_expr (addr);
4647 err2 |= true;
4648 }
4649
4650 /* ??? Instead of not checking these stmts at all the walker
4651 should know its context via wi. */
4652 if (!is_gimple_debug (stmt)
4653 && !is_gimple_omp (stmt))
4654 {
4655 memset (&wi, 0, sizeof (wi));
4656 addr = walk_gimple_op (stmt, verify_expr, &wi);
4657 if (addr)
4658 {
4659 debug_generic_expr (addr);
4660 inform (gimple_location (stmt), "in statement");
4661 err2 |= true;
4662 }
4663 }
4664
4665 /* If the statement is marked as part of an EH region, then it is
4666 expected that the statement could throw. Verify that when we
4667 have optimizations that simplify statements such that we prove
4668 that they cannot throw, that we update other data structures
4669 to match. */
4670 lp_nr = lookup_stmt_eh_lp (stmt);
4671 if (lp_nr != 0)
4672 {
4673 if (!stmt_could_throw_p (stmt))
4674 {
4675 error ("statement marked for throw, but doesn%'t");
4676 err2 |= true;
4677 }
4678 else if (lp_nr > 0
4679 && !gsi_one_before_end_p (gsi)
4680 && stmt_can_throw_internal (stmt))
4681 {
4682 error ("statement marked for throw in middle of block");
4683 err2 |= true;
4684 }
4685 }
4686
4687 if (err2)
4688 debug_gimple_stmt (stmt);
4689 err |= err2;
4690 }
4691 }
4692
4693 eh_error_found = false;
4694 if (get_eh_throw_stmt_table (cfun))
4695 htab_traverse (get_eh_throw_stmt_table (cfun),
4696 verify_eh_throw_stmt_node,
4697 visited_stmts);
4698
4699 if (err || eh_error_found)
4700 internal_error ("verify_gimple failed");
4701
4702 pointer_set_destroy (visited);
4703 pointer_set_destroy (visited_stmts);
4704 pointer_set_destroy (blocks);
4705 verify_histograms ();
4706 timevar_pop (TV_TREE_STMT_VERIFY);
4707 }
4708
4709
4710 /* Verifies that the flow information is OK. */
4711
4712 static int
4713 gimple_verify_flow_info (void)
4714 {
4715 int err = 0;
4716 basic_block bb;
4717 gimple_stmt_iterator gsi;
4718 gimple stmt;
4719 edge e;
4720 edge_iterator ei;
4721
4722 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4723 {
4724 error ("ENTRY_BLOCK has IL associated with it");
4725 err = 1;
4726 }
4727
4728 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4729 {
4730 error ("EXIT_BLOCK has IL associated with it");
4731 err = 1;
4732 }
4733
4734 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4735 if (e->flags & EDGE_FALLTHRU)
4736 {
4737 error ("fallthru to exit from bb %d", e->src->index);
4738 err = 1;
4739 }
4740
4741 FOR_EACH_BB (bb)
4742 {
4743 bool found_ctrl_stmt = false;
4744
4745 stmt = NULL;
4746
4747 /* Skip labels on the start of basic block. */
4748 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4749 {
4750 tree label;
4751 gimple prev_stmt = stmt;
4752
4753 stmt = gsi_stmt (gsi);
4754
4755 if (gimple_code (stmt) != GIMPLE_LABEL)
4756 break;
4757
4758 label = gimple_label_label (stmt);
4759 if (prev_stmt && DECL_NONLOCAL (label))
4760 {
4761 error ("nonlocal label ");
4762 print_generic_expr (stderr, label, 0);
4763 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4764 bb->index);
4765 err = 1;
4766 }
4767
4768 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4769 {
4770 error ("EH landing pad label ");
4771 print_generic_expr (stderr, label, 0);
4772 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4773 bb->index);
4774 err = 1;
4775 }
4776
4777 if (label_to_block (label) != bb)
4778 {
4779 error ("label ");
4780 print_generic_expr (stderr, label, 0);
4781 fprintf (stderr, " to block does not match in bb %d",
4782 bb->index);
4783 err = 1;
4784 }
4785
4786 if (decl_function_context (label) != current_function_decl)
4787 {
4788 error ("label ");
4789 print_generic_expr (stderr, label, 0);
4790 fprintf (stderr, " has incorrect context in bb %d",
4791 bb->index);
4792 err = 1;
4793 }
4794 }
4795
4796 /* Verify that body of basic block BB is free of control flow. */
4797 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4798 {
4799 gimple stmt = gsi_stmt (gsi);
4800
4801 if (found_ctrl_stmt)
4802 {
4803 error ("control flow in the middle of basic block %d",
4804 bb->index);
4805 err = 1;
4806 }
4807
4808 if (stmt_ends_bb_p (stmt))
4809 found_ctrl_stmt = true;
4810
4811 if (gimple_code (stmt) == GIMPLE_LABEL)
4812 {
4813 error ("label ");
4814 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4815 fprintf (stderr, " in the middle of basic block %d", bb->index);
4816 err = 1;
4817 }
4818 }
4819
4820 gsi = gsi_last_bb (bb);
4821 if (gsi_end_p (gsi))
4822 continue;
4823
4824 stmt = gsi_stmt (gsi);
4825
4826 if (gimple_code (stmt) == GIMPLE_LABEL)
4827 continue;
4828
4829 err |= verify_eh_edges (stmt);
4830
4831 if (is_ctrl_stmt (stmt))
4832 {
4833 FOR_EACH_EDGE (e, ei, bb->succs)
4834 if (e->flags & EDGE_FALLTHRU)
4835 {
4836 error ("fallthru edge after a control statement in bb %d",
4837 bb->index);
4838 err = 1;
4839 }
4840 }
4841
4842 if (gimple_code (stmt) != GIMPLE_COND)
4843 {
4844 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
4845 after anything other than a GIMPLE_COND statement. */
4846 FOR_EACH_EDGE (e, ei, bb->succs)
4847 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4848 {
4849 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4850 bb->index);
4851 err = 1;
4852 }
4853 }
4854
4855 switch (gimple_code (stmt))
4856 {
4857 case GIMPLE_COND:
4858 {
4859 edge true_edge;
4860 edge false_edge;
4861
4862 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4863
4864 if (!true_edge
4865 || !false_edge
4866 || !(true_edge->flags & EDGE_TRUE_VALUE)
4867 || !(false_edge->flags & EDGE_FALSE_VALUE)
4868 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4869 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4870 || EDGE_COUNT (bb->succs) >= 3)
4871 {
4872 error ("wrong outgoing edge flags at end of bb %d",
4873 bb->index);
4874 err = 1;
4875 }
4876 }
4877 break;
4878
4879 case GIMPLE_GOTO:
4880 if (simple_goto_p (stmt))
4881 {
4882 error ("explicit goto at end of bb %d", bb->index);
4883 err = 1;
4884 }
4885 else
4886 {
4887 /* FIXME. We should double check that the labels in the
4888 destination blocks have their address taken. */
4889 FOR_EACH_EDGE (e, ei, bb->succs)
4890 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4891 | EDGE_FALSE_VALUE))
4892 || !(e->flags & EDGE_ABNORMAL))
4893 {
4894 error ("wrong outgoing edge flags at end of bb %d",
4895 bb->index);
4896 err = 1;
4897 }
4898 }
4899 break;
4900
4901 case GIMPLE_CALL:
4902 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4903 break;
4904 /* ... fallthru ... */
4905 case GIMPLE_RETURN:
4906 if (!single_succ_p (bb)
4907 || (single_succ_edge (bb)->flags
4908 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4909 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4910 {
4911 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4912 err = 1;
4913 }
4914 if (single_succ (bb) != EXIT_BLOCK_PTR)
4915 {
4916 error ("return edge does not point to exit in bb %d",
4917 bb->index);
4918 err = 1;
4919 }
4920 break;
4921
4922 case GIMPLE_SWITCH:
4923 {
4924 tree prev;
4925 edge e;
4926 size_t i, n;
4927
4928 n = gimple_switch_num_labels (stmt);
4929
4930 /* Mark all the destination basic blocks. */
4931 for (i = 0; i < n; ++i)
4932 {
4933 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4934 basic_block label_bb = label_to_block (lab);
4935 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4936 label_bb->aux = (void *)1;
4937 }
4938
4939 /* Verify that the case labels are sorted. */
4940 prev = gimple_switch_label (stmt, 0);
4941 for (i = 1; i < n; ++i)
4942 {
4943 tree c = gimple_switch_label (stmt, i);
4944 if (!CASE_LOW (c))
4945 {
4946 error ("found default case not at the start of "
4947 "case vector");
4948 err = 1;
4949 continue;
4950 }
4951 if (CASE_LOW (prev)
4952 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4953 {
4954 error ("case labels not sorted: ");
4955 print_generic_expr (stderr, prev, 0);
4956 fprintf (stderr, " is greater than ");
4957 print_generic_expr (stderr, c, 0);
4958 fprintf (stderr, " but comes before it.\n");
4959 err = 1;
4960 }
4961 prev = c;
4962 }
4963 /* VRP will remove the default case if it can prove it will
4964 never be executed. So do not verify there always exists
4965 a default case here. */
4966
4967 FOR_EACH_EDGE (e, ei, bb->succs)
4968 {
4969 if (!e->dest->aux)
4970 {
4971 error ("extra outgoing edge %d->%d",
4972 bb->index, e->dest->index);
4973 err = 1;
4974 }
4975
4976 e->dest->aux = (void *)2;
4977 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4978 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4979 {
4980 error ("wrong outgoing edge flags at end of bb %d",
4981 bb->index);
4982 err = 1;
4983 }
4984 }
4985
4986 /* Check that we have all of them. */
4987 for (i = 0; i < n; ++i)
4988 {
4989 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4990 basic_block label_bb = label_to_block (lab);
4991
4992 if (label_bb->aux != (void *)2)
4993 {
4994 error ("missing edge %i->%i", bb->index, label_bb->index);
4995 err = 1;
4996 }
4997 }
4998
4999 FOR_EACH_EDGE (e, ei, bb->succs)
5000 e->dest->aux = (void *)0;
5001 }
5002 break;
5003
5004 case GIMPLE_EH_DISPATCH:
5005 err |= verify_eh_dispatch_edge (stmt);
5006 break;
5007
5008 default:
5009 break;
5010 }
5011 }
5012
5013 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5014 verify_dominators (CDI_DOMINATORS);
5015
5016 return err;
5017 }
5018
5019
5020 /* Updates phi nodes after creating a forwarder block joined
5021 by edge FALLTHRU. */
5022
5023 static void
5024 gimple_make_forwarder_block (edge fallthru)
5025 {
5026 edge e;
5027 edge_iterator ei;
5028 basic_block dummy, bb;
5029 tree var;
5030 gimple_stmt_iterator gsi;
5031
5032 dummy = fallthru->src;
5033 bb = fallthru->dest;
5034
5035 if (single_pred_p (bb))
5036 return;
5037
5038 /* If we redirected a branch we must create new PHI nodes at the
5039 start of BB. */
5040 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5041 {
5042 gimple phi, new_phi;
5043
5044 phi = gsi_stmt (gsi);
5045 var = gimple_phi_result (phi);
5046 new_phi = create_phi_node (var, bb);
5047 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5048 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5049 UNKNOWN_LOCATION);
5050 }
5051
5052 /* Add the arguments we have stored on edges. */
5053 FOR_EACH_EDGE (e, ei, bb->preds)
5054 {
5055 if (e == fallthru)
5056 continue;
5057
5058 flush_pending_stmts (e);
5059 }
5060 }
5061
5062
5063 /* Return a non-special label in the head of basic block BB.
5064 Create one if it doesn't exist. */
5065
5066 tree
5067 gimple_block_label (basic_block bb)
5068 {
5069 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5070 bool first = true;
5071 tree label;
5072 gimple stmt;
5073
5074 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5075 {
5076 stmt = gsi_stmt (i);
5077 if (gimple_code (stmt) != GIMPLE_LABEL)
5078 break;
5079 label = gimple_label_label (stmt);
5080 if (!DECL_NONLOCAL (label))
5081 {
5082 if (!first)
5083 gsi_move_before (&i, &s);
5084 return label;
5085 }
5086 }
5087
5088 label = create_artificial_label (UNKNOWN_LOCATION);
5089 stmt = gimple_build_label (label);
5090 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5091 return label;
5092 }
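
/* A typical use (sketch): redirecting a GIMPLE_SWITCH edge to DEST
   retargets the affected case labels via

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   as done in gimple_redirect_edge_and_branch below.  */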
5093
5094
5095 /* Attempt to perform edge redirection by replacing a possibly complex
5096 jump instruction by a goto or by removing the jump completely.
5097 This can apply only if all edges now point to the same block. The
5098 parameters and return values are equivalent to
5099 redirect_edge_and_branch. */
5100
5101 static edge
5102 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5103 {
5104 basic_block src = e->src;
5105 gimple_stmt_iterator i;
5106 gimple stmt;
5107
5108 /* We can replace or remove a complex jump only when we have exactly
5109 two edges. */
5110 if (EDGE_COUNT (src->succs) != 2
5111 /* Verify that all targets will be TARGET. Specifically, the
5112 edge that is not E must also go to TARGET. */
5113 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5114 return NULL;
5115
5116 i = gsi_last_bb (src);
5117 if (gsi_end_p (i))
5118 return NULL;
5119
5120 stmt = gsi_stmt (i);
5121
5122 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5123 {
5124 gsi_remove (&i, true);
5125 e = ssa_redirect_edge (e, target);
5126 e->flags = EDGE_FALLTHRU;
5127 return e;
5128 }
5129
5130 return NULL;
5131 }
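
/* For example (hypothetical GIMPLE), when BB ends in

     if (x_1 > 0) goto <bb 5>; else goto <bb 4>;

   and the edge to <bb 4> is redirected to <bb 5>, both edges then lead
   to <bb 5>, so the GIMPLE_COND is removed and the remaining edge
   becomes a plain fallthru.  */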
5132
5133
5134 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5135 edge representing the redirected branch. */
5136
5137 static edge
5138 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5139 {
5140 basic_block bb = e->src;
5141 gimple_stmt_iterator gsi;
5142 edge ret;
5143 gimple stmt;
5144
5145 if (e->flags & EDGE_ABNORMAL)
5146 return NULL;
5147
5148 if (e->dest == dest)
5149 return NULL;
5150
5151 if (e->flags & EDGE_EH)
5152 return redirect_eh_edge (e, dest);
5153
5154 if (e->src != ENTRY_BLOCK_PTR)
5155 {
5156 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5157 if (ret)
5158 return ret;
5159 }
5160
5161 gsi = gsi_last_bb (bb);
5162 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5163
5164 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5165 {
5166 case GIMPLE_COND:
5167 /* For COND_EXPR, we only need to redirect the edge. */
5168 break;
5169
5170 case GIMPLE_GOTO:
5171 /* No non-abnormal edges should lead from a non-simple goto, and
5172 simple ones should be represented implicitly. */
5173 gcc_unreachable ();
5174
5175 case GIMPLE_SWITCH:
5176 {
5177 tree label = gimple_block_label (dest);
5178 tree cases = get_cases_for_edge (e, stmt);
5179
5180 /* If we have a list of cases associated with E, then use it
5181 as it's a lot faster than walking the entire case vector. */
5182 if (cases)
5183 {
5184 edge e2 = find_edge (e->src, dest);
5185 tree last, first;
5186
5187 first = cases;
5188 while (cases)
5189 {
5190 last = cases;
5191 CASE_LABEL (cases) = label;
5192 cases = CASE_CHAIN (cases);
5193 }
5194
5195 /* If there was already an edge in the CFG, then we need
5196 to move all the cases associated with E to E2. */
5197 if (e2)
5198 {
5199 tree cases2 = get_cases_for_edge (e2, stmt);
5200
5201 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5202 CASE_CHAIN (cases2) = first;
5203 }
5204 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5205 }
5206 else
5207 {
5208 size_t i, n = gimple_switch_num_labels (stmt);
5209
5210 for (i = 0; i < n; i++)
5211 {
5212 tree elt = gimple_switch_label (stmt, i);
5213 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5214 CASE_LABEL (elt) = label;
5215 }
5216 }
5217 }
5218 break;
5219
5220 case GIMPLE_ASM:
5221 {
5222 int i, n = gimple_asm_nlabels (stmt);
5223 tree label = NULL;
5224
5225 for (i = 0; i < n; ++i)
5226 {
5227 tree cons = gimple_asm_label_op (stmt, i);
5228 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5229 {
5230 if (!label)
5231 label = gimple_block_label (dest);
5232 TREE_VALUE (cons) = label;
5233 }
5234 }
5235
5236 /* If we didn't find any label matching the former edge in the
5237 asm labels, we must be redirecting the fallthrough
5238 edge. */
5239 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5240 }
5241 break;
5242
5243 case GIMPLE_RETURN:
5244 gsi_remove (&gsi, true);
5245 e->flags |= EDGE_FALLTHRU;
5246 break;
5247
5248 case GIMPLE_OMP_RETURN:
5249 case GIMPLE_OMP_CONTINUE:
5250 case GIMPLE_OMP_SECTIONS_SWITCH:
5251 case GIMPLE_OMP_FOR:
5252 /* The edges from OMP constructs can be simply redirected. */
5253 break;
5254
5255 case GIMPLE_EH_DISPATCH:
5256 if (!(e->flags & EDGE_FALLTHRU))
5257 redirect_eh_dispatch_edge (stmt, e, dest);
5258 break;
5259
5260 case GIMPLE_TRANSACTION:
5261 /* The ABORT edge has a stored label associated with it; otherwise
5262 the edges are simply redirectable. */
5263 if (e->flags == 0)
5264 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5265 break;
5266
5267 default:
5268 /* Otherwise it must be a fallthru edge, and we don't need to
5269 do anything besides redirecting it. */
5270 gcc_assert (e->flags & EDGE_FALLTHRU);
5271 break;
5272 }
5273
5274 /* Update/insert PHI nodes as necessary. */
5275
5276 /* Now update the edges in the CFG. */
5277 e = ssa_redirect_edge (e, dest);
5278
5279 return e;
5280 }
5281
5282 /* Returns true if it is possible to remove edge E by redirecting
5283 it to the destination of the other edge from E->src. */
5284
5285 static bool
5286 gimple_can_remove_branch_p (const_edge e)
5287 {
5288 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5289 return false;
5290
5291 return true;
5292 }
5293
5294 /* Simple wrapper, as we can always redirect fallthru edges. */
5295
5296 static basic_block
5297 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5298 {
5299 e = gimple_redirect_edge_and_branch (e, dest);
5300 gcc_assert (e);
5301
5302 return NULL;
5303 }
5304
5305
5306 /* Splits basic block BB after statement STMT (but at least after the
5307 labels). If STMT is NULL, BB is split just after the labels. */
5308
5309 static basic_block
5310 gimple_split_block (basic_block bb, void *stmt)
5311 {
5312 gimple_stmt_iterator gsi;
5313 gimple_stmt_iterator gsi_tgt;
5314 gimple act;
5315 gimple_seq list;
5316 basic_block new_bb;
5317 edge e;
5318 edge_iterator ei;
5319
5320 new_bb = create_empty_bb (bb);
5321
5322 /* Redirect the outgoing edges. */
5323 new_bb->succs = bb->succs;
5324 bb->succs = NULL;
5325 FOR_EACH_EDGE (e, ei, new_bb->succs)
5326 e->src = new_bb;
5327
5328 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5329 stmt = NULL;
5330
5331 /* Move everything from GSI to the new basic block. */
5332 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5333 {
5334 act = gsi_stmt (gsi);
5335 if (gimple_code (act) == GIMPLE_LABEL)
5336 continue;
5337
5338 if (!stmt)
5339 break;
5340
5341 if (stmt == act)
5342 {
5343 gsi_next (&gsi);
5344 break;
5345 }
5346 }
5347
5348 if (gsi_end_p (gsi))
5349 return new_bb;
5350
5351 /* Split the statement list - avoid re-creating new containers as this
5352 brings ugly quadratic memory consumption in the inliner.
5353 (We are still quadratic since we need to update stmt BB pointers,
5354 sadly.) */
5355 gsi_split_seq_before (&gsi, &list);
5356 set_bb_seq (new_bb, list);
5357 for (gsi_tgt = gsi_start (list);
5358 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5359 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5360
5361 return new_bb;
5362 }
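
/* A sketch of typical use, via the split_block hook:

     basic_block new_bb = split_block (bb, stmt)->dest;

   leaves BB ending just after STMT, with a single fallthru edge to
   NEW_BB, which holds the remaining statements.  */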
5363
5364
5365 /* Moves basic block BB after block AFTER. */
5366
5367 static bool
5368 gimple_move_block_after (basic_block bb, basic_block after)
5369 {
5370 if (bb->prev_bb == after)
5371 return true;
5372
5373 unlink_block (bb);
5374 link_block (bb, after);
5375
5376 return true;
5377 }
5378
5379
5380 /* Return TRUE if block BB has no executable statements, otherwise return
5381 FALSE. */
5382
5383 static bool
5384 gimple_empty_block_p (basic_block bb)
5385 {
5386 /* BB must have no executable statements. */
5387 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5388 if (phi_nodes (bb))
5389 return false;
5390 if (gsi_end_p (gsi))
5391 return true;
5392 if (is_gimple_debug (gsi_stmt (gsi)))
5393 gsi_next_nondebug (&gsi);
5394 return gsi_end_p (gsi);
5395 }
5396
5397
5398 /* Split a basic block if it ends with a conditional branch and if the
5399 other part of the block is not empty. */
5400
5401 static basic_block
5402 gimple_split_block_before_cond_jump (basic_block bb)
5403 {
5404 gimple last, split_point;
5405 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5406 if (gsi_end_p (gsi))
5407 return NULL;
5408 last = gsi_stmt (gsi);
5409 if (gimple_code (last) != GIMPLE_COND
5410 && gimple_code (last) != GIMPLE_SWITCH)
5411 return NULL;
5412 gsi_prev_nondebug (&gsi);
5413 split_point = gsi_stmt (gsi);
5414 return split_block (bb, split_point)->dest;
5415 }
5416
5417
5418 /* Return true if basic block BB can be duplicated. */
5419
5420 static bool
5421 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5422 {
5423 return true;
5424 }
5425
5426 /* Create a duplicate of the basic block BB. NOTE: This does not
5427 preserve SSA form. */
5428
5429 static basic_block
5430 gimple_duplicate_bb (basic_block bb)
5431 {
5432 basic_block new_bb;
5433 gimple_stmt_iterator gsi, gsi_tgt;
5434 gimple_seq phis = phi_nodes (bb);
5435 gimple phi, stmt, copy;
5436
5437 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5438
5439 /* Copy the PHI nodes. We ignore PHI node arguments here because
5440 the incoming edges have not been setup yet. */
5441 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5442 {
5443 phi = gsi_stmt (gsi);
5444 copy = create_phi_node (NULL_TREE, new_bb);
5445 create_new_def_for (gimple_phi_result (phi), copy,
5446 gimple_phi_result_ptr (copy));
5447 gimple_set_uid (copy, gimple_uid (phi));
5448 }
5449
5450 gsi_tgt = gsi_start_bb (new_bb);
5451 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5452 {
5453 def_operand_p def_p;
5454 ssa_op_iter op_iter;
5455 tree lhs;
5456
5457 stmt = gsi_stmt (gsi);
5458 if (gimple_code (stmt) == GIMPLE_LABEL)
5459 continue;
5460
5461 /* Don't duplicate label debug stmts. */
5462 if (gimple_debug_bind_p (stmt)
5463 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5464 == LABEL_DECL)
5465 continue;
5466
5467 /* Create a new copy of STMT and duplicate STMT's virtual
5468 operands. */
5469 copy = gimple_copy (stmt);
5470 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5471
5472 maybe_duplicate_eh_stmt (copy, stmt);
5473 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5474
5475 /* When copying around a stmt writing into a local non-user
5476 aggregate, make sure it won't share stack slot with other
5477 vars. */
5478 lhs = gimple_get_lhs (stmt);
5479 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5480 {
5481 tree base = get_base_address (lhs);
5482 if (base
5483 && (TREE_CODE (base) == VAR_DECL
5484 || TREE_CODE (base) == RESULT_DECL)
5485 && DECL_IGNORED_P (base)
5486 && !TREE_STATIC (base)
5487 && !DECL_EXTERNAL (base)
5488 && (TREE_CODE (base) != VAR_DECL
5489 || !DECL_HAS_VALUE_EXPR_P (base)))
5490 DECL_NONSHAREABLE (base) = 1;
5491 }
5492
5493 /* Create new names for all the definitions created by COPY and
5494 add replacement mappings for each new name. */
5495 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5496 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5497 }
5498
5499 return new_bb;
5500 }
5501
5502 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5503
5504 static void
5505 add_phi_args_after_copy_edge (edge e_copy)
5506 {
5507 basic_block bb, bb_copy = e_copy->src, dest;
5508 edge e;
5509 edge_iterator ei;
5510 gimple phi, phi_copy;
5511 tree def;
5512 gimple_stmt_iterator psi, psi_copy;
5513
5514 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5515 return;
5516
5517 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5518
5519 if (e_copy->dest->flags & BB_DUPLICATED)
5520 dest = get_bb_original (e_copy->dest);
5521 else
5522 dest = e_copy->dest;
5523
5524 e = find_edge (bb, dest);
5525 if (!e)
5526 {
5527 /* During loop unrolling the target of the latch edge is copied.
5528 In this case we are not looking for the edge to DEST, but for
5529 the edge to the duplicated block whose original was DEST. */
5530 FOR_EACH_EDGE (e, ei, bb->succs)
5531 {
5532 if ((e->dest->flags & BB_DUPLICATED)
5533 && get_bb_original (e->dest) == dest)
5534 break;
5535 }
5536
5537 gcc_assert (e != NULL);
5538 }
5539
5540 for (psi = gsi_start_phis (e->dest),
5541 psi_copy = gsi_start_phis (e_copy->dest);
5542 !gsi_end_p (psi);
5543 gsi_next (&psi), gsi_next (&psi_copy))
5544 {
5545 phi = gsi_stmt (psi);
5546 phi_copy = gsi_stmt (psi_copy);
5547 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5548 add_phi_arg (phi_copy, def, e_copy,
5549 gimple_phi_arg_location_from_edge (phi, e));
5550 }
5551 }
5552
5553
5554 /* Basic block BB_COPY was created by code duplication. Add phi node
5555 arguments for edges going out of BB_COPY. The blocks that were
5556 duplicated have BB_DUPLICATED set. */
5557
5558 void
5559 add_phi_args_after_copy_bb (basic_block bb_copy)
5560 {
5561 edge e_copy;
5562 edge_iterator ei;
5563
5564 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5565 {
5566 add_phi_args_after_copy_edge (e_copy);
5567 }
5568 }
5569
5570 /* Blocks in REGION_COPY array of length N_REGION were created by
5571 duplication of basic blocks. Add phi node arguments for edges
5572 going from these blocks. If E_COPY is not NULL, also add
5573 phi node arguments for its destination. */
5574
5575 void
5576 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5577 edge e_copy)
5578 {
5579 unsigned i;
5580
5581 for (i = 0; i < n_region; i++)
5582 region_copy[i]->flags |= BB_DUPLICATED;
5583
5584 for (i = 0; i < n_region; i++)
5585 add_phi_args_after_copy_bb (region_copy[i]);
5586 if (e_copy)
5587 add_phi_args_after_copy_edge (e_copy);
5588
5589 for (i = 0; i < n_region; i++)
5590 region_copy[i]->flags &= ~BB_DUPLICATED;
5591 }
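
/* A typical sequence (sketch), as in gimple_duplicate_sese_region below:

     copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy,
	       loop, split_edge_bb_loc (entry), true);
     add_phi_args_after_copy (region_copy, n_region, NULL);

   fills in the PHI arguments on the edges leaving the copied blocks,
   which copy_bbs does not set up.  */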
5592
5593 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5594 important exit edge EXIT. By important we mean that no SSA name defined
5595 inside region is live over the other exit edges of the region. All entry
5596 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5597 to the duplicate of the region. Dominance and loop information is
5598 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5599 UPDATE_DOMINANCE is false then we assume that the caller will update the
5600 dominance information after calling this function. The new basic
5601 blocks are stored to REGION_COPY in the same order as they had in REGION,
5602 provided that REGION_COPY is not NULL.
5603 The function returns false if it is unable to copy the region,
5604 true otherwise. */
5605
5606 bool
5607 gimple_duplicate_sese_region (edge entry, edge exit,
5608 basic_block *region, unsigned n_region,
5609 basic_block *region_copy,
5610 bool update_dominance)
5611 {
5612 unsigned i;
5613 bool free_region_copy = false, copying_header = false;
5614 struct loop *loop = entry->dest->loop_father;
5615 edge exit_copy;
5616 vec<basic_block> doms;
5617 edge redirected;
5618 int total_freq = 0, entry_freq = 0;
5619 gcov_type total_count = 0, entry_count = 0;
5620
5621 if (!can_copy_bbs_p (region, n_region))
5622 return false;
5623
5624 /* Some sanity checking. Note that we do not check for all possible
5625 misuses of the functions. That is, if you ask to copy something weird,
5626 it will work, but the state of structures probably will not be
5627 correct. */
5628 for (i = 0; i < n_region; i++)
5629 {
5630 /* We do not handle subloops, i.e. all the blocks must belong to the
5631 same loop. */
5632 if (region[i]->loop_father != loop)
5633 return false;
5634
5635 if (region[i] != entry->dest
5636 && region[i] == loop->header)
5637 return false;
5638 }
5639
5640 set_loop_copy (loop, loop);
5641
5642 /* In case the function is used for loop header copying (which is the primary
5643 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5644 if (loop->header == entry->dest)
5645 {
5646 copying_header = true;
5647 set_loop_copy (loop, loop_outer (loop));
5648
5649 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5650 return false;
5651
5652 for (i = 0; i < n_region; i++)
5653 if (region[i] != exit->src
5654 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5655 return false;
5656 }
5657
5658 if (!region_copy)
5659 {
5660 region_copy = XNEWVEC (basic_block, n_region);
5661 free_region_copy = true;
5662 }
5663
5664 initialize_original_copy_tables ();
5665
5666 /* Record blocks outside the region that are dominated by something
5667 inside. */
5668 if (update_dominance)
5669 {
5670 doms.create (0);
5671 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5672 }
5673
5674 if (entry->dest->count)
5675 {
5676 total_count = entry->dest->count;
5677 entry_count = entry->count;
5678 /* Fix up corner cases, to avoid division by zero or creation of negative
5679 frequencies. */
5680 if (entry_count > total_count)
5681 entry_count = total_count;
5682 }
5683 else
5684 {
5685 total_freq = entry->dest->frequency;
5686 entry_freq = EDGE_FREQUENCY (entry);
5687 /* Fix up corner cases, to avoid division by zero or creation of negative
5688 frequencies. */
5689 if (total_freq == 0)
5690 total_freq = 1;
5691 else if (entry_freq > total_freq)
5692 entry_freq = total_freq;
5693 }
5694
5695 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5696 split_edge_bb_loc (entry), update_dominance);
5697 if (total_count)
5698 {
5699 scale_bbs_frequencies_gcov_type (region, n_region,
5700 total_count - entry_count,
5701 total_count);
5702 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5703 total_count);
5704 }
5705 else
5706 {
5707 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5708 total_freq);
5709 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5710 }
5711
5712 if (copying_header)
5713 {
5714 loop->header = exit->dest;
5715 loop->latch = exit->src;
5716 }
5717
5718 /* Redirect the entry and add the phi node arguments. */
5719 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5720 gcc_assert (redirected != NULL);
5721 flush_pending_stmts (entry);
5722
5723 /* Concerning updating of dominators: We must recount dominators
5724 for entry block and its copy. Anything that is outside of the
5725 region, but was dominated by something inside needs recounting as
5726 well. */
5727 if (update_dominance)
5728 {
5729 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5730 doms.safe_push (get_bb_original (entry->dest));
5731 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5732 doms.release ();
5733 }
5734
5735 /* Add the other PHI node arguments. */
5736 add_phi_args_after_copy (region_copy, n_region, NULL);
5737
5738 if (free_region_copy)
5739 free (region_copy);
5740
5741 free_original_copy_tables ();
5742 return true;
5743 }
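
/* In its primary use, loop header copying, this function conceptually
   rewrites

     while (cond) body;

   into

     if (cond) do body; while (cond);

   The entry edge is redirected to a copy of the header, so the loop
   proper is entered only after the condition has been tested once.  */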
5744
5745 /* Checks whether BB is part of the region defined by the N_REGION blocks in BBS. */
5746 static bool
5747 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5748 {
5749 unsigned int n;
5750
5751 for (n = 0; n < n_region; n++)
5752 {
5753 if (bb == bbs[n])
5754 return true;
5755 }
5756 return false;
5757 }
5758
5759 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5760 are stored to REGION_COPY in the same order in which they appear
5761 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5762 the region, EXIT an exit from it. The condition guarding EXIT
5763 is moved to ENTRY. Returns true if duplication succeeds, false
5764 otherwise.
5765
5766 For example,
5767
5768 some_code;
5769 if (cond)
5770 A;
5771 else
5772 B;
5773
5774 is transformed to
5775
5776 if (cond)
5777 {
5778 some_code;
5779 A;
5780 }
5781 else
5782 {
5783 some_code;
5784 B;
5785 }
5786 */
5787
5788 bool
5789 gimple_duplicate_sese_tail (edge entry, edge exit,
5790 basic_block *region, unsigned n_region,
5791 basic_block *region_copy)
5792 {
5793 unsigned i;
5794 bool free_region_copy = false;
5795 struct loop *loop = exit->dest->loop_father;
5796 struct loop *orig_loop = entry->dest->loop_father;
5797 basic_block switch_bb, entry_bb, nentry_bb;
5798 vec<basic_block> doms;
5799 int total_freq = 0, exit_freq = 0;
5800 gcov_type total_count = 0, exit_count = 0;
5801 edge exits[2], nexits[2], e;
5802 gimple_stmt_iterator gsi;
5803 gimple cond_stmt;
5804 edge sorig, snew;
5805 basic_block exit_bb;
5806 gimple_stmt_iterator psi;
5807 gimple phi;
5808 tree def;
5809 struct loop *target, *aloop, *cloop;
5810
5811 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5812 exits[0] = exit;
5813 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5814
5815 if (!can_copy_bbs_p (region, n_region))
5816 return false;
5817
5818 initialize_original_copy_tables ();
5819 set_loop_copy (orig_loop, loop);
5820
5821 target = loop;
5822 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5823 {
5824 if (bb_part_of_region_p (aloop->header, region, n_region))
5825 {
5826 cloop = duplicate_loop (aloop, target);
5827 duplicate_subloops (aloop, cloop);
5828 }
5829 }
5830
5831 if (!region_copy)
5832 {
5833 region_copy = XNEWVEC (basic_block, n_region);
5834 free_region_copy = true;
5835 }
5836
5837 gcc_assert (!need_ssa_update_p (cfun));
5838
5839 /* Record blocks outside the region that are dominated by something
5840 inside. */
5841 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5842
5843 if (exit->src->count)
5844 {
5845 total_count = exit->src->count;
5846 exit_count = exit->count;
5847 /* Fix up corner cases, to avoid division by zero or creation of negative
5848 frequencies. */
5849 if (exit_count > total_count)
5850 exit_count = total_count;
5851 }
5852 else
5853 {
5854 total_freq = exit->src->frequency;
5855 exit_freq = EDGE_FREQUENCY (exit);
5856 /* Fix up corner cases, to avoid division by zero or creation of negative
5857 frequencies. */
5858 if (total_freq == 0)
5859 total_freq = 1;
5860 if (exit_freq > total_freq)
5861 exit_freq = total_freq;
5862 }
5863
5864 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5865 split_edge_bb_loc (exit), true);
5866 if (total_count)
5867 {
5868 scale_bbs_frequencies_gcov_type (region, n_region,
5869 total_count - exit_count,
5870 total_count);
5871 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5872 total_count);
5873 }
5874 else
5875 {
5876 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5877 total_freq);
5878 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5879 }
5880
5881 /* Create the switch block, and put the exit condition into it. */
5882 entry_bb = entry->dest;
5883 nentry_bb = get_bb_copy (entry_bb);
5884 if (!last_stmt (entry->src)
5885 || !stmt_ends_bb_p (last_stmt (entry->src)))
5886 switch_bb = entry->src;
5887 else
5888 switch_bb = split_edge (entry);
5889 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5890
5891 gsi = gsi_last_bb (switch_bb);
5892 cond_stmt = last_stmt (exit->src);
5893 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5894 cond_stmt = gimple_copy (cond_stmt);
5895
5896 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5897
5898 sorig = single_succ_edge (switch_bb);
5899 sorig->flags = exits[1]->flags;
5900 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5901
5902 /* Register the new edge from SWITCH_BB in loop exit lists. */
5903 rescan_loop_exit (snew, true, false);
5904
5905 /* Add the PHI node arguments. */
5906 add_phi_args_after_copy (region_copy, n_region, snew);
5907
5908 /* Get rid of now superfluous conditions and associated edges (and phi node
5909 arguments). */
5910 exit_bb = exit->dest;
5911
5912 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5913 PENDING_STMT (e) = NULL;
5914
5915 /* The latch of ORIG_LOOP was copied, and so was the backedge
5916 to the original header. We redirect this backedge to EXIT_BB. */
5917 for (i = 0; i < n_region; i++)
5918 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5919 {
5920 gcc_assert (single_succ_edge (region_copy[i]));
5921 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5922 PENDING_STMT (e) = NULL;
5923 for (psi = gsi_start_phis (exit_bb);
5924 !gsi_end_p (psi);
5925 gsi_next (&psi))
5926 {
5927 phi = gsi_stmt (psi);
5928 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5929 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5930 }
5931 }
5932 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5933 PENDING_STMT (e) = NULL;
5934
5935 /* Anything that is outside of the region, but was dominated by something
5936 inside needs to update dominance info. */
5937 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5938 doms.release ();
5939 /* Update the SSA web. */
5940 update_ssa (TODO_update_ssa);
5941
5942 if (free_region_copy)
5943 free (region_copy);
5944
5945 free_original_copy_tables ();
5946 return true;
5947 }
5948
5949 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5950 adding blocks when the dominator traversal reaches EXIT. This
5951 function silently assumes that ENTRY strictly dominates EXIT. */
5952
5953 void
5954 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5955 vec<basic_block> *bbs_p)
5956 {
5957 basic_block son;
5958
5959 for (son = first_dom_son (CDI_DOMINATORS, entry);
5960 son;
5961 son = next_dom_son (CDI_DOMINATORS, son))
5962 {
5963 bbs_p->safe_push (son);
5964 if (son != exit)
5965 gather_blocks_in_sese_region (son, exit, bbs_p);
5966 }
5967 }
5968
5969 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5970 The duplicates are recorded in VARS_MAP. */
5971
5972 static void
5973 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5974 tree to_context)
5975 {
5976 tree t = *tp, new_t;
5977 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5978 void **loc;
5979
5980 if (DECL_CONTEXT (t) == to_context)
5981 return;
5982
5983 loc = pointer_map_contains (vars_map, t);
5984
5985 if (!loc)
5986 {
5987 loc = pointer_map_insert (vars_map, t);
5988
5989 if (SSA_VAR_P (t))
5990 {
5991 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5992 add_local_decl (f, new_t);
5993 }
5994 else
5995 {
5996 gcc_assert (TREE_CODE (t) == CONST_DECL);
5997 new_t = copy_node (t);
5998 }
5999 DECL_CONTEXT (new_t) = to_context;
6000
6001 *loc = new_t;
6002 }
6003 else
6004 new_t = (tree) *loc;
6005
6006 *tp = new_t;
6007 }
6008
6009
6010 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6011 VARS_MAP maps old ssa names and var_decls to the new ones. */
6012
6013 static tree
6014 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6015 tree to_context)
6016 {
6017 void **loc;
6018 tree new_name;
6019
6020 gcc_assert (!virtual_operand_p (name));
6021
6022 loc = pointer_map_contains (vars_map, name);
6023
6024 if (!loc)
6025 {
6026 tree decl = SSA_NAME_VAR (name);
6027 if (decl)
6028 {
6029 replace_by_duplicate_decl (&decl, vars_map, to_context);
6030 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6031 decl, SSA_NAME_DEF_STMT (name));
6032 if (SSA_NAME_IS_DEFAULT_DEF (name))
6033 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6034 decl, new_name);
6035 }
6036 else
6037 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6038 name, SSA_NAME_DEF_STMT (name));
6039
6040 loc = pointer_map_insert (vars_map, name);
6041 *loc = new_name;
6042 }
6043 else
6044 new_name = (tree) *loc;
6045
6046 return new_name;
6047 }
6048
6049 struct move_stmt_d
6050 {
6051 tree orig_block;
6052 tree new_block;
6053 tree from_context;
6054 tree to_context;
6055 struct pointer_map_t *vars_map;
6056 htab_t new_label_map;
6057 struct pointer_map_t *eh_map;
6058 bool remap_decls_p;
6059 };
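
/* A sketch of how this descriptor is typically filled in, following
   the setup done by move_sese_region_to_fn further below (the maps
   are created by the caller; names are illustrative):

       struct move_stmt_d d;
       memset (&d, 0, sizeof (d));
       d.orig_block = orig_block;
       d.new_block = DECL_INITIAL (dest_cfun->decl);
       d.from_context = cfun->decl;
       d.to_context = dest_cfun->decl;
       d.vars_map = vars_map;
       d.remap_decls_p = true;  */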
6060
6061 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6062 contained in *TP whose TREE_BLOCK was ORIG_BLOCK previously, and
6063 change the DECL_CONTEXT of every local variable referenced in *TP. */
6064
6065 static tree
6066 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6067 {
6068 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6069 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6070 tree t = *tp;
6071
6072 if (EXPR_P (t))
6073 {
6074 tree block = TREE_BLOCK (t);
6075 if (block == p->orig_block
6076 || (p->orig_block == NULL_TREE
6077 && block != NULL_TREE))
6078 TREE_SET_BLOCK (t, p->new_block);
6079 #ifdef ENABLE_CHECKING
6080 else if (block != NULL_TREE)
6081 {
6082 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6083 block = BLOCK_SUPERCONTEXT (block);
6084 gcc_assert (block == p->orig_block);
6085 }
6086 #endif
6087 }
6088 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6089 {
6090 if (TREE_CODE (t) == SSA_NAME)
6091 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6092 else if (TREE_CODE (t) == LABEL_DECL)
6093 {
6094 if (p->new_label_map)
6095 {
6096 struct tree_map in, *out;
6097 in.base.from = t;
6098 out = (struct tree_map *)
6099 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6100 if (out)
6101 *tp = t = out->to;
6102 }
6103
6104 DECL_CONTEXT (t) = p->to_context;
6105 }
6106 else if (p->remap_decls_p)
6107 {
6108 /* Replace T with its duplicate. T should no longer appear in the
6109 parent function, so this looks wasteful; however, it may appear
6110 in referenced_vars, and more importantly, as virtual operands of
6111 statements, and in alias lists of other variables. It would be
6112 quite difficult to expunge it from all those places. ??? It might
6113 suffice to do this for addressable variables. */
6114 if ((TREE_CODE (t) == VAR_DECL
6115 && !is_global_var (t))
6116 || TREE_CODE (t) == CONST_DECL)
6117 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6118 }
6119 *walk_subtrees = 0;
6120 }
6121 else if (TYPE_P (t))
6122 *walk_subtrees = 0;
6123
6124 return NULL_TREE;
6125 }
6126
6127 /* Helper for move_stmt_r. Given an EH region number for the source
6128 function, map that to the duplicate EH region number in the dest. */
6129
6130 static int
6131 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6132 {
6133 eh_region old_r, new_r;
6134 void **slot;
6135
6136 old_r = get_eh_region_from_number (old_nr);
6137 slot = pointer_map_contains (p->eh_map, old_r);
6138 new_r = (eh_region) *slot;
6139
6140 return new_r->index;
6141 }
6142
6143 /* Similar, but operate on INTEGER_CSTs. */
6144
6145 static tree
6146 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6147 {
6148 int old_nr, new_nr;
6149
6150 old_nr = tree_low_cst (old_t_nr, 0);
6151 new_nr = move_stmt_eh_region_nr (old_nr, p);
6152
6153 return build_int_cst (integer_type_node, new_nr);
6154 }
6155
6156 /* Like move_stmt_op, but for gimple statements.
6157
6158 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6159 contained in the current statement in *GSI_P and change the
6160 DECL_CONTEXT of every local variable referenced in the current
6161 statement. */
6162
6163 static tree
6164 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6165 struct walk_stmt_info *wi)
6166 {
6167 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6168 gimple stmt = gsi_stmt (*gsi_p);
6169 tree block = gimple_block (stmt);
6170
6171 if (block == p->orig_block
6172 || (p->orig_block == NULL_TREE
6173 && block != NULL_TREE))
6174 gimple_set_block (stmt, p->new_block);
6175
6176 switch (gimple_code (stmt))
6177 {
6178 case GIMPLE_CALL:
6179 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6180 {
6181 tree r, fndecl = gimple_call_fndecl (stmt);
6182 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6183 switch (DECL_FUNCTION_CODE (fndecl))
6184 {
6185 case BUILT_IN_EH_COPY_VALUES:
6186 r = gimple_call_arg (stmt, 1);
6187 r = move_stmt_eh_region_tree_nr (r, p);
6188 gimple_call_set_arg (stmt, 1, r);
6189 /* FALLTHRU */
6190
6191 case BUILT_IN_EH_POINTER:
6192 case BUILT_IN_EH_FILTER:
6193 r = gimple_call_arg (stmt, 0);
6194 r = move_stmt_eh_region_tree_nr (r, p);
6195 gimple_call_set_arg (stmt, 0, r);
6196 break;
6197
6198 default:
6199 break;
6200 }
6201 }
6202 break;
6203
6204 case GIMPLE_RESX:
6205 {
6206 int r = gimple_resx_region (stmt);
6207 r = move_stmt_eh_region_nr (r, p);
6208 gimple_resx_set_region (stmt, r);
6209 }
6210 break;
6211
6212 case GIMPLE_EH_DISPATCH:
6213 {
6214 int r = gimple_eh_dispatch_region (stmt);
6215 r = move_stmt_eh_region_nr (r, p);
6216 gimple_eh_dispatch_set_region (stmt, r);
6217 }
6218 break;
6219
6220 case GIMPLE_OMP_RETURN:
6221 case GIMPLE_OMP_CONTINUE:
6222 break;
6223 default:
6224 if (is_gimple_omp (stmt))
6225 {
6226 /* Do not remap variables inside OMP directives. Variables
6227 referenced in clauses and directive header belong to the
6228 parent function and should not be moved into the child
6229 function. */
6230 bool save_remap_decls_p = p->remap_decls_p;
6231 p->remap_decls_p = false;
6232 *handled_ops_p = true;
6233
6234 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6235 move_stmt_op, wi);
6236
6237 p->remap_decls_p = save_remap_decls_p;
6238 }
6239 break;
6240 }
6241
6242 return NULL_TREE;
6243 }
6244
6245 /* Move basic block BB from function CFUN to function DEST_FN. The
6246 block is moved out of the original linked list and placed after
6247 block AFTER in the new list. Also, the block is removed from the
6248 original array of blocks and placed in DEST_FN's array of blocks.
6249 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6250 updated to reflect the moved edges.
6251
6252 The local variables are remapped to new instances; D->VARS_MAP is
6253 used to record the mapping. */
6254
6255 static void
6256 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6257 basic_block after, bool update_edge_count_p,
6258 struct move_stmt_d *d)
6259 {
6260 struct control_flow_graph *cfg;
6261 edge_iterator ei;
6262 edge e;
6263 gimple_stmt_iterator si;
6264 unsigned old_len, new_len;
6265
6266 /* Remove BB from dominance structures. */
6267 delete_from_dominance_info (CDI_DOMINATORS, bb);
6268
6269 /* Move BB from its current loop to the copy in the new function. */
6270 if (current_loops)
6271 {
6272 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6273 if (new_loop)
6274 bb->loop_father = new_loop;
6275 }
6276
6277 /* Link BB to the new linked list. */
6278 move_block_after (bb, after);
6279
6280 /* Update the edge count in the corresponding flowgraphs. */
6281 if (update_edge_count_p)
6282 FOR_EACH_EDGE (e, ei, bb->succs)
6283 {
6284 cfun->cfg->x_n_edges--;
6285 dest_cfun->cfg->x_n_edges++;
6286 }
6287
6288 /* Remove BB from the original basic block array. */
6289 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6290 cfun->cfg->x_n_basic_blocks--;
6291
6292 /* Grow DEST_CFUN's basic block array if needed. */
6293 cfg = dest_cfun->cfg;
6294 cfg->x_n_basic_blocks++;
6295 if (bb->index >= cfg->x_last_basic_block)
6296 cfg->x_last_basic_block = bb->index + 1;
6297
6298 old_len = vec_safe_length (cfg->x_basic_block_info);
6299 if ((unsigned) cfg->x_last_basic_block >= old_len)
6300 {
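      /* Grow roughly 25% beyond what is currently needed, to
	 amortize the cost of future resizes.  */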
6301 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6302 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6303 }
6304
6305 (*cfg->x_basic_block_info)[bb->index] = bb;
6306
6307 /* Remap the variables in phi nodes. */
6308 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6309 {
6310 gimple phi = gsi_stmt (si);
6311 use_operand_p use;
6312 tree op = PHI_RESULT (phi);
6313 ssa_op_iter oi;
6314 unsigned i;
6315
6316 if (virtual_operand_p (op))
6317 {
6318 /* Remove the phi nodes for virtual operands (alias analysis will be
6319 run for the new function, anyway). */
6320 remove_phi_node (&si, true);
6321 continue;
6322 }
6323
6324 SET_PHI_RESULT (phi,
6325 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6326 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6327 {
6328 op = USE_FROM_PTR (use);
6329 if (TREE_CODE (op) == SSA_NAME)
6330 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6331 }
6332
6333 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6334 {
6335 location_t locus = gimple_phi_arg_location (phi, i);
6336 tree block = LOCATION_BLOCK (locus);
6337
6338 if (locus == UNKNOWN_LOCATION)
6339 continue;
6340 if (d->orig_block == NULL_TREE || block == d->orig_block)
6341 {
6342 if (d->new_block == NULL_TREE)
6343 locus = LOCATION_LOCUS (locus);
6344 else
6345 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6346 gimple_phi_arg_set_location (phi, i, locus);
6347 }
6348 }
6349
6350 gsi_next (&si);
6351 }
6352
6353 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6354 {
6355 gimple stmt = gsi_stmt (si);
6356 struct walk_stmt_info wi;
6357
6358 memset (&wi, 0, sizeof (wi));
6359 wi.info = d;
6360 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6361
6362 if (gimple_code (stmt) == GIMPLE_LABEL)
6363 {
6364 tree label = gimple_label_label (stmt);
6365 int uid = LABEL_DECL_UID (label);
6366
6367 gcc_assert (uid > -1);
6368
6369 old_len = vec_safe_length (cfg->x_label_to_block_map);
6370 if (old_len <= (unsigned) uid)
6371 {
6372 new_len = 3 * uid / 2 + 1;
6373 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6374 }
6375
6376 (*cfg->x_label_to_block_map)[uid] = bb;
6377 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6378
6379 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6380
6381 if (uid >= dest_cfun->cfg->last_label_uid)
6382 dest_cfun->cfg->last_label_uid = uid + 1;
6383 }
6384
6385 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6386 remove_stmt_from_eh_lp_fn (cfun, stmt);
6387
6388 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6389 gimple_remove_stmt_histograms (cfun, stmt);
6390
6391 /* We cannot leave any operands allocated from the operand caches of
6392 the current function. */
6393 free_stmt_operands (stmt);
6394 push_cfun (dest_cfun);
6395 update_stmt (stmt);
6396 pop_cfun ();
6397 }
6398
6399 FOR_EACH_EDGE (e, ei, bb->succs)
6400 if (e->goto_locus != UNKNOWN_LOCATION)
6401 {
6402 tree block = LOCATION_BLOCK (e->goto_locus);
6403 if (d->orig_block == NULL_TREE
6404 || block == d->orig_block)
6405 e->goto_locus = d->new_block ?
6406 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6407 LOCATION_LOCUS (e->goto_locus);
6408 }
6409 }
6410
6411 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6412 the outermost EH region. Use REGION as the incoming base EH region. */
6413
6414 static eh_region
6415 find_outermost_region_in_block (struct function *src_cfun,
6416 basic_block bb, eh_region region)
6417 {
6418 gimple_stmt_iterator si;
6419
6420 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6421 {
6422 gimple stmt = gsi_stmt (si);
6423 eh_region stmt_region;
6424 int lp_nr;
6425
6426 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6427 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6428 if (stmt_region)
6429 {
6430 if (region == NULL)
6431 region = stmt_region;
6432 else if (stmt_region != region)
6433 {
6434 region = eh_region_outermost (src_cfun, stmt_region, region);
6435 gcc_assert (region != NULL);
6436 }
6437 }
6438 }
6439
6440 return region;
6441 }
6442
6443 static tree
6444 new_label_mapper (tree decl, void *data)
6445 {
6446 htab_t hash = (htab_t) data;
6447 struct tree_map *m;
6448 void **slot;
6449
6450 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6451
6452 m = XNEW (struct tree_map);
6453 m->hash = DECL_UID (decl);
6454 m->base.from = decl;
6455 m->to = create_artificial_label (UNKNOWN_LOCATION);
6456 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6457 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6458 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6459
6460 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6461 gcc_assert (*slot == NULL);
6462
6463 *slot = m;
6464
6465 return m->to;
6466 }
6467
6468 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6469 subblocks. */
6470
6471 static void
6472 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6473 tree to_context)
6474 {
6475 tree *tp, t;
6476
6477 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6478 {
6479 t = *tp;
6480 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6481 continue;
6482 replace_by_duplicate_decl (&t, vars_map, to_context);
6483 if (t != *tp)
6484 {
6485 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6486 {
6487 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6488 DECL_HAS_VALUE_EXPR_P (t) = 1;
6489 }
6490 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6491 *tp = t;
6492 }
6493 }
6494
6495 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6496 replace_block_vars_by_duplicates (block, vars_map, to_context);
6497 }
6498
6499 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6500 from FN1 to FN2. */
6501
6502 static void
6503 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6504 struct loop *loop)
6505 {
6506 /* Discard it from the old loop array. */
6507 (*get_loops (fn1))[loop->num] = NULL;
6508
6509 /* Place it in the new loop array, assigning it a new number. */
6510 loop->num = number_of_loops (fn2);
6511 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6512
6513 /* Recurse to children. */
6514 for (loop = loop->inner; loop; loop = loop->next)
6515 fixup_loop_arrays_after_move (fn1, fn2, loop);
6516 }
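
/* For example (illustrative numbers): if FN2 currently holds loops
   0..2 in its array, a loop moved from FN1 is renumbered to 3 and
   appended to FN2's array, while its old slot in FN1's array is left
   NULL.  */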
6517
6518 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6519 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6520 single basic block in the original CFG and the new basic block is
6521 returned. DEST_CFUN must not have a CFG yet.
6522
6523 Note that the region need not be a pure SESE region. Blocks inside
6524 the region may contain calls to abort/exit. The only restriction
6525 is that ENTRY_BB should be the only entry point and it must
6526 dominate EXIT_BB.
6527
6528 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6529 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6530 to the new function.
6531
6532 All local variables referenced in the region are assumed to be in
6533 the corresponding BLOCK_VARS and unexpanded variable lists
6534 associated with DEST_CFUN. */
6535
6536 basic_block
6537 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6538 basic_block exit_bb, tree orig_block)
6539 {
6540 vec<basic_block> bbs, dom_bbs;
6541 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6542 basic_block after, bb, *entry_pred, *exit_succ, abb;
6543 struct function *saved_cfun = cfun;
6544 int *entry_flag, *exit_flag;
6545 unsigned *entry_prob, *exit_prob;
6546 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6547 edge e;
6548 edge_iterator ei;
6549 htab_t new_label_map;
6550 struct pointer_map_t *vars_map, *eh_map;
6551 struct loop *loop = entry_bb->loop_father;
6552 struct loop *loop0 = get_loop (saved_cfun, 0);
6553 struct move_stmt_d d;
6554
6555 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6556 region. */
6557 gcc_assert (entry_bb != exit_bb
6558 && (!exit_bb
6559 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6560
6561 /* Collect all the blocks in the region. Manually add ENTRY_BB
6562 because it won't be added by dfs_enumerate_from. */
6563 bbs.create (0);
6564 bbs.safe_push (entry_bb);
6565 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6566
6567 /* The blocks that used to be dominated by something in BBS will now be
6568 dominated by the new block. */
6569 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6570 bbs.address (),
6571 bbs.length ());
6572
6573 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6574 the predecessor edges to ENTRY_BB and the successor edges to
6575 EXIT_BB so that we can re-attach them to the new basic block that
6576 will replace the region. */
6577 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6578 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6579 entry_flag = XNEWVEC (int, num_entry_edges);
6580 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6581 i = 0;
6582 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6583 {
6584 entry_prob[i] = e->probability;
6585 entry_flag[i] = e->flags;
6586 entry_pred[i++] = e->src;
6587 remove_edge (e);
6588 }
6589
6590 if (exit_bb)
6591 {
6592 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6593 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6594 exit_flag = XNEWVEC (int, num_exit_edges);
6595 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6596 i = 0;
6597 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6598 {
6599 exit_prob[i] = e->probability;
6600 exit_flag[i] = e->flags;
6601 exit_succ[i++] = e->dest;
6602 remove_edge (e);
6603 }
6604 }
6605 else
6606 {
6607 num_exit_edges = 0;
6608 exit_succ = NULL;
6609 exit_flag = NULL;
6610 exit_prob = NULL;
6611 }
6612
6613 /* Switch context to the child function to initialize DEST_FN's CFG. */
6614 gcc_assert (dest_cfun->cfg == NULL);
6615 push_cfun (dest_cfun);
6616
6617 init_empty_tree_cfg ();
6618
6619 /* Initialize EH information for the new function. */
6620 eh_map = NULL;
6621 new_label_map = NULL;
6622 if (saved_cfun->eh)
6623 {
6624 eh_region region = NULL;
6625
6626 FOR_EACH_VEC_ELT (bbs, i, bb)
6627 region = find_outermost_region_in_block (saved_cfun, bb, region);
6628
6629 init_eh_for_function ();
6630 if (region != NULL)
6631 {
6632 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6633 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6634 new_label_mapper, new_label_map);
6635 }
6636 }
6637
6638 /* Initialize an empty loop tree. */
6639 struct loops *loops = ggc_alloc_cleared_loops ();
6640 init_loops_structure (dest_cfun, loops, 1);
6641 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6642 set_loops_for_fn (dest_cfun, loops);
6643
6644 /* Move the outlined loop tree part. */
6645 num_nodes = bbs.length ();
6646 FOR_EACH_VEC_ELT (bbs, i, bb)
6647 {
6648 if (bb->loop_father->header == bb)
6649 {
6650 struct loop *this_loop = bb->loop_father;
6651 struct loop *outer = loop_outer (this_loop);
6652 if (outer == loop
6653 /* If the SESE region contains some bbs ending with
6654 a noreturn call, those are considered to belong
6655 to the outermost loop in saved_cfun, rather than
6656 the entry_bb's loop_father. */
6657 || outer == loop0)
6658 {
6659 if (outer != loop)
6660 num_nodes -= this_loop->num_nodes;
6661 flow_loop_tree_node_remove (bb->loop_father);
6662 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6663 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6664 }
6665 }
6666 else if (bb->loop_father == loop0 && loop0 != loop)
6667 num_nodes--;
6668
6669 /* Remove loop exits from the outlined region. */
6670 if (loops_for_fn (saved_cfun)->exits)
6671 FOR_EACH_EDGE (e, ei, bb->succs)
6672 {
6673 void **slot = htab_find_slot_with_hash
6674 (loops_for_fn (saved_cfun)->exits, e,
6675 htab_hash_pointer (e), NO_INSERT);
6676 if (slot)
6677 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6678 }
6679 }
6680
6681
6682 /* Adjust the number of blocks in the tree root of the outlined part. */
6683 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6684
6685 /* Setup a mapping to be used by move_block_to_fn. */
6686 loop->aux = current_loops->tree_root;
6687 loop0->aux = current_loops->tree_root;
6688
6689 pop_cfun ();
6690
6691 /* Move blocks from BBS into DEST_CFUN. */
6692 gcc_assert (bbs.length () >= 2);
6693 after = dest_cfun->cfg->x_entry_block_ptr;
6694 vars_map = pointer_map_create ();
6695
6696 memset (&d, 0, sizeof (d));
6697 d.orig_block = orig_block;
6698 d.new_block = DECL_INITIAL (dest_cfun->decl);
6699 d.from_context = cfun->decl;
6700 d.to_context = dest_cfun->decl;
6701 d.vars_map = vars_map;
6702 d.new_label_map = new_label_map;
6703 d.eh_map = eh_map;
6704 d.remap_decls_p = true;
6705
6706 FOR_EACH_VEC_ELT (bbs, i, bb)
6707 {
6708 /* No need to update edge counts on the last block. It has
6709 already been updated earlier when we detached the region from
6710 the original CFG. */
6711 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6712 after = bb;
6713 }
6714
6715 loop->aux = NULL;
6716 loop0->aux = NULL;
6717 /* Loop sizes are no longer correct, fix them up. */
6718 loop->num_nodes -= num_nodes;
6719 for (struct loop *outer = loop_outer (loop);
6720 outer; outer = loop_outer (outer))
6721 outer->num_nodes -= num_nodes;
6722 loop0->num_nodes -= bbs.length () - num_nodes;
6723
6724 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6725 {
6726 struct loop *aloop;
6727 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6728 if (aloop != NULL)
6729 {
6730 if (aloop->simduid)
6731 {
6732 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6733 d.to_context);
6734 dest_cfun->has_simduid_loops = true;
6735 }
6736 if (aloop->force_vect)
6737 dest_cfun->has_force_vect_loops = true;
6738 }
6739 }
6740
6741 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6742 if (orig_block)
6743 {
6744 tree block;
6745 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6746 == NULL_TREE);
6747 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6748 = BLOCK_SUBBLOCKS (orig_block);
6749 for (block = BLOCK_SUBBLOCKS (orig_block);
6750 block; block = BLOCK_CHAIN (block))
6751 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6752 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6753 }
6754
6755 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6756 vars_map, dest_cfun->decl);
6757
6758 if (new_label_map)
6759 htab_delete (new_label_map);
6760 if (eh_map)
6761 pointer_map_destroy (eh_map);
6762 pointer_map_destroy (vars_map);
6763
6764 /* Rewire the entry and exit blocks. The successor to the entry
6765 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6766 the child function. Similarly, the predecessor of DEST_FN's
6767 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6768 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6769 various CFG manipulation functions get to the right CFG.
6770
6771 FIXME, this is silly. The CFG ought to become a parameter to
6772 these helpers. */
6773 push_cfun (dest_cfun);
6774 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6775 if (exit_bb)
6776 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6777 pop_cfun ();
6778
6779 /* Back in the original function, the SESE region has disappeared,
6780 create a new basic block in its place. */
6781 bb = create_empty_bb (entry_pred[0]);
6782 if (current_loops)
6783 add_bb_to_loop (bb, loop);
6784 for (i = 0; i < num_entry_edges; i++)
6785 {
6786 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6787 e->probability = entry_prob[i];
6788 }
6789
6790 for (i = 0; i < num_exit_edges; i++)
6791 {
6792 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6793 e->probability = exit_prob[i];
6794 }
6795
6796 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6797 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6798 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6799 dom_bbs.release ();
6800
6801 if (exit_bb)
6802 {
6803 free (exit_prob);
6804 free (exit_flag);
6805 free (exit_succ);
6806 }
6807 free (entry_prob);
6808 free (entry_flag);
6809 free (entry_pred);
6810 bbs.release ();
6811
6812 return bb;
6813 }
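
/* A sketch of a typical invocation (illustrative; the OMP expansion
   code outlines parallel/task bodies roughly this way, with CHILD_FN
   being the FUNCTION_DECL created for the outlined body):

       basic_block new_bb
         = move_sese_region_to_fn (DECL_STRUCT_FUNCTION (child_fn),
                                   entry_bb, exit_bb, block);

   On return, the region has been replaced by NEW_BB in the original
   CFG, and CHILD_FN owns a fresh CFG containing the moved blocks.  */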
6814
6815
6816 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
6817 dumpfile.h). */
6818
6819 void
6820 dump_function_to_file (tree fndecl, FILE *file, int flags)
6821 {
6822 tree arg, var, old_current_fndecl = current_function_decl;
6823 struct function *dsf;
6824 bool ignore_topmost_bind = false, any_var = false;
6825 basic_block bb;
6826 tree chain;
6827 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6828 && decl_is_tm_clone (fndecl));
6829 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6830
6831 current_function_decl = fndecl;
6832 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6833
6834 arg = DECL_ARGUMENTS (fndecl);
6835 while (arg)
6836 {
6837 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6838 fprintf (file, " ");
6839 print_generic_expr (file, arg, dump_flags);
6840 if (flags & TDF_VERBOSE)
6841 print_node (file, "", arg, 4);
6842 if (DECL_CHAIN (arg))
6843 fprintf (file, ", ");
6844 arg = DECL_CHAIN (arg);
6845 }
6846 fprintf (file, ")\n");
6847
6848 if (flags & TDF_VERBOSE)
6849 print_node (file, "", fndecl, 2);
6850
6851 dsf = DECL_STRUCT_FUNCTION (fndecl);
6852 if (dsf && (flags & TDF_EH))
6853 dump_eh_tree (file, dsf);
6854
6855 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
6856 {
6857 dump_node (fndecl, TDF_SLIM | flags, file);
6858 current_function_decl = old_current_fndecl;
6859 return;
6860 }
6861
6862 /* When GIMPLE is lowered, the variables are no longer available in
6863 BIND_EXPRs, so display them separately. */
6864 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
6865 {
6866 unsigned ix;
6867 ignore_topmost_bind = true;
6868
6869 fprintf (file, "{\n");
6870 if (!vec_safe_is_empty (fun->local_decls))
6871 FOR_EACH_LOCAL_DECL (fun, ix, var)
6872 {
6873 print_generic_decl (file, var, flags);
6874 if (flags & TDF_VERBOSE)
6875 print_node (file, "", var, 4);
6876 fprintf (file, "\n");
6877
6878 any_var = true;
6879 }
6880 if (gimple_in_ssa_p (cfun))
6881 for (ix = 1; ix < num_ssa_names; ++ix)
6882 {
6883 tree name = ssa_name (ix);
6884 if (name && !SSA_NAME_VAR (name))
6885 {
6886 fprintf (file, " ");
6887 print_generic_expr (file, TREE_TYPE (name), flags);
6888 fprintf (file, " ");
6889 print_generic_expr (file, name, flags);
6890 fprintf (file, ";\n");
6891
6892 any_var = true;
6893 }
6894 }
6895 }
6896
6897 if (fun && fun->decl == fndecl
6898 && fun->cfg
6899 && basic_block_info_for_function (fun))
6900 {
6901 /* If the CFG has been built, emit a CFG-based dump. */
6902 if (!ignore_topmost_bind)
6903 fprintf (file, "{\n");
6904
6905 if (any_var && n_basic_blocks_for_function (fun))
6906 fprintf (file, "\n");
6907
6908 FOR_EACH_BB_FN (bb, fun)
6909 dump_bb (file, bb, 2, flags | TDF_COMMENT);
6910
6911 fprintf (file, "}\n");
6912 }
6913 else if (DECL_SAVED_TREE (fndecl) == NULL)
6914 {
6915 /* The function is now in GIMPLE form but the CFG has not been
6916 built yet. Emit the single sequence of GIMPLE statements
6917 that make up its body. */
6918 gimple_seq body = gimple_body (fndecl);
6919
6920 if (gimple_seq_first_stmt (body)
6921 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6922 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6923 print_gimple_seq (file, body, 0, flags);
6924 else
6925 {
6926 if (!ignore_topmost_bind)
6927 fprintf (file, "{\n");
6928
6929 if (any_var)
6930 fprintf (file, "\n");
6931
6932 print_gimple_seq (file, body, 2, flags);
6933 fprintf (file, "}\n");
6934 }
6935 }
6936 else
6937 {
6938 int indent;
6939
6940 /* Make a tree based dump. */
6941 chain = DECL_SAVED_TREE (fndecl);
6942 if (chain && TREE_CODE (chain) == BIND_EXPR)
6943 {
6944 if (ignore_topmost_bind)
6945 {
6946 chain = BIND_EXPR_BODY (chain);
6947 indent = 2;
6948 }
6949 else
6950 indent = 0;
6951 }
6952 else
6953 {
6954 if (!ignore_topmost_bind)
6955 fprintf (file, "{\n");
6956 indent = 2;
6957 }
6958
6959 if (any_var)
6960 fprintf (file, "\n");
6961
6962 print_generic_stmt_indented (file, chain, flags, indent);
6963 if (ignore_topmost_bind)
6964 fprintf (file, "}\n");
6965 }
6966
6967 if (flags & TDF_ENUMERATE_LOCALS)
6968 dump_enumerated_decls (file, flags);
6969 fprintf (file, "\n\n");
6970
6971 current_function_decl = old_current_fndecl;
6972 }
6973
6974 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
6975
6976 DEBUG_FUNCTION void
6977 debug_function (tree fn, int flags)
6978 {
6979 dump_function_to_file (fn, stderr, flags);
6980 }
6981
6982
6983 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6984
6985 static void
6986 print_pred_bbs (FILE *file, basic_block bb)
6987 {
6988 edge e;
6989 edge_iterator ei;
6990
6991 FOR_EACH_EDGE (e, ei, bb->preds)
6992 fprintf (file, "bb_%d ", e->src->index);
6993 }
6994
6995
6996 /* Print on FILE the indexes for the successors of basic_block BB. */
6997
6998 static void
6999 print_succ_bbs (FILE *file, basic_block bb)
7000 {
7001 edge e;
7002 edge_iterator ei;
7003
7004 FOR_EACH_EDGE (e, ei, bb->succs)
7005 fprintf (file, "bb_%d ", e->dest->index);
7006 }
7007
7008 /* Print basic block BB to FILE, according to the VERBOSITY level. */
7009
7010 void
7011 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7012 {
7013 char *s_indent = (char *) alloca ((size_t) indent + 1);
7014 memset ((void *) s_indent, ' ', (size_t) indent);
7015 s_indent[indent] = '\0';
7016
7017 /* Print basic_block's header. */
7018 if (verbosity >= 2)
7019 {
7020 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7021 print_pred_bbs (file, bb);
7022 fprintf (file, "}, succs = {");
7023 print_succ_bbs (file, bb);
7024 fprintf (file, "})\n");
7025 }
7026
7027 /* Print basic_block's body. */
7028 if (verbosity >= 3)
7029 {
7030 fprintf (file, "%s {\n", s_indent);
7031 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7032 fprintf (file, "%s }\n", s_indent);
7033 }
7034 }
7035
7036 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7037
7038 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7039 VERBOSITY level, this outputs the contents of the loop or just its
7040 structure. */
7041
7042 static void
7043 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7044 {
7045 char *s_indent;
7046 basic_block bb;
7047
7048 if (loop == NULL)
7049 return;
7050
7051 s_indent = (char *) alloca ((size_t) indent + 1);
7052 memset ((void *) s_indent, ' ', (size_t) indent);
7053 s_indent[indent] = '\0';
7054
7055 /* Print loop's header. */
7056 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7057 if (loop->header)
7058 fprintf (file, "header = %d", loop->header->index);
7059 else
7060 {
7061 fprintf (file, "deleted)\n");
7062 return;
7063 }
7064 if (loop->latch)
7065 fprintf (file, ", latch = %d", loop->latch->index);
7066 else
7067 fprintf (file, ", multiple latches");
7068 fprintf (file, ", niter = ");
7069 print_generic_expr (file, loop->nb_iterations, 0);
7070
7071 if (loop->any_upper_bound)
7072 {
7073 fprintf (file, ", upper_bound = ");
7074 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7075 }
7076
7077 if (loop->any_estimate)
7078 {
7079 fprintf (file, ", estimate = ");
7080 dump_double_int (file, loop->nb_iterations_estimate, true);
7081 }
7082 fprintf (file, ")\n");
7083
7084 /* Print loop's body. */
7085 if (verbosity >= 1)
7086 {
7087 fprintf (file, "%s{\n", s_indent);
7088 FOR_EACH_BB (bb)
7089 if (bb->loop_father == loop)
7090 print_loops_bb (file, bb, indent, verbosity);
7091
7092 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7093 fprintf (file, "%s}\n", s_indent);
7094 }
7095 }
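
/* For instance, with verbosity 0 the header line printed above might
   read (illustrative values):

       loop_1 (header = 3, latch = 4, niter = _6, upper_bound = 15)

   Higher verbosity levels additionally print the loop body between
   braces via print_loops_bb.  */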
7096
7097 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7098 spaces. Depending on the VERBOSITY level, this outputs the contents
7099 of the loop or just its structure. */
7100
7101 static void
7102 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7103 int verbosity)
7104 {
7105 if (loop == NULL)
7106 return;
7107
7108 print_loop (file, loop, indent, verbosity);
7109 print_loop_and_siblings (file, loop->next, indent, verbosity);
7110 }
7111
7112 /* Starting from the entry point of the program, pretty print the loop
7113 structure of the current function on FILE. */
7114
7115 void
7116 print_loops (FILE *file, int verbosity)
7117 {
7118 basic_block bb;
7119
7120 bb = ENTRY_BLOCK_PTR;
7121 if (bb && bb->loop_father)
7122 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7123 }
7124
7125 /* Dump a loop. */
7126
7127 DEBUG_FUNCTION void
7128 debug (struct loop &ref)
7129 {
7130 print_loop (stderr, &ref, 0, /*verbosity*/0);
7131 }
7132
7133 DEBUG_FUNCTION void
7134 debug (struct loop *ptr)
7135 {
7136 if (ptr)
7137 debug (*ptr);
7138 else
7139 fprintf (stderr, "<nil>\n");
7140 }
7141
7142 /* Dump a loop verbosely. */
7143
7144 DEBUG_FUNCTION void
7145 debug_verbose (struct loop &ref)
7146 {
7147 print_loop (stderr, &ref, 0, /*verbosity*/3);
7148 }
7149
7150 DEBUG_FUNCTION void
7151 debug_verbose (struct loop *ptr)
7152 {
7153 if (ptr)
7154 debug_verbose (*ptr);
7155 else
7156 fprintf (stderr, "<nil>\n");
7157 }
7158
7159
7160 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7161
7162 DEBUG_FUNCTION void
7163 debug_loops (int verbosity)
7164 {
7165 print_loops (stderr, verbosity);
7166 }
7167
7168 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7169
7170 DEBUG_FUNCTION void
7171 debug_loop (struct loop *loop, int verbosity)
7172 {
7173 print_loop (stderr, loop, 0, verbosity);
7174 }
7175
7176 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7177 level. */
7178
7179 DEBUG_FUNCTION void
7180 debug_loop_num (unsigned num, int verbosity)
7181 {
7182 debug_loop (get_loop (cfun, num), verbosity);
7183 }
7184
7185 /* Return true if BB ends with a call, possibly followed by some
7186 instructions that must stay with the call. Return false
7187 otherwise. */
7188
7189 static bool
7190 gimple_block_ends_with_call_p (basic_block bb)
7191 {
7192 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7193 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7194 }
7195
7196
7197 /* Return true if BB ends with a conditional branch. Return false
7198 otherwise. */
7199
7200 static bool
7201 gimple_block_ends_with_condjump_p (const_basic_block bb)
7202 {
7203 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7204 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7205 }
7206
7207
7208 /* Return true if we need to add fake edge to exit at statement T.
7209 Helper function for gimple_flow_call_edges_add. */
7210
7211 static bool
7212 need_fake_edge_p (gimple t)
7213 {
7214 tree fndecl = NULL_TREE;
7215 int call_flags = 0;
7216
7217 /* NORETURN and LONGJMP calls already have an edge to exit.
7218 CONST and PURE calls do not need one.
7219 We don't currently check for CONST and PURE here, although
7220 it would be a good idea, because those attributes are
7221 figured out from the RTL in mark_constant_function, and
7222 the counter incrementation code from -fprofile-arcs
7223 leads to different results from -fbranch-probabilities. */
7224 if (is_gimple_call (t))
7225 {
7226 fndecl = gimple_call_fndecl (t);
7227 call_flags = gimple_call_flags (t);
7228 }
7229
7230 if (is_gimple_call (t)
7231 && fndecl
7232 && DECL_BUILT_IN (fndecl)
7233 && (call_flags & ECF_NOTHROW)
7234 && !(call_flags & ECF_RETURNS_TWICE)
7235 /* fork() doesn't really return twice, but wrapping it in
7236 __gcov_fork(), which calls __gcov_flush() and clears the
7237 counters before forking, has the same effect as returning
7238 twice. Force a fake edge. */
7239 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7240 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7241 return false;
7242
7243 if (is_gimple_call (t))
7244 {
7245 edge_iterator ei;
7246 edge e;
7247 basic_block bb;
7248
7249 if (!(call_flags & ECF_NORETURN))
7250 return true;
7251
7252 bb = gimple_bb (t);
7253 FOR_EACH_EDGE (e, ei, bb->succs)
7254 if ((e->flags & EDGE_FAKE) == 0)
7255 return true;
7256 }
7257
7258 if (gimple_code (t) == GIMPLE_ASM
7259 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7260 return true;
7261
7262 return false;
7263 }
7264
7265
7266 /* Add fake edges to the function exit for any non-constant and
7267 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7268 for volatile inline assembly, in the blocks specified by the bitmap
7269 BLOCKS, or in the whole CFG if BLOCKS is zero. Return the number of
7270 blocks that were split.
7271
7272 The goal is to expose cases in which entering a basic block does
7273 not imply that all subsequent instructions must be executed. */
7274
7275 static int
7276 gimple_flow_call_edges_add (sbitmap blocks)
7277 {
7278 int i;
7279 int blocks_split = 0;
7280 int last_bb = last_basic_block;
7281 bool check_last_block = false;
7282
7283 if (n_basic_blocks == NUM_FIXED_BLOCKS)
7284 return 0;
7285
7286 if (! blocks)
7287 check_last_block = true;
7288 else
7289 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7290
7291 /* In the last basic block, before epilogue generation, there will be
7292 a fallthru edge to EXIT. Special care is required if the last insn
7293 of the last basic block is a call because make_edge folds duplicate
7294 edges, which would result in the fallthru edge also being marked
7295 fake, which would result in the fallthru edge being removed by
7296 remove_fake_edges, which would result in an invalid CFG.
7297
7298 Moreover, we can't elide the outgoing fake edge, since the block
7299 profiler needs to take this into account in order to solve the minimal
7300 spanning tree in the case that the call doesn't return.
7301
7302 Handle this by adding a dummy instruction in a new last basic block. */
7303 if (check_last_block)
7304 {
7305 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7306 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7307 gimple t = NULL;
7308
7309 if (!gsi_end_p (gsi))
7310 t = gsi_stmt (gsi);
7311
7312 if (t && need_fake_edge_p (t))
7313 {
7314 edge e;
7315
7316 e = find_edge (bb, EXIT_BLOCK_PTR);
7317 if (e)
7318 {
7319 gsi_insert_on_edge (e, gimple_build_nop ());
7320 gsi_commit_edge_inserts ();
7321 }
7322 }
7323 }
7324
7325 /* Now add fake edges to the function exit for any non-constant
7326 calls, since there is no way we can determine whether they will
7327 return or not... */
7328 for (i = 0; i < last_bb; i++)
7329 {
7330 basic_block bb = BASIC_BLOCK (i);
7331 gimple_stmt_iterator gsi;
7332 gimple stmt, last_stmt;
7333
7334 if (!bb)
7335 continue;
7336
7337 if (blocks && !bitmap_bit_p (blocks, i))
7338 continue;
7339
7340 gsi = gsi_last_nondebug_bb (bb);
7341 if (!gsi_end_p (gsi))
7342 {
7343 last_stmt = gsi_stmt (gsi);
7344 do
7345 {
7346 stmt = gsi_stmt (gsi);
7347 if (need_fake_edge_p (stmt))
7348 {
7349 edge e;
7350
7351 /* The handling above of the final block before the
7352 epilogue should be enough to verify that there is
7353 no edge to the exit block in CFG already.
7354 Calling make_edge in such case would cause us to
7355 mark that edge as fake and remove it later. */
7356 #ifdef ENABLE_CHECKING
7357 if (stmt == last_stmt)
7358 {
7359 e = find_edge (bb, EXIT_BLOCK_PTR);
7360 gcc_assert (e == NULL);
7361 }
7362 #endif
7363
7364 /* Note that the following may create a new basic block
7365 and renumber the existing basic blocks. */
7366 if (stmt != last_stmt)
7367 {
7368 e = split_block (bb, stmt);
7369 if (e)
7370 blocks_split++;
7371 }
7372 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7373 }
7374 gsi_prev (&gsi);
7375 }
7376 while (!gsi_end_p (gsi));
7377 }
7378 }
7379
7380 if (blocks_split)
7381 verify_flow_info ();
7382
7383 return blocks_split;
7384 }
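
/* As an illustration (hypothetical): for a block containing

       x = 1;
       foo ();   /+ may call exit() and never return +/
       x = 2;

   a fake edge to EXIT is added after the call to foo, so that the
   profiler does not assume "x = 2" executes whenever "x = 1" did.
   If the call is not the last statement of its block, the block is
   split first.  */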
7385
7386 /* Removes edge E and all the blocks dominated by it, and updates dominance
7387 information. The IL in E->src needs to be updated separately.
7388 If dominance info is not available, only the edge E is removed. */
7389
7390 void
7391 remove_edge_and_dominated_blocks (edge e)
7392 {
7393 vec<basic_block> bbs_to_remove = vNULL;
7394 vec<basic_block> bbs_to_fix_dom = vNULL;
7395 bitmap df, df_idom;
7396 edge f;
7397 edge_iterator ei;
7398 bool none_removed = false;
7399 unsigned i;
7400 basic_block bb, dbb;
7401 bitmap_iterator bi;
7402
7403 if (!dom_info_available_p (CDI_DOMINATORS))
7404 {
7405 remove_edge (e);
7406 return;
7407 }
7408
7409 /* No updating is needed for edges to exit. */
7410 if (e->dest == EXIT_BLOCK_PTR)
7411 {
7412 if (cfgcleanup_altered_bbs)
7413 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7414 remove_edge (e);
7415 return;
7416 }
7417
7418 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7419 that is not dominated by E->dest, then this set is empty. Otherwise,
7420 all the basic blocks dominated by E->dest are removed.
7421
7422 Also, to DF_IDOM we store the immediate dominators of the blocks in
7423 the dominance frontier of E (i.e., of the successors of the
7424 removed blocks, if there are any, and of E->dest otherwise). */
7425 FOR_EACH_EDGE (f, ei, e->dest->preds)
7426 {
7427 if (f == e)
7428 continue;
7429
7430 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7431 {
7432 none_removed = true;
7433 break;
7434 }
7435 }
7436
7437 df = BITMAP_ALLOC (NULL);
7438 df_idom = BITMAP_ALLOC (NULL);
7439
7440 if (none_removed)
7441 bitmap_set_bit (df_idom,
7442 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7443 else
7444 {
7445 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7446 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7447 {
7448 FOR_EACH_EDGE (f, ei, bb->succs)
7449 {
7450 if (f->dest != EXIT_BLOCK_PTR)
7451 bitmap_set_bit (df, f->dest->index);
7452 }
7453 }
7454 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7455 bitmap_clear_bit (df, bb->index);
7456
7457 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7458 {
7459 bb = BASIC_BLOCK (i);
7460 bitmap_set_bit (df_idom,
7461 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7462 }
7463 }
7464
7465 if (cfgcleanup_altered_bbs)
7466 {
7467 /* Record the set of the altered basic blocks. */
7468 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7469 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7470 }
7471
7472 /* Remove E and the cancelled blocks. */
7473 if (none_removed)
7474 remove_edge (e);
7475 else
7476 {
7477 /* Walk backwards so as to get a chance to substitute all
7478 released DEFs into debug stmts. See
7479 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7480 details. */
7481 for (i = bbs_to_remove.length (); i-- > 0; )
7482 delete_basic_block (bbs_to_remove[i]);
7483 }
7484
7485 /* Update the dominance information. The immediate dominator may change only
7486 for blocks whose immediate dominator belongs to DF_IDOM:
7487
7488 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7489 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7490 Z dominates X after the removal. Before removal, there exists a path P
7491 from Y to X that avoids Z. Let F be the last edge on P that is
7492 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7493 dominates W, and because of P, Z does not dominate W), and W belongs to
7494 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
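  /* A small worked example (hypothetical blocks): in a CFG with edges
     A->B, A->C, B->D and C->D, removing E = A->B leaves B with no
     other predecessor, so B is deleted.  Its successor D lands in DF,
     D's old immediate dominator A is recorded in DF_IDOM, and the
     fixup below recomputes idom(D) = C, since C is now D's only
     predecessor.  */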
7495 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7496 {
7497 bb = BASIC_BLOCK (i);
7498 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7499 dbb;
7500 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7501 bbs_to_fix_dom.safe_push (dbb);
7502 }
7503
7504 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7505
7506 BITMAP_FREE (df);
7507 BITMAP_FREE (df_idom);
7508 bbs_to_remove.release ();
7509 bbs_to_fix_dom.release ();
7510 }
7511
7512 /* Purge dead EH edges from basic block BB. */
7513
7514 bool
7515 gimple_purge_dead_eh_edges (basic_block bb)
7516 {
7517 bool changed = false;
7518 edge e;
7519 edge_iterator ei;
7520 gimple stmt = last_stmt (bb);
7521
7522 if (stmt && stmt_can_throw_internal (stmt))
7523 return false;
7524
7525 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7526 {
7527 if (e->flags & EDGE_EH)
7528 {
7529 remove_edge_and_dominated_blocks (e);
7530 changed = true;
7531 }
7532 else
7533 ei_next (&ei);
7534 }
7535
7536 return changed;
7537 }
7538
7539 /* Purge dead EH edges from basic block listed in BLOCKS. */
7540
7541 bool
7542 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7543 {
7544 bool changed = false;
7545 unsigned i;
7546 bitmap_iterator bi;
7547
7548 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7549 {
7550 basic_block bb = BASIC_BLOCK (i);
7551
7552 /* Earlier gimple_purge_dead_eh_edges could have removed
7553 this basic block already. */
7554 gcc_assert (bb || changed);
7555 if (bb != NULL)
7556 changed |= gimple_purge_dead_eh_edges (bb);
7557 }
7558
7559 return changed;
7560 }
7561
7562 /* Purge dead abnormal call edges from basic block BB. */
7563
7564 bool
7565 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7566 {
7567 bool changed = false;
7568 edge e;
7569 edge_iterator ei;
7570 gimple stmt = last_stmt (bb);
7571
7572 if (!cfun->has_nonlocal_label
7573 && !cfun->calls_setjmp)
7574 return false;
7575
7576 if (stmt && stmt_can_make_abnormal_goto (stmt))
7577 return false;
7578
7579 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7580 {
7581 if (e->flags & EDGE_ABNORMAL)
7582 {
7583 if (e->flags & EDGE_FALLTHRU)
7584 e->flags &= ~EDGE_ABNORMAL;
7585 else
7586 remove_edge_and_dominated_blocks (e);
7587 changed = true;
7588 }
7589 else
7590 ei_next (&ei);
7591 }
7592
7593 return changed;
7594 }
7595
7596 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7597
7598 bool
7599 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7600 {
7601 bool changed = false;
7602 unsigned i;
7603 bitmap_iterator bi;
7604
7605 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7606 {
7607 basic_block bb = BASIC_BLOCK (i);
7608
7609 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7610 this basic block already. */
7611 gcc_assert (bb || changed);
7612 if (bb != NULL)
7613 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7614 }
7615
7616 return changed;
7617 }
7618
7619 /* This function is called whenever a new edge is created or
7620 redirected. */
7621
7622 static void
7623 gimple_execute_on_growing_pred (edge e)
7624 {
7625 basic_block bb = e->dest;
7626
7627 if (!gimple_seq_empty_p (phi_nodes (bb)))
7628 reserve_phi_args_for_new_edge (bb);
7629 }
7630
7631 /* This function is called immediately before edge E is removed from
7632 the edge vector E->dest->preds. */
7633
7634 static void
7635 gimple_execute_on_shrinking_pred (edge e)
7636 {
7637 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7638 remove_phi_args (e);
7639 }
7640
7641 /*---------------------------------------------------------------------------
7642 Helper functions for Loop versioning
7643 ---------------------------------------------------------------------------*/
7644
7645 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7646 of 'first'. Both of them are dominated by 'new_head' basic block. When
7647 'new_head' was created by splitting 'second's incoming edge, it received
7648 phi arguments on that edge from split_edge(). Later, an additional edge
7649 'e' was created to connect 'new_head' and 'first'. This routine now adds,
7650 on edge 'e', the phi args that the edge from 'new_head' to 'second'
7651 received as part of the edge splitting. */
7652
7653 static void
7654 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7655 basic_block new_head, edge e)
7656 {
7657 gimple phi1, phi2;
7658 gimple_stmt_iterator psi1, psi2;
7659 tree def;
7660 edge e2 = find_edge (new_head, second);
7661
7662 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7663 edge, we should always have an edge from NEW_HEAD to SECOND. */
7664 gcc_assert (e2 != NULL);
7665
7666 /* Browse all 'second' basic block phi nodes and add phi args to
7667 edge 'e' for 'first' head. PHI args are always in correct order. */
7668
7669 for (psi2 = gsi_start_phis (second),
7670 psi1 = gsi_start_phis (first);
7671 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7672 gsi_next (&psi2), gsi_next (&psi1))
7673 {
7674 phi1 = gsi_stmt (psi1);
7675 phi2 = gsi_stmt (psi2);
7676 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7677 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7678 }
7679 }
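
/* Schematically (an illustrative sketch of the CFG shape handled
   above, where E2 is the edge created by split_edge and E the edge
   added afterwards):

              new_head
              /      \
         e   /        \   e2
            v          v
          first      second

   Each phi in SECOND already carries an argument for E2; the loop
   above copies that argument onto the matching phi in FIRST for
   edge E.  */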
7680
7681
7682 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7683 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7684 the destination of the ELSE part. */
7685
7686 static void
7687 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7688 basic_block second_head ATTRIBUTE_UNUSED,
7689 basic_block cond_bb, void *cond_e)
7690 {
7691 gimple_stmt_iterator gsi;
7692 gimple new_cond_expr;
7693 tree cond_expr = (tree) cond_e;
7694 edge e0;
7695
7696 /* Build the new conditional expr. */
7697 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7698 NULL_TREE, NULL_TREE);
7699
7700 /* Add new cond in cond_bb. */
7701 gsi = gsi_last_bb (cond_bb);
7702 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7703
7704 /* Adjust edges appropriately to connect new head with first head
7705 as well as second head. */
7706 e0 = single_succ_edge (cond_bb);
7707 e0->flags &= ~EDGE_FALLTHRU;
7708 e0->flags |= EDGE_FALSE_VALUE;
7709 }
7710
7711
7712 /* Do book-keeping of basic block BB for the profile consistency checker.
7713 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
7714 do post-pass accounting. Store the counts in RECORD. */
7715 static void
7716 gimple_account_profile_record (basic_block bb, int after_pass,
7717 struct profile_record *record)
7718 {
7719 gimple_stmt_iterator i;
7720 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7721 {
7722 record->size[after_pass]
7723 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7724 if (profile_status == PROFILE_READ)
7725 record->time[after_pass]
7726 += estimate_num_insns (gsi_stmt (i),
7727 &eni_time_weights) * bb->count;
7728 else if (profile_status == PROFILE_GUESSED)
7729 record->time[after_pass]
7730 += estimate_num_insns (gsi_stmt (i),
7731 &eni_time_weights) * bb->frequency;
7732 }
7733 }
7734
7735 struct cfg_hooks gimple_cfg_hooks = {
7736 "gimple",
7737 gimple_verify_flow_info,
7738 gimple_dump_bb, /* dump_bb */
7739 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7740 create_bb, /* create_basic_block */
7741 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7742 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7743 gimple_can_remove_branch_p, /* can_remove_branch_p */
7744 remove_bb, /* delete_basic_block */
7745 gimple_split_block, /* split_block */
7746 gimple_move_block_after, /* move_block_after */
7747 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7748 gimple_merge_blocks, /* merge_blocks */
7749 gimple_predict_edge, /* predict_edge */
7750 gimple_predicted_by_p, /* predicted_by_p */
7751 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7752 gimple_duplicate_bb, /* duplicate_block */
7753 gimple_split_edge, /* split_edge */
7754 gimple_make_forwarder_block, /* make_forward_block */
7755 NULL, /* tidy_fallthru_edge */
7756 NULL, /* force_nonfallthru */
7757 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7758 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7759 gimple_flow_call_edges_add, /* flow_call_edges_add */
7760 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7761 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7762 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7763 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7764 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7765 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7766 flush_pending_stmts, /* flush_pending_stmts */
7767 gimple_empty_block_p, /* block_empty_p */
7768 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7769 gimple_account_profile_record,
7770 };
7771
7772
7773 /* Split all critical edges. */
7774
7775 static unsigned int
7776 split_critical_edges (void)
7777 {
7778 basic_block bb;
7779 edge e;
7780 edge_iterator ei;
7781
7782 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7783 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7784 mappings around the calls to split_edge. */
7785 start_recording_case_labels ();
7786 FOR_ALL_BB (bb)
7787 {
7788 FOR_EACH_EDGE (e, ei, bb->succs)
7789 {
7790 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7791 split_edge (e);
7792 /* PRE inserts statements on edges and expects that,
7793 since split_critical_edges was done beforehand, committing edge
7794 insertions will not split more edges. In addition to critical
7795 edges we must split edges whose source block has multiple
7796 successors and ends in a control flow statement, such as RESX.
7797 Go ahead and split them too. This matches the logic in
7798 gimple_find_edge_insert_loc. */
7799 else if ((!single_pred_p (e->dest)
7800 || !gimple_seq_empty_p (phi_nodes (e->dest))
7801 || e->dest == EXIT_BLOCK_PTR)
7802 && e->src != ENTRY_BLOCK_PTR
7803 && !(e->flags & EDGE_ABNORMAL))
7804 {
7805 gimple_stmt_iterator gsi;
7806
7807 gsi = gsi_last_bb (e->src);
7808 if (!gsi_end_p (gsi)
7809 && stmt_ends_bb_p (gsi_stmt (gsi))
7810 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7811 && !gimple_call_builtin_p (gsi_stmt (gsi),
7812 BUILT_IN_RETURN)))
7813 split_edge (e);
7814 }
7815 }
7816 }
7817 end_recording_case_labels ();
7818 return 0;
7819 }
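
/* For reference: an edge is critical when its source block has
   multiple successors and its destination block has multiple
   predecessors, so no code can be inserted on it without creating a
   new block.  E.g. (illustrative), given edges A->C, A->D and B->C,
   the edge A->C is critical; split_edge replaces it with A->N and
   N->C for a fresh block N.  */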
7820
7821 namespace {
7822
7823 const pass_data pass_data_split_crit_edges =
7824 {
7825 GIMPLE_PASS, /* type */
7826 "crited", /* name */
7827 OPTGROUP_NONE, /* optinfo_flags */
7828 false, /* has_gate */
7829 true, /* has_execute */
7830 TV_TREE_SPLIT_EDGES, /* tv_id */
7831 PROP_cfg, /* properties_required */
7832 PROP_no_crit_edges, /* properties_provided */
7833 0, /* properties_destroyed */
7834 0, /* todo_flags_start */
7835 TODO_verify_flow, /* todo_flags_finish */
7836 };
7837
7838 class pass_split_crit_edges : public gimple_opt_pass
7839 {
7840 public:
7841 pass_split_crit_edges (gcc::context *ctxt)
7842 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
7843 {}
7844
7845 /* opt_pass methods: */
7846 unsigned int execute () { return split_critical_edges (); }
7847
7848 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
7849 }; // class pass_split_crit_edges
7850
7851 } // anon namespace
7852
7853 gimple_opt_pass *
7854 make_pass_split_crit_edges (gcc::context *ctxt)
7855 {
7856 return new pass_split_crit_edges (ctxt);
7857 }
7858
7859
7860 /* Build a ternary operation and gimplify it. Emit code before GSI.
7861 Return the gimple_val holding the result. */
7862
7863 tree
7864 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7865 tree type, tree a, tree b, tree c)
7866 {
7867 tree ret;
7868 location_t loc = gimple_location (gsi_stmt (*gsi));
7869
7870 ret = fold_build3_loc (loc, code, type, a, b, c);
7871 STRIP_NOPS (ret);
7872
7873 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7874 GSI_SAME_STMT);
7875 }
7876
7877 /* Build a binary operation and gimplify it. Emit code before GSI.
7878 Return the gimple_val holding the result. */
7879
7880 tree
7881 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7882 tree type, tree a, tree b)
7883 {
7884 tree ret;
7885
7886 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7887 STRIP_NOPS (ret);
7888
7889 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7890 GSI_SAME_STMT);
7891 }
7892
7893 /* Build a unary operation and gimplify it. Emit code before GSI.
7894 Return the gimple_val holding the result. */
7895
7896 tree
7897 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7898 tree a)
7899 {
7900 tree ret;
7901
7902 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7903 STRIP_NOPS (ret);
7904
7905 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7906 GSI_SAME_STMT);
7907 }
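
/* A minimal usage sketch for the three helpers above (names are
   illustrative; GSI points at the statement before which any needed
   code is emitted):

       tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
       tree neg = gimplify_build1 (&gsi, NEGATE_EXPR, type, sum);

   Each call folds the expression, gimplifies it, inserts the
   resulting statements before GSI, and returns a gimple value
   holding the result.  */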
7908
7909
7910 \f
7911 /* Emit return warnings. */
7912
7913 static unsigned int
7914 execute_warn_function_return (void)
7915 {
7916 source_location location;
7917 gimple last;
7918 edge e;
7919 edge_iterator ei;
7920
7921 if (!targetm.warn_func_return (cfun->decl))
7922 return 0;
7923
7924 /* If we have a path to EXIT, then we do return. */
7925 if (TREE_THIS_VOLATILE (cfun->decl)
7926 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7927 {
7928 location = UNKNOWN_LOCATION;
7929 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7930 {
7931 last = last_stmt (e->src);
7932 if ((gimple_code (last) == GIMPLE_RETURN
7933 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7934 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7935 break;
7936 }
7937 if (location == UNKNOWN_LOCATION)
7938 location = cfun->function_end_locus;
7939 warning_at (location, 0, "%<noreturn%> function does return");
7940 }
7941
7942 /* If we see "return;" in some basic block, then we do reach the end
7943 without returning a value. */
7944 else if (warn_return_type
7945 && !TREE_NO_WARNING (cfun->decl)
7946 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7947 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7948 {
7949 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7950 {
7951 gimple last = last_stmt (e->src);
7952 if (gimple_code (last) == GIMPLE_RETURN
7953 && gimple_return_retval (last) == NULL
7954 && !gimple_no_warning_p (last))
7955 {
7956 location = gimple_location (last);
7957 if (location == UNKNOWN_LOCATION)
7958 location = cfun->function_end_locus;
7959 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7960 TREE_NO_WARNING (cfun->decl) = 1;
7961 break;
7962 }
7963 }
7964 }
7965 return 0;
7966 }
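
/* Hypothetical user code illustrating the two diagnostics above:

     __attribute__ ((noreturn)) void f (void)
     { }                        // "'noreturn' function does return"

     int g (int x)
     { if (x) return 1; }       // "control reaches end of non-void function"

   The first fires because a function declared noreturn has a
   predecessor edge into EXIT; the second because the implicit
   "return;" at the end of g reaches EXIT without supplying a value. */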
7967
7968
7969 /* Given a basic block B which ends with a conditional and has
7970 precisely two successors, determine which of the edges is taken if
7971 the conditional is true and which is taken if the conditional is
7972 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7973
7974 void
7975 extract_true_false_edges_from_block (basic_block b,
7976 edge *true_edge,
7977 edge *false_edge)
7978 {
7979 edge e = EDGE_SUCC (b, 0);
7980
7981 if (e->flags & EDGE_TRUE_VALUE)
7982 {
7983 *true_edge = e;
7984 *false_edge = EDGE_SUCC (b, 1);
7985 }
7986 else
7987 {
7988 *false_edge = e;
7989 *true_edge = EDGE_SUCC (b, 1);
7990 }
7991 }
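
/* Typical caller pattern (names are placeholders):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);

   COND_BB must end in a two-outgoing-edge conditional, with one edge
   carrying EDGE_TRUE_VALUE and the other EDGE_FALSE_VALUE; the function
   only sorts the pair into the output parameters. */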
7992
7993 namespace {
7994
7995 const pass_data pass_data_warn_function_return =
7996 {
7997 GIMPLE_PASS, /* type */
7998 "*warn_function_return", /* name */
7999 OPTGROUP_NONE, /* optinfo_flags */
8000 false, /* has_gate */
8001 true, /* has_execute */
8002 TV_NONE, /* tv_id */
8003 PROP_cfg, /* properties_required */
8004 0, /* properties_provided */
8005 0, /* properties_destroyed */
8006 0, /* todo_flags_start */
8007 0, /* todo_flags_finish */
8008 };
8009
8010 class pass_warn_function_return : public gimple_opt_pass
8011 {
8012 public:
8013 pass_warn_function_return (gcc::context *ctxt)
8014 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8015 {}
8016
8017 /* opt_pass methods: */
8018 unsigned int execute () { return execute_warn_function_return (); }
8019
8020 }; // class pass_warn_function_return
8021
8022 } // anon namespace
8023
8024 gimple_opt_pass *
8025 make_pass_warn_function_return (gcc::context *ctxt)
8026 {
8027 return new pass_warn_function_return (ctxt);
8028 }
8029
8030 /* Walk a gimplified function and warn about calls whose return value is
8031 ignored when the callee is declared with attribute((warn_unused_result)).
8032 This is done before inlining, so inlined call sites need no special care. */
8033
8034 static void
8035 do_warn_unused_result (gimple_seq seq)
8036 {
8037 tree fdecl, ftype;
8038 gimple_stmt_iterator i;
8039
8040 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8041 {
8042 gimple g = gsi_stmt (i);
8043
8044 switch (gimple_code (g))
8045 {
8046 case GIMPLE_BIND:
8047 do_warn_unused_result (gimple_bind_body (g));
8048 break;
8049 case GIMPLE_TRY:
8050 do_warn_unused_result (gimple_try_eval (g));
8051 do_warn_unused_result (gimple_try_cleanup (g));
8052 break;
8053 case GIMPLE_CATCH:
8054 do_warn_unused_result (gimple_catch_handler (g));
8055 break;
8056 case GIMPLE_EH_FILTER:
8057 do_warn_unused_result (gimple_eh_filter_failure (g));
8058 break;
8059
8060 case GIMPLE_CALL:
8061 if (gimple_call_lhs (g))
8062 break;
8063 if (gimple_call_internal_p (g))
8064 break;
8065
8066 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8067 LHS. All calls whose value is ignored should be
8068 represented like this. Look for the attribute. */
8069 fdecl = gimple_call_fndecl (g);
8070 ftype = gimple_call_fntype (g);
8071
8072 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8073 {
8074 location_t loc = gimple_location (g);
8075
8076 if (fdecl)
8077 warning_at (loc, OPT_Wunused_result,
8078 "ignoring return value of %qD, "
8079 "declared with attribute warn_unused_result",
8080 fdecl);
8081 else
8082 warning_at (loc, OPT_Wunused_result,
8083 "ignoring return value of function "
8084 "declared with attribute warn_unused_result");
8085 }
8086 break;
8087
8088 default:
8089 /* Not a container, not a call, or a call whose value is used. */
8090 break;
8091 }
8092 }
8093 }
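
/* Hypothetical user code showing what the walk above does and does not
   diagnose:

     __attribute__ ((warn_unused_result)) int must_check (void);

     void use1 (void) { must_check (); }         // warned: call has no LHS
     void use2 (void) { (void) must_check (); }  // still warned: cast folds away
     int use3 (void) { return must_check (); }   // not warned: value is used

   Only GIMPLE_CALLs without an LHS are inspected, which is why a cast
   to void does not silence this particular warning. */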
8094
8095 static unsigned int
8096 run_warn_unused_result (void)
8097 {
8098 do_warn_unused_result (gimple_body (current_function_decl));
8099 return 0;
8100 }
8101
8102 static bool
8103 gate_warn_unused_result (void)
8104 {
8105 return flag_warn_unused_result;
8106 }
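
/* flag_warn_unused_result corresponds to -W(no-)unused-result on the
   command line (enabled by default), so the gate switches the whole
   pass off rather than filtering individual diagnostics. */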
8107
8108 namespace {
8109
8110 const pass_data pass_data_warn_unused_result =
8111 {
8112 GIMPLE_PASS, /* type */
8113 "*warn_unused_result", /* name */
8114 OPTGROUP_NONE, /* optinfo_flags */
8115 true, /* has_gate */
8116 true, /* has_execute */
8117 TV_NONE, /* tv_id */
8118 PROP_gimple_any, /* properties_required */
8119 0, /* properties_provided */
8120 0, /* properties_destroyed */
8121 0, /* todo_flags_start */
8122 0, /* todo_flags_finish */
8123 };
8124
8125 class pass_warn_unused_result : public gimple_opt_pass
8126 {
8127 public:
8128 pass_warn_unused_result (gcc::context *ctxt)
8129 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8130 {}
8131
8132 /* opt_pass methods: */
8133 bool gate () { return gate_warn_unused_result (); }
8134 unsigned int execute () { return run_warn_unused_result (); }
8135
8136 }; // class pass_warn_unused_result
8137
8138 } // anon namespace
8139
8140 gimple_opt_pass *
8141 make_pass_warn_unused_result (gcc::context *ctxt)
8142 {
8143 return new pass_warn_unused_result (ctxt);
8144 }
8145
8146 /* IPA passes, compilation of earlier functions, or inlining
8147 might have changed some properties, such as marking functions nothrow,
8148 pure, const or noreturn.
8149 Remove redundant edges and basic blocks, and create new ones if necessary.
8150
8151 This pass can't be executed as a standalone pass from the pass manager,
8152 because between inlining and this fixup verify_flow_info would fail. */
8153
8154 unsigned int
8155 execute_fixup_cfg (void)
8156 {
8157 basic_block bb;
8158 gimple_stmt_iterator gsi;
8159 int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
8160 gcov_type count_scale;
8161 edge e;
8162 edge_iterator ei;
8163
8164 count_scale
8165 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
8166 ENTRY_BLOCK_PTR->count);
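
/* Worked example of the scale just computed, assuming the usual
   REG_BR_PROB_BASE of 10000: if profile feedback now says the function
   is entered 50 times while ENTRY_BLOCK_PTR still records 100,
   count_scale is 10000 * 50 / 100 == 5000, and apply_scale below
   multiplies each stale count by 5000 / 10000, i.e. halves it. */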
8167
8168 ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
8169 EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
8170 count_scale);
8171
8172 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
8173 e->count = apply_scale (e->count, count_scale);
8174
8175 FOR_EACH_BB (bb)
8176 {
8177 bb->count = apply_scale (bb->count, count_scale);
8178 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8179 {
8180 gimple stmt = gsi_stmt (gsi);
8181 tree decl = is_gimple_call (stmt)
8182 ? gimple_call_fndecl (stmt)
8183 : NULL;
8184 if (decl)
8185 {
8186 int flags = gimple_call_flags (stmt);
8187 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8188 {
8189 if (gimple_purge_dead_abnormal_call_edges (bb))
8190 todo |= TODO_cleanup_cfg;
8191
8192 if (gimple_in_ssa_p (cfun))
8193 {
8194 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8195 update_stmt (stmt);
8196 }
8197 }
8198
8199 if (flags & ECF_NORETURN
8200 && fixup_noreturn_call (stmt))
8201 todo |= TODO_cleanup_cfg;
8202 }
8203
8204 if (maybe_clean_eh_stmt (stmt)
8205 && gimple_purge_dead_eh_edges (bb))
8206 todo |= TODO_cleanup_cfg;
8207 }
8208
8209 FOR_EACH_EDGE (e, ei, bb->succs)
8210 e->count = apply_scale (e->count, count_scale);
8211
8212 /* If we have a basic block with no successors that does not
8213 end with a control statement or a noreturn call, end it with
8214 a call to __builtin_unreachable. This situation can occur
8215 when inlining a noreturn call that does in fact return. */
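/* For instance, inlining a (buggy) definition such as

     __attribute__ ((noreturn)) void stop (void) { }   // falls off its end

   leaves a block with no successors and no control statement; the
   __builtin_unreachable call gives that block a well-formed ending. */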
8216 if (EDGE_COUNT (bb->succs) == 0)
8217 {
8218 gimple stmt = last_stmt (bb);
8219 if (!stmt
8220 || (!is_ctrl_stmt (stmt)
8221 && (!is_gimple_call (stmt)
8222 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8223 {
8224 stmt = gimple_build_call
8225 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8226 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8227 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8228 }
8229 }
8230 }
8231 if (count_scale != REG_BR_PROB_BASE)
8232 compute_function_frequency ();
8233
8234 /* We just processed all calls; release the list of calls recorded as possibly noreturn. */
8235 if (cfun->gimple_df)
8236 vec_free (MODIFIED_NORETURN_CALLS (cfun));
8237
8238 /* Dump a textual representation of the flowgraph. */
8239 if (dump_file)
8240 gimple_dump_cfg (dump_file, dump_flags);
8241
8242 if (current_loops
8243 && (todo & TODO_cleanup_cfg))
8244 loops_state_set (LOOPS_NEED_FIXUP);
8245
8246 return todo;
8247 }
8248
8249 namespace {
8250
8251 const pass_data pass_data_fixup_cfg =
8252 {
8253 GIMPLE_PASS, /* type */
8254 "*free_cfg_annotations", /* name */
8255 OPTGROUP_NONE, /* optinfo_flags */
8256 false, /* has_gate */
8257 true, /* has_execute */
8258 TV_NONE, /* tv_id */
8259 PROP_cfg, /* properties_required */
8260 0, /* properties_provided */
8261 0, /* properties_destroyed */
8262 0, /* todo_flags_start */
8263 0, /* todo_flags_finish */
8264 };
8265
8266 class pass_fixup_cfg : public gimple_opt_pass
8267 {
8268 public:
8269 pass_fixup_cfg (gcc::context *ctxt)
8270 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8271 {}
8272
8273 /* opt_pass methods: */
8274 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8275 unsigned int execute () { return execute_fixup_cfg (); }
8276
8277 }; // class pass_fixup_cfg
8278
8279 } // anon namespace
8280
8281 gimple_opt_pass *
8282 make_pass_fixup_cfg (gcc::context *ctxt)
8283 {
8284 return new pass_fixup_cfg (ctxt);
8285 }
8286
8287 /* Garbage collection support for edge_def. */
8288
8289 extern void gt_ggc_mx (tree&);
8290 extern void gt_ggc_mx (gimple&);
8291 extern void gt_ggc_mx (rtx&);
8292 extern void gt_ggc_mx (basic_block&);
8293
8294 void
8295 gt_ggc_mx (edge_def *e)
8296 {
8297 tree block = LOCATION_BLOCK (e->goto_locus);
8298 gt_ggc_mx (e->src);
8299 gt_ggc_mx (e->dest);
8300 if (current_ir_type () == IR_GIMPLE)
8301 gt_ggc_mx (e->insns.g);
8302 else
8303 gt_ggc_mx (e->insns.r);
8304 gt_ggc_mx (block);
8305 }
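
/* The IR check above matters because edge_def stores pending edge
   insertions in a union (insns.g for a gimple_seq, insns.r for an rtx);
   only the member that is live for the current IR may be followed,
   since the other interpretation of the bits would be a pointer of the
   wrong type. */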
8306
8307 /* PCH support for edge_def. */
8308
8309 extern void gt_pch_nx (tree&);
8310 extern void gt_pch_nx (gimple&);
8311 extern void gt_pch_nx (rtx&);
8312 extern void gt_pch_nx (basic_block&);
8313
8314 void
8315 gt_pch_nx (edge_def *e)
8316 {
8317 tree block = LOCATION_BLOCK (e->goto_locus);
8318 gt_pch_nx (e->src);
8319 gt_pch_nx (e->dest);
8320 if (current_ir_type () == IR_GIMPLE)
8321 gt_pch_nx (e->insns.g);
8322 else
8323 gt_pch_nx (e->insns.r);
8324 gt_pch_nx (block);
8325 }
8326
8327 void
8328 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8329 {
8330 tree block = LOCATION_BLOCK (e->goto_locus);
8331 op (&(e->src), cookie);
8332 op (&(e->dest), cookie);
8333 if (current_ir_type () == IR_GIMPLE)
8334 op (&(e->insns.g), cookie);
8335 else
8336 op (&(e->insns.r), cookie);
8337 op (&(block), cookie);
8338 }