gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "gimple-pretty-print.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"
#include "target.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree. */

/* Local declarations. */

/* Initial capacity for the basic block array. */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge. The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process. It would be nice if we could make the table
   more persistent. The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection). */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations. */

static bitmap touched_switch_bbs;

/* CFG statistics. */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks. */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus. */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers. */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t. ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator. */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map. A and B
   point to the two hash table entries to compare. */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table <locus_discrim_hasher> discriminator_per_locus;

/* Basic blocks and flowgraphs. */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges. */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers. */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup. */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array. */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks. */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees. SEQ is the sequence of
   statements to be added to the flowgraph. */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions. */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations. So we factor
     them to a common computed goto location before we build the
     edge list. After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump. */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty. */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array. */
  if (basic_block_info->length () < (size_t) n_basic_blocks)
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels. */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities. */
  group_case_labels ();

  /* Create the edges of the flowgraph. */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto. */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

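/* Editor's note (illustration, not part of the original sources):
   at the C level a computed goto uses the GNU "labels as values"
   extension, e.g.

       void *targets[] = { &&lab0, &&lab1 };
       goto *targets[i];
     lab0: ...;
     lab1: ...;

   In GIMPLE the destination of such a goto is a pointer value rather
   than a LABEL_DECL, which is exactly what computed_goto_p checks. */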

/* Search the CFG for any computed gotos. If found, factor them to a
   common computed goto site. Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form. */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto. */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos. */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it. */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination. */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto. Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below. */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto. */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto. */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR. */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination. */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}

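/* Editor's sketch of the factoring transformation performed above
   (illustration only; "factored" is a stand-in for the artificial
   label, "gotovar" is the temporary created above):

     before:                      after:
       bb1: goto *p1;               bb1: gotovar = p1; goto factored;
       bb2: goto *p2;               bb2: gotovar = p2; goto factored;
                                    factored: goto *gotovar;

   Only the single factored block then needs abnormal edges to every
   potential label target, instead of each computed-goto block
   carrying that full fan-out of edges. */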

/* Build a flowgraph for the sequence of stmts SEQ. */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now. */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes. */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration. */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS. This makes sure the old value
             of the LHS is available on the abnormal edge. Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names. */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER. */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block. Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here. */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks. */
  link_block (bb, after);

  /* Grow the basic block array if needed. */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array. */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR. */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

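/* Editor's illustration: with constant operands the condition folds
   to a constant. E.g. a GIMPLE_COND for "if (5 > 7)" folds to 0, so
   gimple_cond_make_false rewrites it into an always-false test, and
   the dead true edge can then be removed by later CFG cleanup. */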
/* Join all the blocks in the flowgraph. */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it. */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges. */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers. */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them. */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement. */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return. */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering. */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block. This will be
                 created later. */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them. */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge. */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all. */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections. */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH. */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit. */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR. */
  fold_cond_expr_cond ();
}

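/* Editor's illustration of the edges created above. For a body like

     if (a)
       x = 1;
     else
       x = 2;
     return x;

   the GIMPLE_COND block gets EDGE_TRUE_VALUE and EDGE_FALSE_VALUE
   edges (via make_cond_expr_edges), each assignment block gets an
   EDGE_FALLTHRU edge to the block holding the return, the
   GIMPLE_RETURN block gets an edge to EXIT_BLOCK_PTR, and ENTRY is
   wired to the first block with EDGE_FALLTHRU. */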
/* Find the next available discriminator value for LOCUS. The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling. */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

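/* Editor's illustration (approximate): for a line such as

     if (p) q = 1; else q = 2;

   the condition and both assignments share one source line, so the
   extra blocks on that line are handed out discriminators 1, 2, ...
   by the per-locus counter above. A sample-based profiler can then
   attribute counts to the individual blocks instead of lumping them
   together. */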
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block. */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB. */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component. */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore. */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element. */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels. */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels. */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded. */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL. */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available. Return NULL so the caller can detect this case. */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T. Add all the
     elements from T to the hash table then perform the query again. */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain. */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB. */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST. */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label. Emit a label to
     the very first basic block. This will hopefully make even the dataflow
     and undefined variable warnings quite right. */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for an abnormal goto statement at block BB. If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple label_stmt = gsi_stmt (gsi);
          tree target;

          if (gimple_code (label_stmt) != GIMPLE_LABEL)
            break;

          target = gimple_label_label (label_stmt);

          /* Make an edge to every label block that has been marked as a
             potential target for a computed goto or a non-local goto. */
          if ((FORCED_LABEL (target) && !for_call)
              || (DECL_NONLOCAL (target) && for_call))
            {
              make_edge (bb, target_bb, EDGE_ABNORMAL);
              break;
            }
        }
      if (!gsi_end_p (gsi)
          && is_gimple_debug (gsi_stmt (gsi)))
        gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
        {
          /* Make an edge to every setjmp-like call. */
          gimple call_stmt = gsi_stmt (gsi);
          if (is_gimple_call (call_stmt)
              && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
            make_edge (bb, target_bb, EDGE_ABNORMAL);
        }
    }
}

/* Create edges for a goto statement at block BB. */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges. */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges. */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB. */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

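/* Editor's illustration: the labels come from GNU C's "asm goto"
   construct, e.g.

     asm goto ("jc %l[carry]" : : : : carry);
     ...
   carry:
     ...

   Each label listed after the fourth colon becomes a plain outgoing
   edge of the block ending in the GIMPLE_ASM. */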
/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks. This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced. After that we do not run it any more,
   since (almost) no new labels should be created. */

/* A map from basic block index to the leading label of that block. */
static struct label_record
{
  /* The label. */
  tree label;

  /* True if the label is referenced from somewhere. */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block. */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain. */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree. */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels. This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels. */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block. We use the first user-defined
     label if there is one, or otherwise just the first label we see. */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels. */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label. */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement. */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels. */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges. */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels. */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels. All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved. */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it. */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3: */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases. */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case. */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels. Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one. */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive. */
          if (merge_bb == base_bb
              && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector. */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

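/* Editor's illustration: a switch like

     switch (x)
       {
       case 1: case 2: case 3: foo (); break;
       default: bar ();
       }

   has its three consecutive entries combined into the single range
   entry "case 1 ... 3:", and any case whose destination block is the
   same as the default's is dropped from the label vector entirely. */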
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases. Combine the ones jumping to the
   same label. */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A. */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks. */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged. */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B. */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels. */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches. */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B. If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis. Symbols marked for renaming are never a problem though. */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter. */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus. */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Return true if the var whose chain of uses starts at PTR has no
   nondebug uses. */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}

/* Return true if the var whose chain of uses starts at PTR has a
   single nondebug use. Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise. */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
                  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
        if (single_use)
          {
            single_use = NULL;
            break;
          }
        single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return !!single_use;
}

/* Replaces all uses of NAME by VAL. */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement. */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it. */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME. It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
             only change sth from non-invariant to invariant, and only
             when propagating constants. */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here. For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges. */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures. */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A. */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes. */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes. This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes. */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt. */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A. */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements. */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos). So we cannot just delete the
             label. Instead we move the label to the start of block A. */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug
             stmt. */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences. */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one. Else, return BB. We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved. */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* T is CALL_EXPR. Set current_function_calls_* flags. */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls. Used by dead code removal
   to update the flags. */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB. */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes. */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors. */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB. */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (dump_file, bb, 0, dump_flags);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it. */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block. */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts. See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details. */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality. */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA. Note that we
                 may be called when not in SSA. For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg. */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned. */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label; if the argument is
         not a label, we cannot construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe. */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}

/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
   statement, determine which of the outgoing edges will be taken out of the
   block. Return NULL if either edge may be taken. */

static edge
find_taken_edge_computed_goto (basic_block bb, tree val)
{
  basic_block dest;
  edge e = NULL;

  dest = label_to_block (val);
  if (dest)
    {
      e = find_edge (bb, dest);
      gcc_assert (e != NULL);
    }

  return e;
}

/* Given a constant value VAL and the entry block BB to a COND_EXPR
   statement, determine which of the two edges will be taken out of the
   block. Return NULL if either edge may be taken. */

static edge
find_taken_edge_cond_expr (basic_block bb, tree val)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

  gcc_assert (TREE_CODE (val) == INTEGER_CST);
  return (integer_zerop (val) ? false_edge : true_edge);
}

/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
   statement, determine which edge will be taken out of the block. Return
   NULL if any edge may be taken. */

static edge
find_taken_edge_switch_expr (basic_block bb, tree val)
{
  basic_block dest_bb;
  edge e;
  gimple switch_stmt;
  tree taken_case;

  switch_stmt = last_stmt (bb);
  taken_case = find_case_label_for_value (switch_stmt, val);
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (bb, dest_bb);
  gcc_assert (e);
  return e;
}


/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL. */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val. */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
        high = i;
      else
        low = i;

      if (CASE_HIGH (t) == NULL)
        {
2043 /* A single-valued case label. */
2044 if (cmp == 0)
2045 return t;
2046 }
2047 else
2048 {
2049 /* A case range. We can only handle integer ranges. */
2050 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2051 return t;
2052 }
2053 }
2054
2055 return default_case;
2056 }
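
/* A worked example of the binary search above (exposition only).  For

     switch (x) { default: ...; case 1: ...; case 5 ... 9: ...; case 12: ...; }

   the sorted label vector is [default, 1, 5...9, 12] with n == 4.
   Looking up VAL == 7 starts with low == 0, high == 4, probes i == 2,
   finds CASE_LOW == 5 <= 7 <= CASE_HIGH == 9 and returns the range
   label immediately.  Looking up VAL == 3 first shrinks high to 2
   (since 5 > 3), then probes i == 1 whose single value 1 != 3, and
   falls out of the loop to return the default label.  */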
2057
2058
2059 /* Dump a basic block on stderr. */
2060
2061 void
2062 gimple_debug_bb (basic_block bb)
2063 {
2064 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2065 }
2066
2067
2068 /* Dump basic block with index N on stderr. */
2069
2070 basic_block
2071 gimple_debug_bb_n (int n)
2072 {
2073 gimple_debug_bb (BASIC_BLOCK (n));
2074 return BASIC_BLOCK (n);
2075 }
2076
2077
2078 /* Dump the CFG on stderr.
2079
2080 FLAGS are the same used by the tree dumping functions
2081 (see TDF_* in dumpfile.h). */
2082
2083 void
2084 gimple_debug_cfg (int flags)
2085 {
2086 gimple_dump_cfg (stderr, flags);
2087 }
2088
2089
2090 /* Dump the program showing basic block boundaries on the given FILE.
2091
2092 FLAGS are the same used by the tree dumping functions (see TDF_* in
2093 tree.h). */
2094
2095 void
2096 gimple_dump_cfg (FILE *file, int flags)
2097 {
2098 if (flags & TDF_DETAILS)
2099 {
2100 dump_function_header (file, current_function_decl, flags);
2101 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2102 n_basic_blocks, n_edges, last_basic_block);
2103
2104 brief_dump_cfg (file, flags | TDF_COMMENT);
2105 fprintf (file, "\n");
2106 }
2107
2108 if (flags & TDF_STATS)
2109 dump_cfg_stats (file);
2110
2111 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2112 }
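
/* The TDF_STATS path above is reachable from the command line via the
   "stats" dump modifier, e.g. -fdump-tree-cfg-stats (assuming the
   usual mapping of dump modifiers to TDF_* flags in dumpfile.c).  */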
2113
2114
2115 /* Dump CFG statistics on FILE. */
2116
2117 void
2118 dump_cfg_stats (FILE *file)
2119 {
2120 static long max_num_merged_labels = 0;
2121 unsigned long size, total = 0;
2122 long num_edges;
2123 basic_block bb;
2124 const char * const fmt_str = "%-30s%-13s%12s\n";
2125 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2126 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2127 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2128 const char *funcname = current_function_name ();
2129
2130 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2131
2132 fprintf (file, "---------------------------------------------------------\n");
2133 fprintf (file, fmt_str, "", " Number of ", "Memory");
2134 fprintf (file, fmt_str, "", " instances ", "used ");
2135 fprintf (file, "---------------------------------------------------------\n");
2136
2137 size = n_basic_blocks * sizeof (struct basic_block_def);
2138 total += size;
2139 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2140 SCALE (size), LABEL (size));
2141
2142 num_edges = 0;
2143 FOR_EACH_BB (bb)
2144 num_edges += EDGE_COUNT (bb->succs);
2145 size = num_edges * sizeof (struct edge_def);
2146 total += size;
2147 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2148
2149 fprintf (file, "---------------------------------------------------------\n");
2150 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2151 LABEL (total));
2152 fprintf (file, "---------------------------------------------------------\n");
2153 fprintf (file, "\n");
2154
2155 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2156 max_num_merged_labels = cfg_stats.num_merged_labels;
2157
2158 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2159 cfg_stats.num_merged_labels, max_num_merged_labels);
2160
2161 fprintf (file, "\n");
2162 }
2163
2164
2165 /* Dump CFG statistics on stderr. Keep extern so that it's always
2166 linked in the final executable. */
2167
2168 DEBUG_FUNCTION void
2169 debug_cfg_stats (void)
2170 {
2171 dump_cfg_stats (stderr);
2172 }
2173
2174 /*---------------------------------------------------------------------------
2175 Miscellaneous helpers
2176 ---------------------------------------------------------------------------*/
2177
2178 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2179 flow. Transfers of control flow associated with EH are excluded. */
2180
2181 static bool
2182 call_can_make_abnormal_goto (gimple t)
2183 {
2184 /* If the function has no non-local labels and does not call setjmp,
2185 then a call cannot make an abnormal transfer of control. */
2186 if (!cfun->has_nonlocal_label
2187 && !cfun->calls_setjmp)
2188 return false;
2189
2190 /* Likewise if the call has no side effects. */
2191 if (!gimple_has_side_effects (t))
2192 return false;
2193
2194 /* Likewise if the called function is leaf. */
2195 if (gimple_call_flags (t) & ECF_LEAF)
2196 return false;
2197
2198 return true;
2199 }
2200
2201
2202 /* Return true if T can make an abnormal transfer of control flow.
2203 Transfers of control flow associated with EH are excluded. */
2204
2205 bool
2206 stmt_can_make_abnormal_goto (gimple t)
2207 {
2208 if (computed_goto_p (t))
2209 return true;
2210 if (is_gimple_call (t))
2211 return call_can_make_abnormal_goto (t);
2212 return false;
2213 }
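
/* An illustrative sketch (exposition only, hence the #if 0): passes
   that must be conservative around abnormal control flow can scan a
   block with the predicate above.  The helper name
   example_bb_has_abnormal_goto_p is hypothetical.  */
#if 0
static bool
example_bb_has_abnormal_goto_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    if (stmt_can_make_abnormal_goto (gsi_stmt (gsi)))
      return true;
  return false;
}
#endif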
2214
2215
2216 /* Return true if T represents a stmt that always transfers control. */
2217
2218 bool
2219 is_ctrl_stmt (gimple t)
2220 {
2221 switch (gimple_code (t))
2222 {
2223 case GIMPLE_COND:
2224 case GIMPLE_SWITCH:
2225 case GIMPLE_GOTO:
2226 case GIMPLE_RETURN:
2227 case GIMPLE_RESX:
2228 return true;
2229 default:
2230 return false;
2231 }
2232 }
2233
2234
2235 /* Return true if T is a statement that may alter the flow of control
2236 (e.g., a call to a non-returning function). */
2237
2238 bool
2239 is_ctrl_altering_stmt (gimple t)
2240 {
2241 gcc_assert (t);
2242
2243 switch (gimple_code (t))
2244 {
2245 case GIMPLE_CALL:
2246 {
2247 int flags = gimple_call_flags (t);
2248
2249 /* A call alters control flow if it can make an abnormal goto. */
2250 if (call_can_make_abnormal_goto (t))
2251 return true;
2252
2253 /* A call also alters control flow if it does not return. */
2254 if (flags & ECF_NORETURN)
2255 return true;
2256
2257 /* TM ending statements have backedges out of the transaction.
2258 Return true so we split the basic block containing them.
2259 Note that the TM_BUILTIN test is merely an optimization. */
2260 if ((flags & ECF_TM_BUILTIN)
2261 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2262 return true;
2263
2264 /* A BUILT_IN_RETURN call is the same as a return statement. */
2265 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2266 return true;
2267 }
2268 break;
2269
2270 case GIMPLE_EH_DISPATCH:
2271 /* EH_DISPATCH branches to the individual catch handlers at
2272 this level of a try or allowed-exceptions region. It can
2273 fallthru to the next statement as well. */
2274 return true;
2275
2276 case GIMPLE_ASM:
2277 if (gimple_asm_nlabels (t) > 0)
2278 return true;
2279 break;
2280
2281 CASE_GIMPLE_OMP:
2282 /* OpenMP directives alter control flow. */
2283 return true;
2284
2285 case GIMPLE_TRANSACTION:
2286 /* A transaction start alters control flow. */
2287 return true;
2288
2289 default:
2290 break;
2291 }
2292
2293 /* If a statement can throw, it alters control flow. */
2294 return stmt_can_throw_internal (t);
2295 }
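
/* For example, a call to abort () is not a control statement in the
   is_ctrl_stmt sense, but it is control-altering here because of
   ECF_NORETURN and therefore must end its basic block, whereas a call
   to a const leaf function that cannot throw is neither.  */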
2296
2297
2298 /* Return true if T is a simple local goto. */
2299
2300 bool
2301 simple_goto_p (gimple t)
2302 {
2303 return (gimple_code (t) == GIMPLE_GOTO
2304 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2305 }
2306
2307
2308 /* Return true if STMT should start a new basic block. PREV_STMT is
2309 the statement preceding STMT. It is used when STMT is a label or a
2310 case label. A label should only start a new basic block if the
2311 statement preceding it wasn't a label. Otherwise, a sequence of
2312 labels would generate unnecessary basic blocks that each contain
2313 only a single label. */
2314
2315 static inline bool
2316 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2317 {
2318 if (stmt == NULL)
2319 return false;
2320
2321 /* Labels start a new basic block only if the preceding statement
2322 wasn't a label of the same type. This prevents the creation of
2323 consecutive blocks that have nothing but a single label. */
2324 if (gimple_code (stmt) == GIMPLE_LABEL)
2325 {
2326 /* Nonlocal and computed GOTO targets always start a new block. */
2327 if (DECL_NONLOCAL (gimple_label_label (stmt))
2328 || FORCED_LABEL (gimple_label_label (stmt)))
2329 return true;
2330
2331 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2332 {
2333 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2334 return true;
2335
2336 cfg_stats.num_merged_labels++;
2337 return false;
2338 }
2339 else
2340 return true;
2341 }
2342 else if (gimple_code (stmt) == GIMPLE_CALL
2343 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2344 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2345 start a new block. */
2346 return true;
2347
2348 return false;
2349 }
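
/* For instance, in the sequence

     L1:
     L2:
     x_1 = a_2 + 1;

   stmt_starts_bb_p is true for L1 (assuming its predecessor is not
   itself a label) and false for L2, so both labels land in the same
   basic block and num_merged_labels is bumped.  Had L2 been a
   non-local label, it would have forced a block of its own.  */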
2350
2351
2352 /* Return true if T should end a basic block. */
2353
2354 bool
2355 stmt_ends_bb_p (gimple t)
2356 {
2357 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2358 }
2359
2360 /* Remove block annotations and other data structures. */
2361
2362 void
2363 delete_tree_cfg_annotations (void)
2364 {
2365 vec_free (label_to_block_map);
2366 }
2367
2368
2369 /* Return the first statement in basic block BB. */
2370
2371 gimple
2372 first_stmt (basic_block bb)
2373 {
2374 gimple_stmt_iterator i = gsi_start_bb (bb);
2375 gimple stmt = NULL;
2376
2377 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2378 {
2379 gsi_next (&i);
2380 stmt = NULL;
2381 }
2382 return stmt;
2383 }
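
/* For example, in a block whose body begins

     # DEBUG d => b_1
     a_5 = b_1 + 1;

   first_stmt skips the debug bind and returns "a_5 = b_1 + 1", while
   gsi_start_bb would hand back the debug statement first.  Skipping
   keeps decisions based on first_stmt independent of whether debug
   statements were generated at all.  */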
2384
2385 /* Return the first non-label statement in basic block BB. */
2386
2387 static gimple
2388 first_non_label_stmt (basic_block bb)
2389 {
2390 gimple_stmt_iterator i = gsi_start_bb (bb);
2391 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2392 gsi_next (&i);
2393 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2394 }
2395
2396 /* Return the last statement in basic block BB. */
2397
2398 gimple
2399 last_stmt (basic_block bb)
2400 {
2401 gimple_stmt_iterator i = gsi_last_bb (bb);
2402 gimple stmt = NULL;
2403
2404 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2405 {
2406 gsi_prev (&i);
2407 stmt = NULL;
2408 }
2409 return stmt;
2410 }
2411
2412 /* Return the last statement of an otherwise empty block. Return NULL
2413 if the block is totally empty, or if it contains more than one
2414 statement. */
2415
2416 gimple
2417 last_and_only_stmt (basic_block bb)
2418 {
2419 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2420 gimple last, prev;
2421
2422 if (gsi_end_p (i))
2423 return NULL;
2424
2425 last = gsi_stmt (i);
2426 gsi_prev_nondebug (&i);
2427 if (gsi_end_p (i))
2428 return last;
2429
2430 /* Empty statements should no longer appear in the instruction stream.
2431 Everything that might have appeared before should be deleted by
2432 remove_useless_stmts, and the optimizers should just gsi_remove
2433 instead of smashing with build_empty_stmt.
2434
2435 Thus the only thing that should appear here in a block containing
2436 one executable statement is a label. */
2437 prev = gsi_stmt (i);
2438 if (gimple_code (prev) == GIMPLE_LABEL)
2439 return last;
2440 else
2441 return NULL;
2442 }
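
/* An illustrative sketch (exposition only, hence the #if 0): a typical
   use of last_and_only_stmt is classifying near-empty blocks, e.g.
   blocks whose sole executable statement is a noreturn call.  The
   helper name example_noreturn_block_p is hypothetical.  */
#if 0
static bool
example_noreturn_block_p (basic_block bb)
{
  gimple stmt = last_and_only_stmt (bb);

  return (stmt != NULL
	  && is_gimple_call (stmt)
	  && gimple_call_noreturn_p (stmt));
}
#endif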
2443
2444 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2445
2446 static void
2447 reinstall_phi_args (edge new_edge, edge old_edge)
2448 {
2449 edge_var_map_vector *v;
2450 edge_var_map *vm;
2451 int i;
2452 gimple_stmt_iterator phis;
2453
2454 v = redirect_edge_var_map_vector (old_edge);
2455 if (!v)
2456 return;
2457
2458 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2459 v->iterate (i, &vm) && !gsi_end_p (phis);
2460 i++, gsi_next (&phis))
2461 {
2462 gimple phi = gsi_stmt (phis);
2463 tree result = redirect_edge_var_map_result (vm);
2464 tree arg = redirect_edge_var_map_def (vm);
2465
2466 gcc_assert (result == gimple_phi_result (phi));
2467
2468 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2469 }
2470
2471 redirect_edge_var_map_clear (old_edge);
2472 }
2473
2474 /* Returns the basic block after which the new basic block created
2475 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2476 near its "logical" location. This is of most help to humans looking
2477 at debugging dumps. */
2478
2479 static basic_block
2480 split_edge_bb_loc (edge edge_in)
2481 {
2482 basic_block dest = edge_in->dest;
2483 basic_block dest_prev = dest->prev_bb;
2484
2485 if (dest_prev)
2486 {
2487 edge e = find_edge (dest_prev, dest);
2488 if (e && !(e->flags & EDGE_COMPLEX))
2489 return edge_in->src;
2490 }
2491 return dest_prev;
2492 }
2493
2494 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2495 Abort on abnormal edges. */
2496
2497 static basic_block
2498 gimple_split_edge (edge edge_in)
2499 {
2500 basic_block new_bb, after_bb, dest;
2501 edge new_edge, e;
2502
2503 /* Abnormal edges cannot be split. */
2504 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2505
2506 dest = edge_in->dest;
2507
2508 after_bb = split_edge_bb_loc (edge_in);
2509
2510 new_bb = create_empty_bb (after_bb);
2511 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2512 new_bb->count = edge_in->count;
2513 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2514 new_edge->probability = REG_BR_PROB_BASE;
2515 new_edge->count = edge_in->count;
2516
2517 e = redirect_edge_and_branch (edge_in, new_bb);
2518 gcc_assert (e == edge_in);
2519 reinstall_phi_args (new_edge, e);
2520
2521 return new_bb;
2522 }
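
/* An illustrative sketch (exposition only, hence the #if 0): splitting
   every splittable critical edge through the cfghooks wrapper
   split_edge, which dispatches to gimple_split_edge for GIMPLE
   bodies; compare the real split_critical_edges pass
   (pass_split_crit_edges).  The helper name is hypothetical.  */
#if 0
static void
example_split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	split_edge (e);
}
#endif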
2523
2524
2525 /* Verify properties of the address expression T with base object BASE. */
2526
2527 static tree
2528 verify_address (tree t, tree base)
2529 {
2530 bool old_constant;
2531 bool old_side_effects;
2532 bool new_constant;
2533 bool new_side_effects;
2534
2535 old_constant = TREE_CONSTANT (t);
2536 old_side_effects = TREE_SIDE_EFFECTS (t);
2537
2538 recompute_tree_invariant_for_addr_expr (t);
2539 new_side_effects = TREE_SIDE_EFFECTS (t);
2540 new_constant = TREE_CONSTANT (t);
2541
2542 if (old_constant != new_constant)
2543 {
2544 error ("constant not recomputed when ADDR_EXPR changed");
2545 return t;
2546 }
2547 if (old_side_effects != new_side_effects)
2548 {
2549 error ("side effects not recomputed when ADDR_EXPR changed");
2550 return t;
2551 }
2552
2553 if (!(TREE_CODE (base) == VAR_DECL
2554 || TREE_CODE (base) == PARM_DECL
2555 || TREE_CODE (base) == RESULT_DECL))
2556 return NULL_TREE;
2557
2558 if (DECL_GIMPLE_REG_P (base))
2559 {
2560 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2561 return base;
2562 }
2563
2564 return NULL_TREE;
2565 }
2566
2567 /* Callback for walk_tree, check that all elements with address taken are
2568 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2569 inside a PHI node. */
2570
2571 static tree
2572 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2573 {
2574 tree t = *tp, x;
2575
2576 if (TYPE_P (t))
2577 *walk_subtrees = 0;
2578
2579 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2580 #define CHECK_OP(N, MSG) \
2581 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2582 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2583
2584 switch (TREE_CODE (t))
2585 {
2586 case SSA_NAME:
2587 if (SSA_NAME_IN_FREE_LIST (t))
2588 {
2589 error ("SSA name in freelist but still referenced");
2590 return *tp;
2591 }
2592 break;
2593
2594 case INDIRECT_REF:
2595 error ("INDIRECT_REF in gimple IL");
2596 return t;
2597
2598 case MEM_REF:
2599 x = TREE_OPERAND (t, 0);
2600 if (!POINTER_TYPE_P (TREE_TYPE (x))
2601 || !is_gimple_mem_ref_addr (x))
2602 {
2603 error ("invalid first operand of MEM_REF");
2604 return x;
2605 }
2606 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2607 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2608 {
2609 error ("invalid offset operand of MEM_REF");
2610 return TREE_OPERAND (t, 1);
2611 }
2612 if (TREE_CODE (x) == ADDR_EXPR
2613 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2614 return x;
2615 *walk_subtrees = 0;
2616 break;
2617
2618 case ASSERT_EXPR:
2619 x = fold (ASSERT_EXPR_COND (t));
2620 if (x == boolean_false_node)
2621 {
2622 error ("ASSERT_EXPR with an always-false condition");
2623 return *tp;
2624 }
2625 break;
2626
2627 case MODIFY_EXPR:
2628 error ("MODIFY_EXPR not expected while having tuples");
2629 return *tp;
2630
2631 case ADDR_EXPR:
2632 {
2633 tree tem;
2634
2635 gcc_assert (is_gimple_address (t));
2636
2637 /* Skip any references (they will be checked when we recurse down the
2638 tree) and ensure that any variable used as a prefix is marked
2639 addressable. */
2640 for (x = TREE_OPERAND (t, 0);
2641 handled_component_p (x);
2642 x = TREE_OPERAND (x, 0))
2643 ;
2644
2645 if ((tem = verify_address (t, x)))
2646 return tem;
2647
2648 if (!(TREE_CODE (x) == VAR_DECL
2649 || TREE_CODE (x) == PARM_DECL
2650 || TREE_CODE (x) == RESULT_DECL))
2651 return NULL;
2652
2653 if (!TREE_ADDRESSABLE (x))
2654 {
2655 error ("address taken, but ADDRESSABLE bit not set");
2656 return x;
2657 }
2658
2659 break;
2660 }
2661
2662 case COND_EXPR:
2663 x = COND_EXPR_COND (t);
2664 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2665 {
2666 error ("non-integral used in condition");
2667 return x;
2668 }
2669 if (!is_gimple_condexpr (x))
2670 {
2671 error ("invalid conditional operand");
2672 return x;
2673 }
2674 break;
2675
2676 case NON_LVALUE_EXPR:
2677 case TRUTH_NOT_EXPR:
2678 gcc_unreachable ();
2679
2680 CASE_CONVERT:
2681 case FIX_TRUNC_EXPR:
2682 case FLOAT_EXPR:
2683 case NEGATE_EXPR:
2684 case ABS_EXPR:
2685 case BIT_NOT_EXPR:
2686 CHECK_OP (0, "invalid operand to unary operator");
2687 break;
2688
2689 case REALPART_EXPR:
2690 case IMAGPART_EXPR:
2691 case BIT_FIELD_REF:
2692 if (!is_gimple_reg_type (TREE_TYPE (t)))
2693 {
2694 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2695 return t;
2696 }
2697
2698 if (TREE_CODE (t) == BIT_FIELD_REF)
2699 {
2700 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2701 || !host_integerp (TREE_OPERAND (t, 2), 1))
2702 {
2703 error ("invalid position or size operand to BIT_FIELD_REF");
2704 return t;
2705 }
2706 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2707 && (TYPE_PRECISION (TREE_TYPE (t))
2708 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2709 {
2710 error ("integral result type precision does not match "
2711 "field size of BIT_FIELD_REF");
2712 return t;
2713 }
2714 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2715 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2716 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2717 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2718 {
2719 error ("mode precision of non-integral result does not "
2720 "match field size of BIT_FIELD_REF");
2721 return t;
2722 }
2723 }
2724 t = TREE_OPERAND (t, 0);
2725
2726 /* Fall-through. */
2727 case COMPONENT_REF:
2728 case ARRAY_REF:
2729 case ARRAY_RANGE_REF:
2730 case VIEW_CONVERT_EXPR:
2731 /* We have a nest of references. Verify that each of the operands
2732 that determine where to reference is either a constant or a variable,
2733 verify that the base is valid, and then show we've already checked
2734 the subtrees. */
2735 while (handled_component_p (t))
2736 {
2737 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2738 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2739 else if (TREE_CODE (t) == ARRAY_REF
2740 || TREE_CODE (t) == ARRAY_RANGE_REF)
2741 {
2742 CHECK_OP (1, "invalid array index");
2743 if (TREE_OPERAND (t, 2))
2744 CHECK_OP (2, "invalid array lower bound");
2745 if (TREE_OPERAND (t, 3))
2746 CHECK_OP (3, "invalid array stride");
2747 }
2748 else if (TREE_CODE (t) == BIT_FIELD_REF
2749 || TREE_CODE (t) == REALPART_EXPR
2750 || TREE_CODE (t) == IMAGPART_EXPR)
2751 {
2752 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2753 "REALPART_EXPR");
2754 return t;
2755 }
2756
2757 t = TREE_OPERAND (t, 0);
2758 }
2759
2760 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2761 {
2762 error ("invalid reference prefix");
2763 return t;
2764 }
2765 *walk_subtrees = 0;
2766 break;
2767 case PLUS_EXPR:
2768 case MINUS_EXPR:
2769 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
2770 arithmetic should be done using POINTER_PLUS_EXPR. */
2771 if (POINTER_TYPE_P (TREE_TYPE (t)))
2772 {
2773 error ("invalid operand to plus/minus, type is a pointer");
2774 return t;
2775 }
2776 CHECK_OP (0, "invalid operand to binary operator");
2777 CHECK_OP (1, "invalid operand to binary operator");
2778 break;
2779
2780 case POINTER_PLUS_EXPR:
2781 /* Check to make sure the first operand is a pointer or reference type. */
2782 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2783 {
2784 error ("invalid operand to pointer plus, first operand is not a pointer");
2785 return t;
2786 }
2787 /* Check to make sure the second operand is a ptrofftype. */
2788 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2789 {
2790 error ("invalid operand to pointer plus, second operand is not an "
2791 "integer type of appropriate width");
2792 return t;
2793 }
2794 /* FALLTHROUGH */
2795 case LT_EXPR:
2796 case LE_EXPR:
2797 case GT_EXPR:
2798 case GE_EXPR:
2799 case EQ_EXPR:
2800 case NE_EXPR:
2801 case UNORDERED_EXPR:
2802 case ORDERED_EXPR:
2803 case UNLT_EXPR:
2804 case UNLE_EXPR:
2805 case UNGT_EXPR:
2806 case UNGE_EXPR:
2807 case UNEQ_EXPR:
2808 case LTGT_EXPR:
2809 case MULT_EXPR:
2810 case TRUNC_DIV_EXPR:
2811 case CEIL_DIV_EXPR:
2812 case FLOOR_DIV_EXPR:
2813 case ROUND_DIV_EXPR:
2814 case TRUNC_MOD_EXPR:
2815 case CEIL_MOD_EXPR:
2816 case FLOOR_MOD_EXPR:
2817 case ROUND_MOD_EXPR:
2818 case RDIV_EXPR:
2819 case EXACT_DIV_EXPR:
2820 case MIN_EXPR:
2821 case MAX_EXPR:
2822 case LSHIFT_EXPR:
2823 case RSHIFT_EXPR:
2824 case LROTATE_EXPR:
2825 case RROTATE_EXPR:
2826 case BIT_IOR_EXPR:
2827 case BIT_XOR_EXPR:
2828 case BIT_AND_EXPR:
2829 CHECK_OP (0, "invalid operand to binary operator");
2830 CHECK_OP (1, "invalid operand to binary operator");
2831 break;
2832
2833 case CONSTRUCTOR:
2834 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2835 *walk_subtrees = 0;
2836 break;
2837
2838 case CASE_LABEL_EXPR:
2839 if (CASE_CHAIN (t))
2840 {
2841 error ("invalid CASE_CHAIN");
2842 return t;
2843 }
2844 break;
2845
2846 default:
2847 break;
2848 }
2849 return NULL;
2850
2851 #undef CHECK_OP
2852 }
2853
2854
2855 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2856 Returns true if there is an error, otherwise false. */
2857
2858 static bool
2859 verify_types_in_gimple_min_lval (tree expr)
2860 {
2861 tree op;
2862
2863 if (is_gimple_id (expr))
2864 return false;
2865
2866 if (TREE_CODE (expr) != TARGET_MEM_REF
2867 && TREE_CODE (expr) != MEM_REF)
2868 {
2869 error ("invalid expression for min lvalue");
2870 return true;
2871 }
2872
2873 /* TARGET_MEM_REFs are strange beasts. */
2874 if (TREE_CODE (expr) == TARGET_MEM_REF)
2875 return false;
2876
2877 op = TREE_OPERAND (expr, 0);
2878 if (!is_gimple_val (op))
2879 {
2880 error ("invalid operand in indirect reference");
2881 debug_generic_stmt (op);
2882 return true;
2883 }
2884 /* Memory references now generally can involve a value conversion. */
2885
2886 return false;
2887 }
2888
2889 /* Verify if EXPR is a valid GIMPLE reference expression. If
2890 REQUIRE_LVALUE is true, verify that it is an lvalue. Returns true
2891 if there is an error, otherwise false. */
2892
2893 static bool
2894 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2895 {
2896 while (handled_component_p (expr))
2897 {
2898 tree op = TREE_OPERAND (expr, 0);
2899
2900 if (TREE_CODE (expr) == ARRAY_REF
2901 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2902 {
2903 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2904 || (TREE_OPERAND (expr, 2)
2905 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2906 || (TREE_OPERAND (expr, 3)
2907 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2908 {
2909 error ("invalid operands to array reference");
2910 debug_generic_stmt (expr);
2911 return true;
2912 }
2913 }
2914
2915 /* Verify if the reference array element types are compatible. */
2916 if (TREE_CODE (expr) == ARRAY_REF
2917 && !useless_type_conversion_p (TREE_TYPE (expr),
2918 TREE_TYPE (TREE_TYPE (op))))
2919 {
2920 error ("type mismatch in array reference");
2921 debug_generic_stmt (TREE_TYPE (expr));
2922 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2923 return true;
2924 }
2925 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2926 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2927 TREE_TYPE (TREE_TYPE (op))))
2928 {
2929 error ("type mismatch in array range reference");
2930 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2931 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2932 return true;
2933 }
2934
2935 if ((TREE_CODE (expr) == REALPART_EXPR
2936 || TREE_CODE (expr) == IMAGPART_EXPR)
2937 && !useless_type_conversion_p (TREE_TYPE (expr),
2938 TREE_TYPE (TREE_TYPE (op))))
2939 {
2940 error ("type mismatch in real/imagpart reference");
2941 debug_generic_stmt (TREE_TYPE (expr));
2942 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2943 return true;
2944 }
2945
2946 if (TREE_CODE (expr) == COMPONENT_REF
2947 && !useless_type_conversion_p (TREE_TYPE (expr),
2948 TREE_TYPE (TREE_OPERAND (expr, 1))))
2949 {
2950 error ("type mismatch in component reference");
2951 debug_generic_stmt (TREE_TYPE (expr));
2952 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2953 return true;
2954 }
2955
2956 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2957 {
2958 /* For VIEW_CONVERT_EXPRs, which are allowed here too, we only check
2959 that their operand is not an SSA name or an invariant when
2960 requiring an lvalue (this usually means there is an SRA or IPA-SRA
2961 bug). Otherwise there is nothing to verify; gross mismatches at
2962 most invoke undefined behavior. */
2963 if (require_lvalue
2964 && (TREE_CODE (op) == SSA_NAME
2965 || is_gimple_min_invariant (op)))
2966 {
2967 error ("conversion of an SSA_NAME on the left hand side");
2968 debug_generic_stmt (expr);
2969 return true;
2970 }
2971 else if (TREE_CODE (op) == SSA_NAME
2972 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2973 {
2974 error ("conversion of register to a different size");
2975 debug_generic_stmt (expr);
2976 return true;
2977 }
2978 else if (!handled_component_p (op))
2979 return false;
2980 }
2981
2982 expr = op;
2983 }
2984
2985 if (TREE_CODE (expr) == MEM_REF)
2986 {
2987 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
2988 {
2989 error ("invalid address operand in MEM_REF");
2990 debug_generic_stmt (expr);
2991 return true;
2992 }
2993 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
2994 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
2995 {
2996 error ("invalid offset operand in MEM_REF");
2997 debug_generic_stmt (expr);
2998 return true;
2999 }
3000 }
3001 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3002 {
3003 if (!TMR_BASE (expr)
3004 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3005 {
3006 error ("invalid address operand in TARGET_MEM_REF");
3007 return true;
3008 }
3009 if (!TMR_OFFSET (expr)
3010 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3011 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3012 {
3013 error ("invalid offset operand in TARGET_MEM_REF");
3014 debug_generic_stmt (expr);
3015 return true;
3016 }
3017 }
3018
3019 return ((require_lvalue || !is_gimple_min_invariant (expr))
3020 && verify_types_in_gimple_min_lval (expr));
3021 }
3022
3023 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3024 list of pointer-to types that is trivially convertible to DEST. */
3025
3026 static bool
3027 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3028 {
3029 tree src;
3030
3031 if (!TYPE_POINTER_TO (src_obj))
3032 return true;
3033
3034 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3035 if (useless_type_conversion_p (dest, src))
3036 return true;
3037
3038 return false;
3039 }
3040
3041 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3042 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3043
3044 static bool
3045 valid_fixed_convert_types_p (tree type1, tree type2)
3046 {
3047 return (FIXED_POINT_TYPE_P (type1)
3048 && (INTEGRAL_TYPE_P (type2)
3049 || SCALAR_FLOAT_TYPE_P (type2)
3050 || FIXED_POINT_TYPE_P (type2)));
3051 }
3052
3053 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3054 is a problem, otherwise false. */
3055
3056 static bool
3057 verify_gimple_call (gimple stmt)
3058 {
3059 tree fn = gimple_call_fn (stmt);
3060 tree fntype, fndecl;
3061 unsigned i;
3062
3063 if (gimple_call_internal_p (stmt))
3064 {
3065 if (fn)
3066 {
3067 error ("gimple call has two targets");
3068 debug_generic_stmt (fn);
3069 return true;
3070 }
3071 }
3072 else
3073 {
3074 if (!fn)
3075 {
3076 error ("gimple call has no target");
3077 return true;
3078 }
3079 }
3080
3081 if (fn && !is_gimple_call_addr (fn))
3082 {
3083 error ("invalid function in gimple call");
3084 debug_generic_stmt (fn);
3085 return true;
3086 }
3087
3088 if (fn
3089 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3090 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3091 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3092 {
3093 error ("non-function in gimple call");
3094 return true;
3095 }
3096
3097 fndecl = gimple_call_fndecl (stmt);
3098 if (fndecl
3099 && TREE_CODE (fndecl) == FUNCTION_DECL
3100 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3101 && !DECL_PURE_P (fndecl)
3102 && !TREE_READONLY (fndecl))
3103 {
3104 error ("invalid pure const state for function");
3105 return true;
3106 }
3107
3108 if (gimple_call_lhs (stmt)
3109 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3110 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3111 {
3112 error ("invalid LHS in gimple call");
3113 return true;
3114 }
3115
3116 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3117 {
3118 error ("LHS in noreturn call");
3119 return true;
3120 }
3121
3122 fntype = gimple_call_fntype (stmt);
3123 if (fntype
3124 && gimple_call_lhs (stmt)
3125 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3126 TREE_TYPE (fntype))
3127 /* ??? At least C++ misses conversions at assignments from
3128 void * call results.
3129 ??? Java is completely off. Especially with functions
3130 returning java.lang.Object.
3131 For now simply allow arbitrary pointer type conversions. */
3132 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3133 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3134 {
3135 error ("invalid conversion in gimple call");
3136 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3137 debug_generic_stmt (TREE_TYPE (fntype));
3138 return true;
3139 }
3140
3141 if (gimple_call_chain (stmt)
3142 && !is_gimple_val (gimple_call_chain (stmt)))
3143 {
3144 error ("invalid static chain in gimple call");
3145 debug_generic_stmt (gimple_call_chain (stmt));
3146 return true;
3147 }
3148
3149 /* If there is a static chain argument, this should not be an indirect
3150 call, and the decl should have DECL_STATIC_CHAIN set. */
3151 if (gimple_call_chain (stmt))
3152 {
3153 if (!gimple_call_fndecl (stmt))
3154 {
3155 error ("static chain in indirect gimple call");
3156 return true;
3157 }
3158 fn = TREE_OPERAND (fn, 0);
3159
3160 if (!DECL_STATIC_CHAIN (fn))
3161 {
3162 error ("static chain with function that doesn%'t use one");
3163 return true;
3164 }
3165 }
3166
3167 /* ??? The C frontend passes unpromoted arguments in case it
3168 didn't see a function declaration before the call. So for now
3169 leave the call arguments mostly unverified. Once we gimplify
3170 unit-at-a-time we have a chance to fix this. */
3171
3172 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3173 {
3174 tree arg = gimple_call_arg (stmt, i);
3175 if ((is_gimple_reg_type (TREE_TYPE (arg))
3176 && !is_gimple_val (arg))
3177 || (!is_gimple_reg_type (TREE_TYPE (arg))
3178 && !is_gimple_lvalue (arg)))
3179 {
3180 error ("invalid argument to gimple call");
3181 debug_generic_expr (arg);
3182 return true;
3183 }
3184 }
3185
3186 return false;
3187 }
3188
3189 /* Verifies the gimple comparison with the result type TYPE and
3190 the operands OP0 and OP1. */
3191
3192 static bool
3193 verify_gimple_comparison (tree type, tree op0, tree op1)
3194 {
3195 tree op0_type = TREE_TYPE (op0);
3196 tree op1_type = TREE_TYPE (op1);
3197
3198 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3199 {
3200 error ("invalid operands in gimple comparison");
3201 return true;
3202 }
3203
3204 /* For comparisons we do not have the operation's type as the
3205 effective type the comparison is carried out in. Instead
3206 we require that either the first operand is trivially
3207 convertible into the second, or the other way around.
3208 Because we special-case pointers to void we allow
3209 comparisons of pointers with the same mode as well. */
3210 if (!useless_type_conversion_p (op0_type, op1_type)
3211 && !useless_type_conversion_p (op1_type, op0_type)
3212 && (!POINTER_TYPE_P (op0_type)
3213 || !POINTER_TYPE_P (op1_type)
3214 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3215 {
3216 error ("mismatching comparison operand types");
3217 debug_generic_expr (op0_type);
3218 debug_generic_expr (op1_type);
3219 return true;
3220 }
3221
3222 /* The resulting type of a comparison may be an effective boolean type. */
3223 if (INTEGRAL_TYPE_P (type)
3224 && (TREE_CODE (type) == BOOLEAN_TYPE
3225 || TYPE_PRECISION (type) == 1))
3226 {
3227 if (TREE_CODE (op0_type) == VECTOR_TYPE
3228 || TREE_CODE (op1_type) == VECTOR_TYPE)
3229 {
3230 error ("vector comparison returning a boolean");
3231 debug_generic_expr (op0_type);
3232 debug_generic_expr (op1_type);
3233 return true;
3234 }
3235 }
3236 /* Or an integer vector type with the same size and element count
3237 as the comparison operand types. */
3238 else if (TREE_CODE (type) == VECTOR_TYPE
3239 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3240 {
3241 if (TREE_CODE (op0_type) != VECTOR_TYPE
3242 || TREE_CODE (op1_type) != VECTOR_TYPE)
3243 {
3244 error ("non-vector operands in vector comparison");
3245 debug_generic_expr (op0_type);
3246 debug_generic_expr (op1_type);
3247 return true;
3248 }
3249
3250 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3251 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3252 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3253 /* The result of a vector comparison is of signed
3254 integral type. */
3255 || TYPE_UNSIGNED (TREE_TYPE (type)))
3256 {
3257 error ("invalid vector comparison resulting type");
3258 debug_generic_expr (type);
3259 return true;
3260 }
3261 }
3262 else
3263 {
3264 error ("bogus comparison result type");
3265 debug_generic_expr (type);
3266 return true;
3267 }
3268
3269 return false;
3270 }
3271
3272 /* Verify a gimple assignment statement STMT with an unary rhs.
3273 Returns true if anything is wrong. */
3274
3275 static bool
3276 verify_gimple_assign_unary (gimple stmt)
3277 {
3278 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3279 tree lhs = gimple_assign_lhs (stmt);
3280 tree lhs_type = TREE_TYPE (lhs);
3281 tree rhs1 = gimple_assign_rhs1 (stmt);
3282 tree rhs1_type = TREE_TYPE (rhs1);
3283
3284 if (!is_gimple_reg (lhs))
3285 {
3286 error ("non-register as LHS of unary operation");
3287 return true;
3288 }
3289
3290 if (!is_gimple_val (rhs1))
3291 {
3292 error ("invalid operand in unary operation");
3293 return true;
3294 }
3295
3296 /* First handle conversions. */
3297 switch (rhs_code)
3298 {
3299 CASE_CONVERT:
3300 {
3301 /* Allow conversions from pointer type to integral type only if
3302 there is no sign or zero extension involved.
3303 For targets where the precision of ptrofftype doesn't match that
3304 of pointers we need to allow arbitrary conversions to ptrofftype. */
3305 if ((POINTER_TYPE_P (lhs_type)
3306 && INTEGRAL_TYPE_P (rhs1_type))
3307 || (POINTER_TYPE_P (rhs1_type)
3308 && INTEGRAL_TYPE_P (lhs_type)
3309 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3310 || ptrofftype_p (sizetype))))
3311 return false;
3312
3313 /* Allow conversion from integral to offset type and vice versa. */
3314 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3315 && INTEGRAL_TYPE_P (rhs1_type))
3316 || (INTEGRAL_TYPE_P (lhs_type)
3317 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3318 return false;
3319
3320 /* Otherwise assert we are converting between types of the
3321 same kind. */
3322 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3323 {
3324 error ("invalid types in nop conversion");
3325 debug_generic_expr (lhs_type);
3326 debug_generic_expr (rhs1_type);
3327 return true;
3328 }
3329
3330 return false;
3331 }
3332
3333 case ADDR_SPACE_CONVERT_EXPR:
3334 {
3335 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3336 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3337 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3338 {
3339 error ("invalid types in address space conversion");
3340 debug_generic_expr (lhs_type);
3341 debug_generic_expr (rhs1_type);
3342 return true;
3343 }
3344
3345 return false;
3346 }
3347
3348 case FIXED_CONVERT_EXPR:
3349 {
3350 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3351 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3352 {
3353 error ("invalid types in fixed-point conversion");
3354 debug_generic_expr (lhs_type);
3355 debug_generic_expr (rhs1_type);
3356 return true;
3357 }
3358
3359 return false;
3360 }
3361
3362 case FLOAT_EXPR:
3363 {
3364 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3365 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3366 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3367 {
3368 error ("invalid types in conversion to floating point");
3369 debug_generic_expr (lhs_type);
3370 debug_generic_expr (rhs1_type);
3371 return true;
3372 }
3373
3374 return false;
3375 }
3376
3377 case FIX_TRUNC_EXPR:
3378 {
3379 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3380 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3381 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3382 {
3383 error ("invalid types in conversion to integer");
3384 debug_generic_expr (lhs_type);
3385 debug_generic_expr (rhs1_type);
3386 return true;
3387 }
3388
3389 return false;
3390 }
3391
3392 case VEC_UNPACK_HI_EXPR:
3393 case VEC_UNPACK_LO_EXPR:
3394 case REDUC_MAX_EXPR:
3395 case REDUC_MIN_EXPR:
3396 case REDUC_PLUS_EXPR:
3397 case VEC_UNPACK_FLOAT_HI_EXPR:
3398 case VEC_UNPACK_FLOAT_LO_EXPR:
3399 /* FIXME. */
3400 return false;
3401
3402 case NEGATE_EXPR:
3403 case ABS_EXPR:
3404 case BIT_NOT_EXPR:
3405 case PAREN_EXPR:
3406 case NON_LVALUE_EXPR:
3407 case CONJ_EXPR:
3408 break;
3409
3410 default:
3411 gcc_unreachable ();
3412 }
3413
3414 /* For the remaining codes assert there is no conversion involved. */
3415 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3416 {
3417 error ("non-trivial conversion in unary operation");
3418 debug_generic_expr (lhs_type);
3419 debug_generic_expr (rhs1_type);
3420 return true;
3421 }
3422
3423 return false;
3424 }
3425
3426 /* Verify a gimple assignment statement STMT with a binary rhs.
3427 Returns true if anything is wrong. */
3428
3429 static bool
3430 verify_gimple_assign_binary (gimple stmt)
3431 {
3432 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3433 tree lhs = gimple_assign_lhs (stmt);
3434 tree lhs_type = TREE_TYPE (lhs);
3435 tree rhs1 = gimple_assign_rhs1 (stmt);
3436 tree rhs1_type = TREE_TYPE (rhs1);
3437 tree rhs2 = gimple_assign_rhs2 (stmt);
3438 tree rhs2_type = TREE_TYPE (rhs2);
3439
3440 if (!is_gimple_reg (lhs))
3441 {
3442 error ("non-register as LHS of binary operation");
3443 return true;
3444 }
3445
3446 if (!is_gimple_val (rhs1)
3447 || !is_gimple_val (rhs2))
3448 {
3449 error ("invalid operands in binary operation");
3450 return true;
3451 }
3452
3453 /* First handle operations that involve different types. */
3454 switch (rhs_code)
3455 {
3456 case COMPLEX_EXPR:
3457 {
3458 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3459 || !(INTEGRAL_TYPE_P (rhs1_type)
3460 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3461 || !(INTEGRAL_TYPE_P (rhs2_type)
3462 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3463 {
3464 error ("type mismatch in complex expression");
3465 debug_generic_expr (lhs_type);
3466 debug_generic_expr (rhs1_type);
3467 debug_generic_expr (rhs2_type);
3468 return true;
3469 }
3470
3471 return false;
3472 }
3473
3474 case LSHIFT_EXPR:
3475 case RSHIFT_EXPR:
3476 case LROTATE_EXPR:
3477 case RROTATE_EXPR:
3478 {
3479 /* Shifts and rotates are ok on integral types, fixed point
3480 types and integer vector types. */
3481 if ((!INTEGRAL_TYPE_P (rhs1_type)
3482 && !FIXED_POINT_TYPE_P (rhs1_type)
3483 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3484 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3485 || (!INTEGRAL_TYPE_P (rhs2_type)
3486 /* Vector shifts of vectors are also ok. */
3487 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3488 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3489 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3490 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3491 || !useless_type_conversion_p (lhs_type, rhs1_type))
3492 {
3493 error ("type mismatch in shift expression");
3494 debug_generic_expr (lhs_type);
3495 debug_generic_expr (rhs1_type);
3496 debug_generic_expr (rhs2_type);
3497 return true;
3498 }
3499
3500 return false;
3501 }
3502
3503 case VEC_LSHIFT_EXPR:
3504 case VEC_RSHIFT_EXPR:
3505 {
3506 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3507 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3508 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3509 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3510 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3511 || (!INTEGRAL_TYPE_P (rhs2_type)
3512 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3513 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3514 || !useless_type_conversion_p (lhs_type, rhs1_type))
3515 {
3516 error ("type mismatch in vector shift expression");
3517 debug_generic_expr (lhs_type);
3518 debug_generic_expr (rhs1_type);
3519 debug_generic_expr (rhs2_type);
3520 return true;
3521 }
3522 /* For shifting a vector of non-integral components we
3523 only allow shifting by a constant multiple of the element size. */
3524 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3525 && (TREE_CODE (rhs2) != INTEGER_CST
3526 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3527 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3528 {
3529 error ("non-element sized vector shift of floating point vector");
3530 return true;
3531 }
3532
3533 return false;
3534 }
3535
3536 case WIDEN_LSHIFT_EXPR:
3537 {
3538 if (!INTEGRAL_TYPE_P (lhs_type)
3539 || !INTEGRAL_TYPE_P (rhs1_type)
3540 || TREE_CODE (rhs2) != INTEGER_CST
3541 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3542 {
3543 error ("type mismatch in widening vector shift expression");
3544 debug_generic_expr (lhs_type);
3545 debug_generic_expr (rhs1_type);
3546 debug_generic_expr (rhs2_type);
3547 return true;
3548 }
3549
3550 return false;
3551 }
3552
3553 case VEC_WIDEN_LSHIFT_HI_EXPR:
3554 case VEC_WIDEN_LSHIFT_LO_EXPR:
3555 {
3556 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3557 || TREE_CODE (lhs_type) != VECTOR_TYPE
3558 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3559 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3560 || TREE_CODE (rhs2) != INTEGER_CST
3561 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3562 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3563 {
3564 error ("type mismatch in widening vector shift expression");
3565 debug_generic_expr (lhs_type);
3566 debug_generic_expr (rhs1_type);
3567 debug_generic_expr (rhs2_type);
3568 return true;
3569 }
3570
3571 return false;
3572 }
3573
3574 case PLUS_EXPR:
3575 case MINUS_EXPR:
3576 {
3577 tree lhs_etype = lhs_type;
3578 tree rhs1_etype = rhs1_type;
3579 tree rhs2_etype = rhs2_type;
3580 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3581 {
3582 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3583 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3584 {
3585 error ("invalid non-vector operands to vector valued plus");
3586 return true;
3587 }
3588 lhs_etype = TREE_TYPE (lhs_type);
3589 rhs1_etype = TREE_TYPE (rhs1_type);
3590 rhs2_etype = TREE_TYPE (rhs2_type);
3591 }
3592 if (POINTER_TYPE_P (lhs_etype)
3593 || POINTER_TYPE_P (rhs1_etype)
3594 || POINTER_TYPE_P (rhs2_etype))
3595 {
3596 error ("invalid (pointer) operands to plus/minus");
3597 return true;
3598 }
3599
3600 /* Continue with generic binary expression handling. */
3601 break;
3602 }
3603
3604 case POINTER_PLUS_EXPR:
3605 {
3606 if (!POINTER_TYPE_P (rhs1_type)
3607 || !useless_type_conversion_p (lhs_type, rhs1_type)
3608 || !ptrofftype_p (rhs2_type))
3609 {
3610 error ("type mismatch in pointer plus expression");
3611 debug_generic_stmt (lhs_type);
3612 debug_generic_stmt (rhs1_type);
3613 debug_generic_stmt (rhs2_type);
3614 return true;
3615 }
3616
3617 return false;
3618 }
3619
3620 case TRUTH_ANDIF_EXPR:
3621 case TRUTH_ORIF_EXPR:
3622 case TRUTH_AND_EXPR:
3623 case TRUTH_OR_EXPR:
3624 case TRUTH_XOR_EXPR:
3625
3626 gcc_unreachable ();
3627
3628 case LT_EXPR:
3629 case LE_EXPR:
3630 case GT_EXPR:
3631 case GE_EXPR:
3632 case EQ_EXPR:
3633 case NE_EXPR:
3634 case UNORDERED_EXPR:
3635 case ORDERED_EXPR:
3636 case UNLT_EXPR:
3637 case UNLE_EXPR:
3638 case UNGT_EXPR:
3639 case UNGE_EXPR:
3640 case UNEQ_EXPR:
3641 case LTGT_EXPR:
3642 /* Comparisons are also binary, but the result type is not
3643 connected to the operand types. */
3644 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3645
3646 case WIDEN_MULT_EXPR:
3647 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3648 return true;
3649 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3650 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3651
3652 case WIDEN_SUM_EXPR:
3653 case VEC_WIDEN_MULT_HI_EXPR:
3654 case VEC_WIDEN_MULT_LO_EXPR:
3655 case VEC_WIDEN_MULT_EVEN_EXPR:
3656 case VEC_WIDEN_MULT_ODD_EXPR:
3657 case VEC_PACK_TRUNC_EXPR:
3658 case VEC_PACK_SAT_EXPR:
3659 case VEC_PACK_FIX_TRUNC_EXPR:
3660 /* FIXME. */
3661 return false;
3662
3663 case MULT_EXPR:
3664 case MULT_HIGHPART_EXPR:
3665 case TRUNC_DIV_EXPR:
3666 case CEIL_DIV_EXPR:
3667 case FLOOR_DIV_EXPR:
3668 case ROUND_DIV_EXPR:
3669 case TRUNC_MOD_EXPR:
3670 case CEIL_MOD_EXPR:
3671 case FLOOR_MOD_EXPR:
3672 case ROUND_MOD_EXPR:
3673 case RDIV_EXPR:
3674 case EXACT_DIV_EXPR:
3675 case MIN_EXPR:
3676 case MAX_EXPR:
3677 case BIT_IOR_EXPR:
3678 case BIT_XOR_EXPR:
3679 case BIT_AND_EXPR:
3680 /* Continue with generic binary expression handling. */
3681 break;
3682
3683 default:
3684 gcc_unreachable ();
3685 }
3686
3687 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3688 || !useless_type_conversion_p (lhs_type, rhs2_type))
3689 {
3690 error ("type mismatch in binary expression");
3691 debug_generic_stmt (lhs_type);
3692 debug_generic_stmt (rhs1_type);
3693 debug_generic_stmt (rhs2_type);
3694 return true;
3695 }
3696
3697 return false;
3698 }
3699
3700 /* Verify a gimple assignment statement STMT with a ternary rhs.
3701 Returns true if anything is wrong. */
3702
3703 static bool
3704 verify_gimple_assign_ternary (gimple stmt)
3705 {
3706 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3707 tree lhs = gimple_assign_lhs (stmt);
3708 tree lhs_type = TREE_TYPE (lhs);
3709 tree rhs1 = gimple_assign_rhs1 (stmt);
3710 tree rhs1_type = TREE_TYPE (rhs1);
3711 tree rhs2 = gimple_assign_rhs2 (stmt);
3712 tree rhs2_type = TREE_TYPE (rhs2);
3713 tree rhs3 = gimple_assign_rhs3 (stmt);
3714 tree rhs3_type = TREE_TYPE (rhs3);
3715
3716 if (!is_gimple_reg (lhs))
3717 {
3718 error ("non-register as LHS of ternary operation");
3719 return true;
3720 }
3721
3722 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3723 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3724 || !is_gimple_val (rhs2)
3725 || !is_gimple_val (rhs3))
3726 {
3727 error ("invalid operands in ternary operation");
3728 return true;
3729 }
3730
3731 /* First handle operations that involve different types. */
3732 switch (rhs_code)
3733 {
3734 case WIDEN_MULT_PLUS_EXPR:
3735 case WIDEN_MULT_MINUS_EXPR:
3736 if ((!INTEGRAL_TYPE_P (rhs1_type)
3737 && !FIXED_POINT_TYPE_P (rhs1_type))
3738 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3739 || !useless_type_conversion_p (lhs_type, rhs3_type)
3740 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3741 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3742 {
3743 error ("type mismatch in widening multiply-accumulate expression");
3744 debug_generic_expr (lhs_type);
3745 debug_generic_expr (rhs1_type);
3746 debug_generic_expr (rhs2_type);
3747 debug_generic_expr (rhs3_type);
3748 return true;
3749 }
3750 break;
3751
3752 case FMA_EXPR:
3753 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3754 || !useless_type_conversion_p (lhs_type, rhs2_type)
3755 || !useless_type_conversion_p (lhs_type, rhs3_type))
3756 {
3757 error ("type mismatch in fused multiply-add expression");
3758 debug_generic_expr (lhs_type);
3759 debug_generic_expr (rhs1_type);
3760 debug_generic_expr (rhs2_type);
3761 debug_generic_expr (rhs3_type);
3762 return true;
3763 }
3764 break;
3765
3766 case COND_EXPR:
3767 case VEC_COND_EXPR:
3768 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3769 || !useless_type_conversion_p (lhs_type, rhs3_type))
3770 {
3771 error ("type mismatch in conditional expression");
3772 debug_generic_expr (lhs_type);
3773 debug_generic_expr (rhs2_type);
3774 debug_generic_expr (rhs3_type);
3775 return true;
3776 }
3777 break;
3778
3779 case VEC_PERM_EXPR:
3780 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3781 || !useless_type_conversion_p (lhs_type, rhs2_type))
3782 {
3783 error ("type mismatch in vector permute expression");
3784 debug_generic_expr (lhs_type);
3785 debug_generic_expr (rhs1_type);
3786 debug_generic_expr (rhs2_type);
3787 debug_generic_expr (rhs3_type);
3788 return true;
3789 }
3790
3791 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3792 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3793 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3794 {
3795 error ("vector types expected in vector permute expression");
3796 debug_generic_expr (lhs_type);
3797 debug_generic_expr (rhs1_type);
3798 debug_generic_expr (rhs2_type);
3799 debug_generic_expr (rhs3_type);
3800 return true;
3801 }
3802
3803 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3804 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3805 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3806 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3807 != TYPE_VECTOR_SUBPARTS (lhs_type))
3808 {
3809 error ("vectors with different element number found "
3810 "in vector permute expression");
3811 debug_generic_expr (lhs_type);
3812 debug_generic_expr (rhs1_type);
3813 debug_generic_expr (rhs2_type);
3814 debug_generic_expr (rhs3_type);
3815 return true;
3816 }
3817
3818 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3819 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3820 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3821 {
3822 error ("invalid mask type in vector permute expression");
3823 debug_generic_expr (lhs_type);
3824 debug_generic_expr (rhs1_type);
3825 debug_generic_expr (rhs2_type);
3826 debug_generic_expr (rhs3_type);
3827 return true;
3828 }
3829
3830 return false;
3831
3832 case DOT_PROD_EXPR:
3833 case REALIGN_LOAD_EXPR:
3834 /* FIXME. */
3835 return false;
3836
3837 default:
3838 gcc_unreachable ();
3839 }
3840 return false;
3841 }
3842
3843 /* Verify a gimple assignment statement STMT with a single rhs.
3844 Returns true if anything is wrong. */
3845
3846 static bool
3847 verify_gimple_assign_single (gimple stmt)
3848 {
3849 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3850 tree lhs = gimple_assign_lhs (stmt);
3851 tree lhs_type = TREE_TYPE (lhs);
3852 tree rhs1 = gimple_assign_rhs1 (stmt);
3853 tree rhs1_type = TREE_TYPE (rhs1);
3854 bool res = false;
3855
3856 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3857 {
3858 error ("non-trivial conversion at assignment");
3859 debug_generic_expr (lhs_type);
3860 debug_generic_expr (rhs1_type);
3861 return true;
3862 }
3863
3864 if (gimple_clobber_p (stmt)
3865 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3866 {
3867 error ("non-decl/MEM_REF LHS in clobber statement");
3868 debug_generic_expr (lhs);
3869 return true;
3870 }
3871
3872 if (handled_component_p (lhs))
3873 res |= verify_types_in_gimple_reference (lhs, true);
3874
3875 /* Special codes we cannot handle via their class. */
3876 switch (rhs_code)
3877 {
3878 case ADDR_EXPR:
3879 {
3880 tree op = TREE_OPERAND (rhs1, 0);
3881 if (!is_gimple_addressable (op))
3882 {
3883 error ("invalid operand in unary expression");
3884 return true;
3885 }
3886
3887 /* Technically there is no longer a need for matching types, but
3888 gimple hygiene asks for this check. In LTO we can end up
3889 combining incompatible units and thus end up with addresses
3890 of globals that change their type to a common one. */
3891 if (!in_lto_p
3892 && !types_compatible_p (TREE_TYPE (op),
3893 TREE_TYPE (TREE_TYPE (rhs1)))
3894 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3895 TREE_TYPE (op)))
3896 {
3897 error ("type mismatch in address expression");
3898 debug_generic_stmt (TREE_TYPE (rhs1));
3899 debug_generic_stmt (TREE_TYPE (op));
3900 return true;
3901 }
3902
3903 return verify_types_in_gimple_reference (op, true);
3904 }
3905
3906 /* tcc_reference */
3907 case INDIRECT_REF:
3908 error ("INDIRECT_REF in gimple IL");
3909 return true;
3910
3911 case COMPONENT_REF:
3912 case BIT_FIELD_REF:
3913 case ARRAY_REF:
3914 case ARRAY_RANGE_REF:
3915 case VIEW_CONVERT_EXPR:
3916 case REALPART_EXPR:
3917 case IMAGPART_EXPR:
3918 case TARGET_MEM_REF:
3919 case MEM_REF:
3920 if (!is_gimple_reg (lhs)
3921 && is_gimple_reg_type (TREE_TYPE (lhs)))
3922 {
3923 error ("invalid rhs for gimple memory store");
3924 debug_generic_stmt (lhs);
3925 debug_generic_stmt (rhs1);
3926 return true;
3927 }
3928 return res || verify_types_in_gimple_reference (rhs1, false);
3929
3930 /* tcc_constant */
3931 case SSA_NAME:
3932 case INTEGER_CST:
3933 case REAL_CST:
3934 case FIXED_CST:
3935 case COMPLEX_CST:
3936 case VECTOR_CST:
3937 case STRING_CST:
3938 return res;
3939
3940 /* tcc_declaration */
3941 case CONST_DECL:
3942 return res;
3943 case VAR_DECL:
3944 case PARM_DECL:
3945 if (!is_gimple_reg (lhs)
3946 && !is_gimple_reg (rhs1)
3947 && is_gimple_reg_type (TREE_TYPE (lhs)))
3948 {
3949 error ("invalid rhs for gimple memory store");
3950 debug_generic_stmt (lhs);
3951 debug_generic_stmt (rhs1);
3952 return true;
3953 }
3954 return res;
3955
3956 case CONSTRUCTOR:
3957 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3958 {
3959 unsigned int i;
3960 tree elt_i, elt_v, elt_t = NULL_TREE;
3961
3962 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3963 return res;
3964 /* For vector CONSTRUCTORs we require that either it is an empty
3965 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3966 (then the element count must be correct to cover the whole
3967 outer vector and the index must be NULL on all elements), or it
3968 is a CONSTRUCTOR of scalar elements, where we as an exception
3969 allow a smaller number of elements (assuming zero filling) and
3970 consecutive indexes as an alternative to NULL indexes (such
3971 CONSTRUCTORs can appear in the IL from FEs). */
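/* For illustration (hypothetical example, not from the sources being
   verified): assuming v4si is vector(4) int and v2si is vector(2) int,
   the checks below accept

     v4si x_1 = { };                      <- empty CONSTRUCTOR
     v4si x_1 = { a_2, b_3, c_4, d_5 };   <- scalars; indexes NULL
                                             or consecutive 0..3
     v4si x_1 = { lo_2, hi_3 };           <- two v2si; indexes NULL

   and reject three v2si elements (wrong element count) or scalar
   elements whose indexes are present but not consecutive.  */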
3972 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3973 {
3974 if (elt_t == NULL_TREE)
3975 {
3976 elt_t = TREE_TYPE (elt_v);
3977 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3978 {
3980 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3981 TREE_TYPE (elt_t)))
3982 {
3983 error ("incorrect type of vector CONSTRUCTOR"
3984 " elements");
3985 debug_generic_stmt (rhs1);
3986 return true;
3987 }
3988 else if (CONSTRUCTOR_NELTS (rhs1)
3989 * TYPE_VECTOR_SUBPARTS (elt_t)
3990 != TYPE_VECTOR_SUBPARTS (rhs1_type))
3991 {
3992 error ("incorrect number of vector CONSTRUCTOR"
3993 " elements");
3994 debug_generic_stmt (rhs1);
3995 return true;
3996 }
3997 }
3998 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3999 elt_t))
4000 {
4001 error ("incorrect type of vector CONSTRUCTOR elements");
4002 debug_generic_stmt (rhs1);
4003 return true;
4004 }
4005 else if (CONSTRUCTOR_NELTS (rhs1)
4006 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4007 {
4008 error ("incorrect number of vector CONSTRUCTOR elements");
4009 debug_generic_stmt (rhs1);
4010 return true;
4011 }
4012 }
4013 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4014 {
4015 error ("incorrect type of vector CONSTRUCTOR elements");
4016 debug_generic_stmt (rhs1);
4017 return true;
4018 }
4019 if (elt_i != NULL_TREE
4020 && (TREE_CODE (elt_t) == VECTOR_TYPE
4021 || TREE_CODE (elt_i) != INTEGER_CST
4022 || compare_tree_int (elt_i, i) != 0))
4023 {
4024 error ("vector CONSTRUCTOR with non-NULL element index");
4025 debug_generic_stmt (rhs1);
4026 return true;
4027 }
4028 }
4029 }
4030 return res;
4031 case OBJ_TYPE_REF:
4032 case ASSERT_EXPR:
4033 case WITH_SIZE_EXPR:
4034 /* FIXME. */
4035 return res;
4036
4037 default:;
4038 }
4039
4040 return res;
4041 }
4042
4043 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4044 is a problem, otherwise false. */
4045
4046 static bool
4047 verify_gimple_assign (gimple stmt)
4048 {
4049 switch (gimple_assign_rhs_class (stmt))
4050 {
4051 case GIMPLE_SINGLE_RHS:
4052 return verify_gimple_assign_single (stmt);
4053
4054 case GIMPLE_UNARY_RHS:
4055 return verify_gimple_assign_unary (stmt);
4056
4057 case GIMPLE_BINARY_RHS:
4058 return verify_gimple_assign_binary (stmt);
4059
4060 case GIMPLE_TERNARY_RHS:
4061 return verify_gimple_assign_ternary (stmt);
4062
4063 default:
4064 gcc_unreachable ();
4065 }
4066 }
4067
4068 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4069 is a problem, otherwise false. */
4070
4071 static bool
4072 verify_gimple_return (gimple stmt)
4073 {
4074 tree op = gimple_return_retval (stmt);
4075 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4076
4077 /* We cannot test for present return values as we do not fix up missing
4078 return values from the original source. */
4079 if (op == NULL)
4080 return false;
4081
4082 if (!is_gimple_val (op)
4083 && TREE_CODE (op) != RESULT_DECL)
4084 {
4085 error ("invalid operand in return statement");
4086 debug_generic_stmt (op);
4087 return true;
4088 }
4089
4090 if ((TREE_CODE (op) == RESULT_DECL
4091 && DECL_BY_REFERENCE (op))
4092 || (TREE_CODE (op) == SSA_NAME
4093 && SSA_NAME_VAR (op)
4094 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4095 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4096 op = TREE_TYPE (op);
4097
4098 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4099 {
4100 error ("invalid conversion in return statement");
4101 debug_generic_stmt (restype);
4102 debug_generic_stmt (TREE_TYPE (op));
4103 return true;
4104 }
4105
4106 return false;
4107 }
4108
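/* For illustration (hypothetical example): for a function returning
   "struct big" by invisible reference, the RESULT_DECL has the
   pointer type "struct big *" and DECL_BY_REFERENCE set.  The code
   above then substitutes the pointer type for OP, so the final
   useless_type_conversion_p check compares the pointed-to type
   "struct big" against the declared result type.  */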
4109
4110 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4111 is a problem, otherwise false. */
4112
4113 static bool
4114 verify_gimple_goto (gimple stmt)
4115 {
4116 tree dest = gimple_goto_dest (stmt);
4117
4118 /* ??? We have two canonical forms of direct goto destinations, a
4119 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4120 if (TREE_CODE (dest) != LABEL_DECL
4121 && (!is_gimple_val (dest)
4122 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4123 {
4124 error ("goto destination is neither a label nor a pointer");
4125 return true;
4126 }
4127
4128 return false;
4129 }
4130
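/* For illustration (hypothetical example): a direct "goto lab;"
   carries the bare LABEL_DECL for "lab" as its destination, while a
   computed "goto *p_1;" carries an SSA name of pointer type,
   typically fed from a label address such as &&lab.  Both forms pass
   the check above.  */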
4131 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4132 is a problem, otherwise false. */
4133
4134 static bool
4135 verify_gimple_switch (gimple stmt)
4136 {
4137 unsigned int i, n;
4138 tree elt, prev_upper_bound = NULL_TREE;
4139 tree index_type, elt_type = NULL_TREE;
4140
4141 if (!is_gimple_val (gimple_switch_index (stmt)))
4142 {
4143 error ("invalid operand to switch statement");
4144 debug_generic_stmt (gimple_switch_index (stmt));
4145 return true;
4146 }
4147
4148 index_type = TREE_TYPE (gimple_switch_index (stmt));
4149 if (! INTEGRAL_TYPE_P (index_type))
4150 {
4151 error ("non-integral type switch statement");
4152 debug_generic_expr (index_type);
4153 return true;
4154 }
4155
4156 elt = gimple_switch_label (stmt, 0);
4157 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4158 {
4159 error ("invalid default case label in switch statement");
4160 debug_generic_expr (elt);
4161 return true;
4162 }
4163
4164 n = gimple_switch_num_labels (stmt);
4165 for (i = 1; i < n; i++)
4166 {
4167 elt = gimple_switch_label (stmt, i);
4168
4169 if (! CASE_LOW (elt))
4170 {
4171 error ("invalid case label in switch statement");
4172 debug_generic_expr (elt);
4173 return true;
4174 }
4175 if (CASE_HIGH (elt)
4176 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4177 {
4178 error ("invalid case range in switch statement");
4179 debug_generic_expr (elt);
4180 return true;
4181 }
4182
4183 if (elt_type)
4184 {
4185 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4186 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4187 {
4188 error ("type mismatch for case label in switch statement");
4189 debug_generic_expr (elt);
4190 return true;
4191 }
4192 }
4193 else
4194 {
4195 elt_type = TREE_TYPE (CASE_LOW (elt));
4196 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4197 {
4198 error ("type precision mismatch in switch statement");
4199 return true;
4200 }
4201 }
4202
4203 if (prev_upper_bound)
4204 {
4205 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4206 {
4207 error ("case labels not sorted in switch statement");
4208 return true;
4209 }
4210 }
4211
4212 prev_upper_bound = CASE_HIGH (elt);
4213 if (! prev_upper_bound)
4214 prev_upper_bound = CASE_LOW (elt);
4215 }
4216
4217 return false;
4218 }
4219
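/* For illustration (hypothetical example), a case vector for
   "switch (i_1)" with int i_1 that passes the checks above:

     default:        CASE_LOW == NULL, CASE_HIGH == NULL  (index 0)
     case 1:         CASE_LOW == 1,    CASE_HIGH == NULL
     case 5 ... 7:   CASE_LOW == 5,    CASE_HIGH == 7

   The labels must be sorted by CASE_LOW, must not overlap, and all
   bounds must share one type whose precision does not exceed that of
   the index type.  */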
4220 /* Verify a gimple debug statement STMT.
4221 Returns true if anything is wrong. */
4222
4223 static bool
4224 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4225 {
4226 /* There isn't much that could be wrong in a gimple debug stmt. A
4227 gimple debug bind stmt, for example, maps a tree, usually
4228 a VAR_DECL or a PARM_DECL but possibly some scalarized
4229 component or member of an aggregate type, to another tree, which
4230 can be an arbitrary expression. These stmts expand into debug
4231 insns, and are converted to debug notes by var-tracking.c. */
4232 return false;
4233 }
4234
4235 /* Verify a gimple label statement STMT.
4236 Returns true if anything is wrong. */
4237
4238 static bool
4239 verify_gimple_label (gimple stmt)
4240 {
4241 tree decl = gimple_label_label (stmt);
4242 int uid;
4243 bool err = false;
4244
4245 if (TREE_CODE (decl) != LABEL_DECL)
4246 return true;
4247 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4248 && DECL_CONTEXT (decl) != current_function_decl)
4249 {
4250 error ("label's context is not the current function decl");
4251 err |= true;
4252 }
4253
4254 uid = LABEL_DECL_UID (decl);
4255 if (cfun->cfg
4256 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4257 {
4258 error ("incorrect entry in label_to_block_map");
4259 err |= true;
4260 }
4261
4262 uid = EH_LANDING_PAD_NR (decl);
4263 if (uid)
4264 {
4265 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4266 if (decl != lp->post_landing_pad)
4267 {
4268 error ("incorrect setting of landing pad number");
4269 err |= true;
4270 }
4271 }
4272
4273 return err;
4274 }
4275
4276 /* Verify the GIMPLE statement STMT. Returns true if there is an
4277 error, otherwise false. */
4278
4279 static bool
4280 verify_gimple_stmt (gimple stmt)
4281 {
4282 switch (gimple_code (stmt))
4283 {
4284 case GIMPLE_ASSIGN:
4285 return verify_gimple_assign (stmt);
4286
4287 case GIMPLE_LABEL:
4288 return verify_gimple_label (stmt);
4289
4290 case GIMPLE_CALL:
4291 return verify_gimple_call (stmt);
4292
4293 case GIMPLE_COND:
4294 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4295 {
4296 error ("invalid comparison code in gimple cond");
4297 return true;
4298 }
4299 if (!(!gimple_cond_true_label (stmt)
4300 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4301 || !(!gimple_cond_false_label (stmt)
4302 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4303 {
4304 error ("invalid labels in gimple cond");
4305 return true;
4306 }
4307
4308 return verify_gimple_comparison (boolean_type_node,
4309 gimple_cond_lhs (stmt),
4310 gimple_cond_rhs (stmt));
4311
4312 case GIMPLE_GOTO:
4313 return verify_gimple_goto (stmt);
4314
4315 case GIMPLE_SWITCH:
4316 return verify_gimple_switch (stmt);
4317
4318 case GIMPLE_RETURN:
4319 return verify_gimple_return (stmt);
4320
4321 case GIMPLE_ASM:
4322 return false;
4323
4324 case GIMPLE_TRANSACTION:
4325 return verify_gimple_transaction (stmt);
4326
4327 /* Tuples that do not have tree operands. */
4328 case GIMPLE_NOP:
4329 case GIMPLE_PREDICT:
4330 case GIMPLE_RESX:
4331 case GIMPLE_EH_DISPATCH:
4332 case GIMPLE_EH_MUST_NOT_THROW:
4333 return false;
4334
4335 CASE_GIMPLE_OMP:
4336 /* OpenMP directives are validated by the FE and never operated
4337 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4338 non-gimple expressions when the main index variable has had
4339 its address taken. This does not affect the loop itself
4340 because the header of a GIMPLE_OMP_FOR is merely used to determine
4341 how to set up the parallel iteration. */
4342 return false;
4343
4344 case GIMPLE_DEBUG:
4345 return verify_gimple_debug (stmt);
4346
4347 default:
4348 gcc_unreachable ();
4349 }
4350 }
4351
4352 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4353 and false otherwise. */
4354
4355 static bool
4356 verify_gimple_phi (gimple phi)
4357 {
4358 bool err = false;
4359 unsigned i;
4360 tree phi_result = gimple_phi_result (phi);
4361 bool virtual_p;
4362
4363 if (!phi_result)
4364 {
4365 error ("invalid PHI result");
4366 return true;
4367 }
4368
4369 virtual_p = virtual_operand_p (phi_result);
4370 if (TREE_CODE (phi_result) != SSA_NAME
4371 || (virtual_p
4372 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4373 {
4374 error ("invalid PHI result");
4375 err = true;
4376 }
4377
4378 for (i = 0; i < gimple_phi_num_args (phi); i++)
4379 {
4380 tree t = gimple_phi_arg_def (phi, i);
4381
4382 if (!t)
4383 {
4384 error ("missing PHI def");
4385 err |= true;
4386 continue;
4387 }
4388 /* Addressable variables do have SSA_NAMEs but they
4389 are not considered gimple values. */
4390 else if ((TREE_CODE (t) == SSA_NAME
4391 && virtual_p != virtual_operand_p (t))
4392 || (virtual_p
4393 && (TREE_CODE (t) != SSA_NAME
4394 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4395 || (!virtual_p
4396 && !is_gimple_val (t)))
4397 {
4398 error ("invalid PHI argument");
4399 debug_generic_expr (t);
4400 err |= true;
4401 }
4402 #ifdef ENABLE_TYPES_CHECKING
4403 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4404 {
4405 error ("incompatible types in PHI argument %u", i);
4406 debug_generic_stmt (TREE_TYPE (phi_result));
4407 debug_generic_stmt (TREE_TYPE (t));
4408 err |= true;
4409 }
4410 #endif
4411 }
4412
4413 return err;
4414 }
4415
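/* For illustration (hypothetical example): in a virtual PHI such as

     .MEM_3 = PHI <.MEM_1(2), .MEM_2(3)>

   the result and every argument must be SSA names of the single
   virtual operand, while in a real PHI such as

     x_4 = PHI <x_1(2), 5(3)>

   every argument must be a GIMPLE value, i.e. an SSA name or an
   invariant.  */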
4416 /* Verify the GIMPLE statements inside the sequence STMTS. */
4417
4418 static bool
4419 verify_gimple_in_seq_2 (gimple_seq stmts)
4420 {
4421 gimple_stmt_iterator ittr;
4422 bool err = false;
4423
4424 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4425 {
4426 gimple stmt = gsi_stmt (ittr);
4427
4428 switch (gimple_code (stmt))
4429 {
4430 case GIMPLE_BIND:
4431 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4432 break;
4433
4434 case GIMPLE_TRY:
4435 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4436 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4437 break;
4438
4439 case GIMPLE_EH_FILTER:
4440 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4441 break;
4442
4443 case GIMPLE_EH_ELSE:
4444 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4445 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4446 break;
4447
4448 case GIMPLE_CATCH:
4449 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4450 break;
4451
4452 case GIMPLE_TRANSACTION:
4453 err |= verify_gimple_transaction (stmt);
4454 break;
4455
4456 default:
4457 {
4458 bool err2 = verify_gimple_stmt (stmt);
4459 if (err2)
4460 debug_gimple_stmt (stmt);
4461 err |= err2;
4462 }
4463 }
4464 }
4465
4466 return err;
4467 }
4468
4469 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4470 is a problem, otherwise false. */
4471
4472 static bool
4473 verify_gimple_transaction (gimple stmt)
4474 {
4475 tree lab = gimple_transaction_label (stmt);
4476 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4477 return true;
4478 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4479 }
4480
4481
4482 /* Verify the GIMPLE statements inside the statement list STMTS. */
4483
4484 DEBUG_FUNCTION void
4485 verify_gimple_in_seq (gimple_seq stmts)
4486 {
4487 timevar_push (TV_TREE_STMT_VERIFY);
4488 if (verify_gimple_in_seq_2 (stmts))
4489 internal_error ("verify_gimple failed");
4490 timevar_pop (TV_TREE_STMT_VERIFY);
4491 }
4492
4493 /* Return true when the tree node T can be shared. */
4494
4495 bool
4496 tree_node_can_be_shared (tree t)
4497 {
4498 if (IS_TYPE_OR_DECL_P (t)
4499 || is_gimple_min_invariant (t)
4500 || TREE_CODE (t) == SSA_NAME
4501 || t == error_mark_node
4502 || TREE_CODE (t) == IDENTIFIER_NODE)
4503 return true;
4504
4505 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4506 return true;
4510
4511 return false;
4512 }
4513
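/* For illustration: a constant such as integer_zero_node or a
   VAR_DECL may legitimately appear many times in the IL, but an
   expression node such as an ARRAY_REF must be a distinct tree at
   every use; verify_node_sharing_1 below reports any unshareable
   node it reaches twice.  */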
4514 /* Called via walk_tree. Verify tree sharing. */
4515
4516 static tree
4517 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4518 {
4519 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4520
4521 if (tree_node_can_be_shared (*tp))
4522 {
4523 *walk_subtrees = false;
4524 return NULL;
4525 }
4526
4527 if (pointer_set_insert (visited, *tp))
4528 return *tp;
4529
4530 return NULL;
4531 }
4532
4533 /* Called via walk_gimple_stmt. Verify tree sharing. */
4534
4535 static tree
4536 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4537 {
4538 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4539 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4540 }
4541
4542 static bool eh_error_found;
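/* Called via htab_traverse.  Complain about statements that are
   recorded in the EH throw table but were not seen during the IL
   walk, i.e. are no longer present in the function body.  */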
4543 static int
4544 verify_eh_throw_stmt_node (void **slot, void *data)
4545 {
4546 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4547 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4548
4549 if (!pointer_set_contains (visited, node->stmt))
4550 {
4551 error ("dead STMT in EH table");
4552 debug_gimple_stmt (node->stmt);
4553 eh_error_found = true;
4554 }
4555 return 1;
4556 }
4557
4558 /* Verify that the block of location LOC is in the block tree BLOCKS. */
4559
4560 static bool
4561 verify_location (pointer_set_t *blocks, location_t loc)
4562 {
4563 tree block = LOCATION_BLOCK (loc);
4564 if (block != NULL_TREE
4565 && !pointer_set_contains (blocks, block))
4566 {
4567 error ("location references block not in block tree");
4568 return true;
4569 }
4570 if (block != NULL_TREE)
4571 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4572 return false;
4573 }
4574
4575 /* Called via walk_tree. Verify that expressions have no blocks. */
4576
4577 static tree
4578 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4579 {
4580 if (!EXPR_P (*tp))
4581 {
4582 *walk_subtrees = false;
4583 return NULL;
4584 }
4585
4586 location_t loc = EXPR_LOCATION (*tp);
4587 if (LOCATION_BLOCK (loc) != NULL)
4588 return *tp;
4589
4590 return NULL;
4591 }
4592
4593 /* Called via walk_tree. Verify locations of expressions. */
4594
4595 static tree
4596 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4597 {
4598 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4599
4600 if (TREE_CODE (*tp) == VAR_DECL
4601 && DECL_HAS_DEBUG_EXPR_P (*tp))
4602 {
4603 tree t = DECL_DEBUG_EXPR (*tp);
4604 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4605 if (addr)
4606 return addr;
4607 }
4608 if ((TREE_CODE (*tp) == VAR_DECL
4609 || TREE_CODE (*tp) == PARM_DECL
4610 || TREE_CODE (*tp) == RESULT_DECL)
4611 && DECL_HAS_VALUE_EXPR_P (*tp))
4612 {
4613 tree t = DECL_VALUE_EXPR (*tp);
4614 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4615 if (addr)
4616 return addr;
4617 }
4618
4619 if (!EXPR_P (*tp))
4620 {
4621 *walk_subtrees = false;
4622 return NULL;
4623 }
4624
4625 location_t loc = EXPR_LOCATION (*tp);
4626 if (verify_location (blocks, loc))
4627 return *tp;
4628
4629 return NULL;
4630 }
4631
4632 /* Called via walk_gimple_op. Verify locations of expressions. */
4633
4634 static tree
4635 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4636 {
4637 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4638 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4639 }
4640
4641 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4642
4643 static void
4644 collect_subblocks (pointer_set_t *blocks, tree block)
4645 {
4646 tree t;
4647 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4648 {
4649 pointer_set_insert (blocks, t);
4650 collect_subblocks (blocks, t);
4651 }
4652 }
4653
4654 /* Verify the GIMPLE statements in the CFG of FN. */
4655
4656 DEBUG_FUNCTION void
4657 verify_gimple_in_cfg (struct function *fn)
4658 {
4659 basic_block bb;
4660 bool err = false;
4661 struct pointer_set_t *visited, *visited_stmts, *blocks;
4662
4663 timevar_push (TV_TREE_STMT_VERIFY);
4664 visited = pointer_set_create ();
4665 visited_stmts = pointer_set_create ();
4666
4667 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4668 blocks = pointer_set_create ();
4669 if (DECL_INITIAL (fn->decl))
4670 {
4671 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4672 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4673 }
4674
4675 FOR_EACH_BB_FN (bb, fn)
4676 {
4677 gimple_stmt_iterator gsi;
4678
4679 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4680 {
4681 gimple phi = gsi_stmt (gsi);
4682 bool err2 = false;
4683 unsigned i;
4684
4685 pointer_set_insert (visited_stmts, phi);
4686
4687 if (gimple_bb (phi) != bb)
4688 {
4689 error ("gimple_bb (phi) is set to a wrong basic block");
4690 err2 = true;
4691 }
4692
4693 err2 |= verify_gimple_phi (phi);
4694
4695 /* Only PHI arguments have locations. */
4696 if (gimple_location (phi) != UNKNOWN_LOCATION)
4697 {
4698 error ("PHI node with location");
4699 err2 = true;
4700 }
4701
4702 for (i = 0; i < gimple_phi_num_args (phi); i++)
4703 {
4704 tree arg = gimple_phi_arg_def (phi, i);
4705 tree addr = walk_tree (&arg, verify_node_sharing_1,
4706 visited, NULL);
4707 if (addr)
4708 {
4709 error ("incorrect sharing of tree nodes");
4710 debug_generic_expr (addr);
4711 err2 |= true;
4712 }
4713 location_t loc = gimple_phi_arg_location (phi, i);
4714 if (virtual_operand_p (gimple_phi_result (phi))
4715 && loc != UNKNOWN_LOCATION)
4716 {
4717 error ("virtual PHI with argument locations");
4718 err2 = true;
4719 }
4720 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4721 if (addr)
4722 {
4723 debug_generic_expr (addr);
4724 err2 = true;
4725 }
4726 err2 |= verify_location (blocks, loc);
4727 }
4728
4729 if (err2)
4730 debug_gimple_stmt (phi);
4731 err |= err2;
4732 }
4733
4734 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4735 {
4736 gimple stmt = gsi_stmt (gsi);
4737 bool err2 = false;
4738 struct walk_stmt_info wi;
4739 tree addr;
4740 int lp_nr;
4741
4742 pointer_set_insert (visited_stmts, stmt);
4743
4744 if (gimple_bb (stmt) != bb)
4745 {
4746 error ("gimple_bb (stmt) is set to a wrong basic block");
4747 err2 = true;
4748 }
4749
4750 err2 |= verify_gimple_stmt (stmt);
4751 err2 |= verify_location (blocks, gimple_location (stmt));
4752
4753 memset (&wi, 0, sizeof (wi));
4754 wi.info = (void *) visited;
4755 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4756 if (addr)
4757 {
4758 error ("incorrect sharing of tree nodes");
4759 debug_generic_expr (addr);
4760 err2 |= true;
4761 }
4762
4763 memset (&wi, 0, sizeof (wi));
4764 wi.info = (void *) blocks;
4765 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4766 if (addr)
4767 {
4768 debug_generic_expr (addr);
4769 err2 |= true;
4770 }
4771
4772 /* ??? Instead of not checking these stmts at all, the walker
4773 should know its context via wi. */
4774 if (!is_gimple_debug (stmt)
4775 && !is_gimple_omp (stmt))
4776 {
4777 memset (&wi, 0, sizeof (wi));
4778 addr = walk_gimple_op (stmt, verify_expr, &wi);
4779 if (addr)
4780 {
4781 debug_generic_expr (addr);
4782 inform (gimple_location (stmt), "in statement");
4783 err2 |= true;
4784 }
4785 }
4786
4787 /* If the statement is marked as part of an EH region, then it is
4788 expected that the statement could throw. Verify that when we
4789 have optimizations that simplify statements such that we prove
4790 that they cannot throw, that we update other data structures
4791 to match. */
4792 lp_nr = lookup_stmt_eh_lp (stmt);
4793 if (lp_nr != 0)
4794 {
4795 if (!stmt_could_throw_p (stmt))
4796 {
4797 error ("statement marked for throw, but doesn%'t");
4798 err2 |= true;
4799 }
4800 else if (lp_nr > 0
4801 && !gsi_one_before_end_p (gsi)
4802 && stmt_can_throw_internal (stmt))
4803 {
4804 error ("statement marked for throw in middle of block");
4805 err2 |= true;
4806 }
4807 }
4808
4809 if (err2)
4810 debug_gimple_stmt (stmt);
4811 err |= err2;
4812 }
4813 }
4814
4815 eh_error_found = false;
4816 if (get_eh_throw_stmt_table (cfun))
4817 htab_traverse (get_eh_throw_stmt_table (cfun),
4818 verify_eh_throw_stmt_node,
4819 visited_stmts);
4820
4821 if (err || eh_error_found)
4822 internal_error ("verify_gimple failed");
4823
4824 pointer_set_destroy (visited);
4825 pointer_set_destroy (visited_stmts);
4826 pointer_set_destroy (blocks);
4827 verify_histograms ();
4828 timevar_pop (TV_TREE_STMT_VERIFY);
4829 }
4830
4831
4832 /* Verifies that the flow information is OK. */
4833
4834 static int
4835 gimple_verify_flow_info (void)
4836 {
4837 int err = 0;
4838 basic_block bb;
4839 gimple_stmt_iterator gsi;
4840 gimple stmt;
4841 edge e;
4842 edge_iterator ei;
4843
4844 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4845 {
4846 error ("ENTRY_BLOCK has IL associated with it");
4847 err = 1;
4848 }
4849
4850 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4851 {
4852 error ("EXIT_BLOCK has IL associated with it");
4853 err = 1;
4854 }
4855
4856 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4857 if (e->flags & EDGE_FALLTHRU)
4858 {
4859 error ("fallthru to exit from bb %d", e->src->index);
4860 err = 1;
4861 }
4862
4863 FOR_EACH_BB (bb)
4864 {
4865 bool found_ctrl_stmt = false;
4866
4867 stmt = NULL;
4868
4869 /* Skip labels at the start of the basic block. */
4870 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4871 {
4872 tree label;
4873 gimple prev_stmt = stmt;
4874
4875 stmt = gsi_stmt (gsi);
4876
4877 if (gimple_code (stmt) != GIMPLE_LABEL)
4878 break;
4879
4880 label = gimple_label_label (stmt);
4881 if (prev_stmt && DECL_NONLOCAL (label))
4882 {
4883 error ("nonlocal label ");
4884 print_generic_expr (stderr, label, 0);
4885 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4886 bb->index);
4887 err = 1;
4888 }
4889
4890 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4891 {
4892 error ("EH landing pad label ");
4893 print_generic_expr (stderr, label, 0);
4894 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4895 bb->index);
4896 err = 1;
4897 }
4898
4899 if (label_to_block (label) != bb)
4900 {
4901 error ("label ");
4902 print_generic_expr (stderr, label, 0);
4903 fprintf (stderr, " to block does not match in bb %d",
4904 bb->index);
4905 err = 1;
4906 }
4907
4908 if (decl_function_context (label) != current_function_decl)
4909 {
4910 error ("label ");
4911 print_generic_expr (stderr, label, 0);
4912 fprintf (stderr, " has incorrect context in bb %d",
4913 bb->index);
4914 err = 1;
4915 }
4916 }
4917
4918 /* Verify that the body of basic block BB is free of control flow. */
4919 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4920 {
4921 gimple stmt = gsi_stmt (gsi);
4922
4923 if (found_ctrl_stmt)
4924 {
4925 error ("control flow in the middle of basic block %d",
4926 bb->index);
4927 err = 1;
4928 }
4929
4930 if (stmt_ends_bb_p (stmt))
4931 found_ctrl_stmt = true;
4932
4933 if (gimple_code (stmt) == GIMPLE_LABEL)
4934 {
4935 error ("label ");
4936 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4937 fprintf (stderr, " in the middle of basic block %d", bb->index);
4938 err = 1;
4939 }
4940 }
4941
4942 gsi = gsi_last_bb (bb);
4943 if (gsi_end_p (gsi))
4944 continue;
4945
4946 stmt = gsi_stmt (gsi);
4947
4948 if (gimple_code (stmt) == GIMPLE_LABEL)
4949 continue;
4950
4951 err |= verify_eh_edges (stmt);
4952
4953 if (is_ctrl_stmt (stmt))
4954 {
4955 FOR_EACH_EDGE (e, ei, bb->succs)
4956 if (e->flags & EDGE_FALLTHRU)
4957 {
4958 error ("fallthru edge after a control statement in bb %d",
4959 bb->index);
4960 err = 1;
4961 }
4962 }
4963
4964 if (gimple_code (stmt) != GIMPLE_COND)
4965 {
4966 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
4967 after anything other than a GIMPLE_COND statement. */
4968 FOR_EACH_EDGE (e, ei, bb->succs)
4969 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4970 {
4971 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4972 bb->index);
4973 err = 1;
4974 }
4975 }
4976
4977 switch (gimple_code (stmt))
4978 {
4979 case GIMPLE_COND:
4980 {
4981 edge true_edge;
4982 edge false_edge;
4983
4984 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4985
4986 if (!true_edge
4987 || !false_edge
4988 || !(true_edge->flags & EDGE_TRUE_VALUE)
4989 || !(false_edge->flags & EDGE_FALSE_VALUE)
4990 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4991 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4992 || EDGE_COUNT (bb->succs) >= 3)
4993 {
4994 error ("wrong outgoing edge flags at end of bb %d",
4995 bb->index);
4996 err = 1;
4997 }
4998 }
4999 break;
5000
5001 case GIMPLE_GOTO:
5002 if (simple_goto_p (stmt))
5003 {
5004 error ("explicit goto at end of bb %d", bb->index);
5005 err = 1;
5006 }
5007 else
5008 {
5009 /* FIXME. We should double check that the labels in the
5010 destination blocks have their address taken. */
5011 FOR_EACH_EDGE (e, ei, bb->succs)
5012 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5013 | EDGE_FALSE_VALUE))
5014 || !(e->flags & EDGE_ABNORMAL))
5015 {
5016 error ("wrong outgoing edge flags at end of bb %d",
5017 bb->index);
5018 err = 1;
5019 }
5020 }
5021 break;
5022
5023 case GIMPLE_CALL:
5024 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5025 break;
5026 /* ... fallthru ... */
5027 case GIMPLE_RETURN:
5028 if (!single_succ_p (bb)
5029 || (single_succ_edge (bb)->flags
5030 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5031 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5032 {
5033 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5034 err = 1;
5035 }
5036 if (single_succ (bb) != EXIT_BLOCK_PTR)
5037 {
5038 error ("return edge does not point to exit in bb %d",
5039 bb->index);
5040 err = 1;
5041 }
5042 break;
5043
5044 case GIMPLE_SWITCH:
5045 {
5046 tree prev;
5047 edge e;
5048 size_t i, n;
5049
5050 n = gimple_switch_num_labels (stmt);
5051
5052 /* Mark all the destination basic blocks. */
5053 for (i = 0; i < n; ++i)
5054 {
5055 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5056 basic_block label_bb = label_to_block (lab);
5057 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5058 label_bb->aux = (void *)1;
5059 }
5060
5061 /* Verify that the case labels are sorted. */
5062 prev = gimple_switch_label (stmt, 0);
5063 for (i = 1; i < n; ++i)
5064 {
5065 tree c = gimple_switch_label (stmt, i);
5066 if (!CASE_LOW (c))
5067 {
5068 error ("found default case not at the start of "
5069 "case vector");
5070 err = 1;
5071 continue;
5072 }
5073 if (CASE_LOW (prev)
5074 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5075 {
5076 error ("case labels not sorted: ");
5077 print_generic_expr (stderr, prev, 0);
5078 fprintf (stderr, " is greater than ");
5079 print_generic_expr (stderr, c, 0);
5080 fprintf (stderr, " but comes before it.\n");
5081 err = 1;
5082 }
5083 prev = c;
5084 }
5085 /* VRP will remove the default case if it can prove it will
5086 never be executed. So do not verify there always exists
5087 a default case here. */
5088
5089 FOR_EACH_EDGE (e, ei, bb->succs)
5090 {
5091 if (!e->dest->aux)
5092 {
5093 error ("extra outgoing edge %d->%d",
5094 bb->index, e->dest->index);
5095 err = 1;
5096 }
5097
5098 e->dest->aux = (void *)2;
5099 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5100 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5101 {
5102 error ("wrong outgoing edge flags at end of bb %d",
5103 bb->index);
5104 err = 1;
5105 }
5106 }
5107
5108 /* Check that we have all of them. */
5109 for (i = 0; i < n; ++i)
5110 {
5111 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5112 basic_block label_bb = label_to_block (lab);
5113
5114 if (label_bb->aux != (void *)2)
5115 {
5116 error ("missing edge %i->%i", bb->index, label_bb->index);
5117 err = 1;
5118 }
5119 }
5120
5121 FOR_EACH_EDGE (e, ei, bb->succs)
5122 e->dest->aux = (void *)0;
5123 }
5124 break;
5125
5126 case GIMPLE_EH_DISPATCH:
5127 err |= verify_eh_dispatch_edge (stmt);
5128 break;
5129
5130 default:
5131 break;
5132 }
5133 }
5134
5135 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5136 verify_dominators (CDI_DOMINATORS);
5137
5138 return err;
5139 }
5140
5141
5142 /* Updates phi nodes after creating a forwarder block joined
5143 by edge FALLTHRU. */
5144
5145 static void
5146 gimple_make_forwarder_block (edge fallthru)
5147 {
5148 edge e;
5149 edge_iterator ei;
5150 basic_block dummy, bb;
5151 tree var;
5152 gimple_stmt_iterator gsi;
5153
5154 dummy = fallthru->src;
5155 bb = fallthru->dest;
5156
5157 if (single_pred_p (bb))
5158 return;
5159
5160 /* If we redirected a branch we must create new PHI nodes at the
5161 start of BB. */
5162 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5163 {
5164 gimple phi, new_phi;
5165
5166 phi = gsi_stmt (gsi);
5167 var = gimple_phi_result (phi);
5168 new_phi = create_phi_node (var, bb);
5169 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5170 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5171 UNKNOWN_LOCATION);
5172 }
5173
5174 /* Add the arguments we have stored on edges. */
5175 FOR_EACH_EDGE (e, ei, bb->preds)
5176 {
5177 if (e == fallthru)
5178 continue;
5179
5180 flush_pending_stmts (e);
5181 }
5182 }
5183
5184
5185 /* Return a non-special label at the head of basic block BB.
5186 Create one if it doesn't exist. */
5187
5188 tree
5189 gimple_block_label (basic_block bb)
5190 {
5191 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5192 bool first = true;
5193 tree label;
5194 gimple stmt;
5195
5196 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5197 {
5198 stmt = gsi_stmt (i);
5199 if (gimple_code (stmt) != GIMPLE_LABEL)
5200 break;
5201 label = gimple_label_label (stmt);
5202 if (!DECL_NONLOCAL (label))
5203 {
5204 if (!first)
5205 gsi_move_before (&i, &s);
5206 return label;
5207 }
5208 }
5209
5210 label = create_artificial_label (UNKNOWN_LOCATION);
5211 stmt = gimple_build_label (label);
5212 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5213 return label;
5214 }
5215
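/* For illustration: the edge redirection code below relies on this,
   e.g. redirecting a GIMPLE_SWITCH edge to DEST calls
   gimple_block_label (dest) to obtain a label that the CASE_LABELs
   can be pointed at, creating one at the head of DEST if no usable
   label exists.  */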
5216
5217 /* Attempt to perform edge redirection by replacing a possibly complex
5218 jump instruction by a goto or by removing the jump completely.
5219 This can apply only if all edges now point to the same block. The
5220 parameters and return values are equivalent to
5221 redirect_edge_and_branch. */
5222
5223 static edge
5224 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5225 {
5226 basic_block src = e->src;
5227 gimple_stmt_iterator i;
5228 gimple stmt;
5229
5230 /* We can replace or remove a complex jump only when we have exactly
5231 two edges. */
5232 if (EDGE_COUNT (src->succs) != 2
5233 /* Verify that all targets will be TARGET. Specifically, the
5234 edge that is not E must also go to TARGET. */
5235 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5236 return NULL;
5237
5238 i = gsi_last_bb (src);
5239 if (gsi_end_p (i))
5240 return NULL;
5241
5242 stmt = gsi_stmt (i);
5243
5244 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5245 {
5246 gsi_remove (&i, true);
5247 e = ssa_redirect_edge (e, target);
5248 e->flags = EDGE_FALLTHRU;
5249 return e;
5250 }
5251
5252 return NULL;
5253 }
5254
5255
5256 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5257 edge representing the redirected branch. */
5258
5259 static edge
5260 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5261 {
5262 basic_block bb = e->src;
5263 gimple_stmt_iterator gsi;
5264 edge ret;
5265 gimple stmt;
5266
5267 if (e->flags & EDGE_ABNORMAL)
5268 return NULL;
5269
5270 if (e->dest == dest)
5271 return NULL;
5272
5273 if (e->flags & EDGE_EH)
5274 return redirect_eh_edge (e, dest);
5275
5276 if (e->src != ENTRY_BLOCK_PTR)
5277 {
5278 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5279 if (ret)
5280 return ret;
5281 }
5282
5283 gsi = gsi_last_bb (bb);
5284 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5285
5286 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5287 {
5288 case GIMPLE_COND:
5289 /* For GIMPLE_COND, we only need to redirect the edge. */
5290 break;
5291
5292 case GIMPLE_GOTO:
5293 /* No non-abnormal edges should lead from a non-simple goto, and
5294 simple ones should be represented implicitly. */
5295 gcc_unreachable ();
5296
5297 case GIMPLE_SWITCH:
5298 {
5299 tree label = gimple_block_label (dest);
5300 tree cases = get_cases_for_edge (e, stmt);
5301
5302 /* If we have a list of cases associated with E, then use it
5303 as it's a lot faster than walking the entire case vector. */
5304 if (cases)
5305 {
5306 edge e2 = find_edge (e->src, dest);
5307 tree last, first;
5308
5309 first = cases;
5310 while (cases)
5311 {
5312 last = cases;
5313 CASE_LABEL (cases) = label;
5314 cases = CASE_CHAIN (cases);
5315 }
5316
5317 /* If there was already an edge in the CFG, then we need
5318 to move all the cases associated with E to E2. */
5319 if (e2)
5320 {
5321 tree cases2 = get_cases_for_edge (e2, stmt);
5322
5323 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5324 CASE_CHAIN (cases2) = first;
5325 }
5326 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5327 }
5328 else
5329 {
5330 size_t i, n = gimple_switch_num_labels (stmt);
5331
5332 for (i = 0; i < n; i++)
5333 {
5334 tree elt = gimple_switch_label (stmt, i);
5335 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5336 CASE_LABEL (elt) = label;
5337 }
5338 }
5339 }
5340 break;
5341
5342 case GIMPLE_ASM:
5343 {
5344 int i, n = gimple_asm_nlabels (stmt);
5345 tree label = NULL;
5346
5347 for (i = 0; i < n; ++i)
5348 {
5349 tree cons = gimple_asm_label_op (stmt, i);
5350 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5351 {
5352 if (!label)
5353 label = gimple_block_label (dest);
5354 TREE_VALUE (cons) = label;
5355 }
5356 }
5357
5358 /* If we didn't find any label matching the former edge in the
5359 asm labels, we must be redirecting the fallthrough
5360 edge. */
5361 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5362 }
5363 break;
5364
5365 case GIMPLE_RETURN:
5366 gsi_remove (&gsi, true);
5367 e->flags |= EDGE_FALLTHRU;
5368 break;
5369
5370 case GIMPLE_OMP_RETURN:
5371 case GIMPLE_OMP_CONTINUE:
5372 case GIMPLE_OMP_SECTIONS_SWITCH:
5373 case GIMPLE_OMP_FOR:
5374 /* The edges from OMP constructs can be simply redirected. */
5375 break;
5376
5377 case GIMPLE_EH_DISPATCH:
5378 if (!(e->flags & EDGE_FALLTHRU))
5379 redirect_eh_dispatch_edge (stmt, e, dest);
5380 break;
5381
5382 case GIMPLE_TRANSACTION:
5383 /* The ABORT edge has a stored label associated with it, otherwise
5384 the edges are simply redirectable. */
5385 if (e->flags == 0)
5386 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5387 break;
5388
5389 default:
5390 /* Otherwise it must be a fallthru edge, and we don't need to
5391 do anything besides redirecting it. */
5392 gcc_assert (e->flags & EDGE_FALLTHRU);
5393 break;
5394 }
5395
5398 /* Now update the edges in the CFG. */
5399 e = ssa_redirect_edge (e, dest);
5400
5401 return e;
5402 }
5403
5404 /* Returns true if it is possible to remove edge E by redirecting
5405 it to the destination of the other edge from E->src. */
5406
5407 static bool
5408 gimple_can_remove_branch_p (const_edge e)
5409 {
5410 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5411 return false;
5412
5413 return true;
5414 }
5415
5416 /* Simple wrapper, as we can always redirect fallthru edges. */
5417
5418 static basic_block
5419 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5420 {
5421 e = gimple_redirect_edge_and_branch (e, dest);
5422 gcc_assert (e);
5423
5424 return NULL;
5425 }
5426
5427
5428 /* Splits basic block BB after statement STMT (but at least after the
5429 labels). If STMT is NULL, BB is split just after the labels. */
5430
5431 static basic_block
5432 gimple_split_block (basic_block bb, void *stmt)
5433 {
5434 gimple_stmt_iterator gsi;
5435 gimple_stmt_iterator gsi_tgt;
5436 gimple act;
5437 gimple_seq list;
5438 basic_block new_bb;
5439 edge e;
5440 edge_iterator ei;
5441
5442 new_bb = create_empty_bb (bb);
5443
5444 /* Redirect the outgoing edges. */
5445 new_bb->succs = bb->succs;
5446 bb->succs = NULL;
5447 FOR_EACH_EDGE (e, ei, new_bb->succs)
5448 e->src = new_bb;
5449
5450 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5451 stmt = NULL;
5452
5453 /* Move everything from GSI to the new basic block. */
5454 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5455 {
5456 act = gsi_stmt (gsi);
5457 if (gimple_code (act) == GIMPLE_LABEL)
5458 continue;
5459
5460 if (!stmt)
5461 break;
5462
5463 if (stmt == act)
5464 {
5465 gsi_next (&gsi);
5466 break;
5467 }
5468 }
5469
5470 if (gsi_end_p (gsi))
5471 return new_bb;
5472
5473 /* Split the statement list - avoid re-creating new containers as this
5474 brings ugly quadratic memory consumption in the inliner.
5475 (We are still quadratic since we need to update stmt BB pointers,
5476 sadly.) */
5477 gsi_split_seq_before (&gsi, &list);
5478 set_bb_seq (new_bb, list);
5479 for (gsi_tgt = gsi_start (list);
5480 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5481 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5482
5483 return new_bb;
5484 }
5485
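/* For illustration (hypothetical example): given a block containing
   "L1: a = b; c = d; e = f;", calling gimple_split_block after the
   statement "c = d" leaves "L1: a = b; c = d;" in BB and moves
   "e = f;" together with all of BB's outgoing edges into the
   returned new block.  */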
5486
5487 /* Moves basic block BB after block AFTER. */
5488
5489 static bool
5490 gimple_move_block_after (basic_block bb, basic_block after)
5491 {
5492 if (bb->prev_bb == after)
5493 return true;
5494
5495 unlink_block (bb);
5496 link_block (bb, after);
5497
5498 return true;
5499 }
5500
5501
5502 /* Return TRUE if block BB has no executable statements, otherwise return
5503 FALSE. */
5504
5505 bool
5506 gimple_empty_block_p (basic_block bb)
5507 {
5508 /* BB must have no executable statements. */
5509 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5510 if (phi_nodes (bb))
5511 return false;
5512 if (gsi_end_p (gsi))
5513 return true;
5514 if (is_gimple_debug (gsi_stmt (gsi)))
5515 gsi_next_nondebug (&gsi);
5516 return gsi_end_p (gsi);
5517 }
5518
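/* For illustration: a block that contains only labels and debug
   statements, e.g. "L1:" followed by "# DEBUG x => y_1", counts as
   empty, while a block with a PHI node or any executable statement
   does not.  */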
5519
5520 /* Split a basic block if it ends with a conditional branch and if the
5521 other part of the block is not empty. */
5522
5523 static basic_block
5524 gimple_split_block_before_cond_jump (basic_block bb)
5525 {
5526 gimple last, split_point;
5527 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5528 if (gsi_end_p (gsi))
5529 return NULL;
5530 last = gsi_stmt (gsi);
5531 if (gimple_code (last) != GIMPLE_COND
5532 && gimple_code (last) != GIMPLE_SWITCH)
5533 return NULL;
5534 gsi_prev_nondebug (&gsi);
5535 split_point = gsi_stmt (gsi);
5536 return split_block (bb, split_point)->dest;
5537 }
5538
5539
5540 /* Return true if basic block BB can be duplicated. */
5541
5542 static bool
5543 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5544 {
5545 return true;
5546 }
5547
5548 /* Create a duplicate of the basic block BB. NOTE: This does not
5549 preserve SSA form. */
5550
5551 static basic_block
5552 gimple_duplicate_bb (basic_block bb)
5553 {
5554 basic_block new_bb;
5555 gimple_stmt_iterator gsi, gsi_tgt;
5556 gimple_seq phis = phi_nodes (bb);
5557 gimple phi, stmt, copy;
5558
5559 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5560
5561 /* Copy the PHI nodes. We ignore PHI node arguments here because
5562 the incoming edges have not been set up yet. */
5563 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5564 {
5565 phi = gsi_stmt (gsi);
5566 copy = create_phi_node (NULL_TREE, new_bb);
5567 create_new_def_for (gimple_phi_result (phi), copy,
5568 gimple_phi_result_ptr (copy));
5569 gimple_set_uid (copy, gimple_uid (phi));
5570 }
5571
5572 gsi_tgt = gsi_start_bb (new_bb);
5573 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5574 {
5575 def_operand_p def_p;
5576 ssa_op_iter op_iter;
5577 tree lhs;
5578
5579 stmt = gsi_stmt (gsi);
5580 if (gimple_code (stmt) == GIMPLE_LABEL)
5581 continue;
5582
5583 /* Don't duplicate label debug stmts. */
5584 if (gimple_debug_bind_p (stmt)
5585 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5586 == LABEL_DECL)
5587 continue;
5588
5589 /* Create a new copy of STMT and duplicate STMT's virtual
5590 operands. */
5591 copy = gimple_copy (stmt);
5592 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5593
5594 maybe_duplicate_eh_stmt (copy, stmt);
5595 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5596
5597 /* When copying around a stmt writing into a local non-user
5598 aggregate, make sure it won't share a stack slot with other
5599 vars. */
5600 lhs = gimple_get_lhs (stmt);
5601 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5602 {
5603 tree base = get_base_address (lhs);
5604 if (base
5605 && (TREE_CODE (base) == VAR_DECL
5606 || TREE_CODE (base) == RESULT_DECL)
5607 && DECL_IGNORED_P (base)
5608 && !TREE_STATIC (base)
5609 && !DECL_EXTERNAL (base)
5610 && (TREE_CODE (base) != VAR_DECL
5611 || !DECL_HAS_VALUE_EXPR_P (base)))
5612 DECL_NONSHAREABLE (base) = 1;
5613 }
5614
5615 /* Create new names for all the definitions created by COPY and
5616 add replacement mappings for each new name. */
5617 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5618 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5619 }
5620
5621 return new_bb;
5622 }
5623
5624 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5625
5626 static void
5627 add_phi_args_after_copy_edge (edge e_copy)
5628 {
5629 basic_block bb, bb_copy = e_copy->src, dest;
5630 edge e;
5631 edge_iterator ei;
5632 gimple phi, phi_copy;
5633 tree def;
5634 gimple_stmt_iterator psi, psi_copy;
5635
5636 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5637 return;
5638
5639 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5640
5641 if (e_copy->dest->flags & BB_DUPLICATED)
5642 dest = get_bb_original (e_copy->dest);
5643 else
5644 dest = e_copy->dest;
5645
5646 e = find_edge (bb, dest);
5647 if (!e)
5648 {
5649 /* During loop unrolling the target of the latch edge is copied.
5650 In this case we are not looking for the edge to DEST, but for
5651 the edge to the duplicated block whose original was DEST. */
5652 FOR_EACH_EDGE (e, ei, bb->succs)
5653 {
5654 if ((e->dest->flags & BB_DUPLICATED)
5655 && get_bb_original (e->dest) == dest)
5656 break;
5657 }
5658
5659 gcc_assert (e != NULL);
5660 }
5661
5662 for (psi = gsi_start_phis (e->dest),
5663 psi_copy = gsi_start_phis (e_copy->dest);
5664 !gsi_end_p (psi);
5665 gsi_next (&psi), gsi_next (&psi_copy))
5666 {
5667 phi = gsi_stmt (psi);
5668 phi_copy = gsi_stmt (psi_copy);
5669 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5670 add_phi_arg (phi_copy, def, e_copy,
5671 gimple_phi_arg_location_from_edge (phi, e));
5672 }
5673 }
5674
5675
5676 /* Basic block BB_COPY was created by code duplication. Add phi node
5677 arguments for edges going out of BB_COPY. The blocks that were
5678 duplicated have BB_DUPLICATED set. */
5679
5680 void
5681 add_phi_args_after_copy_bb (basic_block bb_copy)
5682 {
5683 edge e_copy;
5684 edge_iterator ei;
5685
5686 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5687 {
5688 add_phi_args_after_copy_edge (e_copy);
5689 }
5690 }
5691
5692 /* Blocks in REGION_COPY array of length N_REGION were created by
5693 duplication of basic blocks. Add phi node arguments for edges
5694 going from these blocks. If E_COPY is not NULL, also add
5695 phi node arguments for its destination. */
5696
5697 void
5698 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5699 edge e_copy)
5700 {
5701 unsigned i;
5702
5703 for (i = 0; i < n_region; i++)
5704 region_copy[i]->flags |= BB_DUPLICATED;
5705
5706 for (i = 0; i < n_region; i++)
5707 add_phi_args_after_copy_bb (region_copy[i]);
5708 if (e_copy)
5709 add_phi_args_after_copy_edge (e_copy);
5710
5711 for (i = 0; i < n_region; i++)
5712 region_copy[i]->flags &= ~BB_DUPLICATED;
5713 }
5714
5715 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5716 important exit edge EXIT. By important we mean that no SSA name defined
5717 inside region is live over the other exit edges of the region. All entry
5718 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5719 to the duplicate of the region. Dominance and loop information is
5720 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5721 UPDATE_DOMINANCE is false then we assume that the caller will update the
5722 dominance information after calling this function. The new basic
5723 blocks are stored to REGION_COPY in the same order they had in REGION,
5724 provided that REGION_COPY is not NULL.
5725 The function returns false if it is unable to copy the region,
5726 true otherwise. */
5727
5728 bool
5729 gimple_duplicate_sese_region (edge entry, edge exit,
5730 basic_block *region, unsigned n_region,
5731 basic_block *region_copy,
5732 bool update_dominance)
5733 {
5734 unsigned i;
5735 bool free_region_copy = false, copying_header = false;
5736 struct loop *loop = entry->dest->loop_father;
5737 edge exit_copy;
5738 vec<basic_block> doms;
5739 edge redirected;
5740 int total_freq = 0, entry_freq = 0;
5741 gcov_type total_count = 0, entry_count = 0;
5742
5743 if (!can_copy_bbs_p (region, n_region))
5744 return false;
5745
5746 /* Some sanity checking. Note that we do not check for all possible
5747 misuses of the function. I.e., if you ask to copy something weird,
5748 it will work, but the state of the structures probably will not be
5749 correct. */
5750 for (i = 0; i < n_region; i++)
5751 {
5752 /* We do not handle subloops, i.e. all the blocks must belong to the
5753 same loop. */
5754 if (region[i]->loop_father != loop)
5755 return false;
5756
5757 if (region[i] != entry->dest
5758 && region[i] == loop->header)
5759 return false;
5760 }
5761
5762 set_loop_copy (loop, loop);
5763
5764 /* In case the function is used for loop header copying (which is the primary
5765 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5766 if (loop->header == entry->dest)
5767 {
5768 copying_header = true;
5769 set_loop_copy (loop, loop_outer (loop));
5770
5771 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5772 return false;
5773
5774 for (i = 0; i < n_region; i++)
5775 if (region[i] != exit->src
5776 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5777 return false;
5778 }
5779
5780 if (!region_copy)
5781 {
5782 region_copy = XNEWVEC (basic_block, n_region);
5783 free_region_copy = true;
5784 }
5785
5786 initialize_original_copy_tables ();
5787
5788 /* Record blocks outside the region that are dominated by something
5789 inside. */
5790 if (update_dominance)
5791 {
5792 doms.create (0);
5793 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5794 }
5795
5796 if (entry->dest->count)
5797 {
5798 total_count = entry->dest->count;
5799 entry_count = entry->count;
5800 /* Fix up corner cases, to avoid division by zero or creation of negative
5801 frequencies. */
5802 if (entry_count > total_count)
5803 entry_count = total_count;
5804 }
5805 else
5806 {
5807 total_freq = entry->dest->frequency;
5808 entry_freq = EDGE_FREQUENCY (entry);
5809 /* Fix up corner cases, to avoid division by zero or creation of negative
5810 frequencies. */
5811 if (total_freq == 0)
5812 total_freq = 1;
5813 else if (entry_freq > total_freq)
5814 entry_freq = total_freq;
5815 }
5816
5817 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5818 split_edge_bb_loc (entry), update_dominance);
5819 if (total_count)
5820 {
5821 scale_bbs_frequencies_gcov_type (region, n_region,
5822 total_count - entry_count,
5823 total_count);
5824 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5825 total_count);
5826 }
5827 else
5828 {
5829 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5830 total_freq);
5831 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5832 }
5833
5834 if (copying_header)
5835 {
5836 loop->header = exit->dest;
5837 loop->latch = exit->src;
5838 }
5839
5840 /* Redirect the entry and add the phi node arguments. */
5841 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5842 gcc_assert (redirected != NULL);
5843 flush_pending_stmts (entry);
5844
5845 /* Concerning updating of dominators: We must recount dominators
5846 for the entry block and its copy. Anything that is outside of
5847 the region, but was dominated by something inside, needs
5848 recounting as well. */
5849 if (update_dominance)
5850 {
5851 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5852 doms.safe_push (get_bb_original (entry->dest));
5853 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5854 doms.release ();
5855 }
5856
5857 /* Add the other PHI node arguments. */
5858 add_phi_args_after_copy (region_copy, n_region, NULL);
5859
5860 if (free_region_copy)
5861 free (region_copy);
5862
5863 free_original_copy_tables ();
5864 return true;
5865 }
5866
5867 /* Checks if BB is part of the region defined by N_REGION BBS. */
5868 static bool
5869 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5870 {
5871 unsigned int n;
5872
5873 for (n = 0; n < n_region; n++)
5874 {
5875 if (bb == bbs[n])
5876 return true;
5877 }
5878 return false;
5879 }
5880
5881 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5882 are stored to REGION_COPY in the same order in that they appear
5883 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5884 the region, EXIT an exit from it. The condition guarding EXIT
5885 is moved to ENTRY. Returns true if duplication succeeds, false
5886 otherwise.
5887
5888 For example,
5889
5890 some_code;
5891 if (cond)
5892 A;
5893 else
5894 B;
5895
5896 is transformed to
5897
5898 if (cond)
5899 {
5900 some_code;
5901 A;
5902 }
5903 else
5904 {
5905 some_code;
5906 B;
5907 }
5908 */
5909
5910 bool
5911 gimple_duplicate_sese_tail (edge entry, edge exit,
5912 basic_block *region, unsigned n_region,
5913 basic_block *region_copy)
5914 {
5915 unsigned i;
5916 bool free_region_copy = false;
5917 struct loop *loop = exit->dest->loop_father;
5918 struct loop *orig_loop = entry->dest->loop_father;
5919 basic_block switch_bb, entry_bb, nentry_bb;
5920 vec<basic_block> doms;
5921 int total_freq = 0, exit_freq = 0;
5922 gcov_type total_count = 0, exit_count = 0;
5923 edge exits[2], nexits[2], e;
5924 gimple_stmt_iterator gsi;
5925 gimple cond_stmt;
5926 edge sorig, snew;
5927 basic_block exit_bb;
5928 gimple_stmt_iterator psi;
5929 gimple phi;
5930 tree def;
5931 struct loop *target, *aloop, *cloop;
5932
5933 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5934 exits[0] = exit;
5935 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5936
5937 if (!can_copy_bbs_p (region, n_region))
5938 return false;
5939
5940 initialize_original_copy_tables ();
5941 set_loop_copy (orig_loop, loop);
5942
5943 target = loop;
5944 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5945 {
5946 if (bb_part_of_region_p (aloop->header, region, n_region))
5947 {
5948 cloop = duplicate_loop (aloop, target);
5949 duplicate_subloops (aloop, cloop);
5950 }
5951 }
5952
5953 if (!region_copy)
5954 {
5955 region_copy = XNEWVEC (basic_block, n_region);
5956 free_region_copy = true;
5957 }
5958
5959 gcc_assert (!need_ssa_update_p (cfun));
5960
5961 /* Record blocks outside the region that are dominated by something
5962 inside. */
5963 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5964
5965 if (exit->src->count)
5966 {
5967 total_count = exit->src->count;
5968 exit_count = exit->count;
5969 /* Fix up corner cases, to avoid division by zero or creation of negative
5970 frequencies. */
5971 if (exit_count > total_count)
5972 exit_count = total_count;
5973 }
5974 else
5975 {
5976 total_freq = exit->src->frequency;
5977 exit_freq = EDGE_FREQUENCY (exit);
5978 /* Fix up corner cases, to avoid division by zero or creation of negative
5979 frequencies. */
5980 if (total_freq == 0)
5981 total_freq = 1;
5982 if (exit_freq > total_freq)
5983 exit_freq = total_freq;
5984 }
5985
5986 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5987 split_edge_bb_loc (exit), true);
5988 if (total_count)
5989 {
5990 scale_bbs_frequencies_gcov_type (region, n_region,
5991 total_count - exit_count,
5992 total_count);
5993 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5994 total_count);
5995 }
5996 else
5997 {
5998 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5999 total_freq);
6000 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6001 }
6002
6003 /* Create the switch block, and put the exit condition into it. */
6004 entry_bb = entry->dest;
6005 nentry_bb = get_bb_copy (entry_bb);
6006 if (!last_stmt (entry->src)
6007 || !stmt_ends_bb_p (last_stmt (entry->src)))
6008 switch_bb = entry->src;
6009 else
6010 switch_bb = split_edge (entry);
6011 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6012
6013 gsi = gsi_last_bb (switch_bb);
6014 cond_stmt = last_stmt (exit->src);
6015 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6016 cond_stmt = gimple_copy (cond_stmt);
6017
6018 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6019
6020 sorig = single_succ_edge (switch_bb);
6021 sorig->flags = exits[1]->flags;
6022 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6023
6024 /* Register the new edge from SWITCH_BB in loop exit lists. */
6025 rescan_loop_exit (snew, true, false);
6026
6027 /* Add the PHI node arguments. */
6028 add_phi_args_after_copy (region_copy, n_region, snew);
6029
6030 /* Get rid of now superfluous conditions and associated edges (and phi node
6031 arguments). */
6032 exit_bb = exit->dest;
6033
6034 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6035 PENDING_STMT (e) = NULL;
6036
6037 /* The latch of ORIG_LOOP was copied, and so was the backedge
6038 to the original header. We redirect this backedge to EXIT_BB. */
6039 for (i = 0; i < n_region; i++)
6040 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6041 {
6042 gcc_assert (single_succ_edge (region_copy[i]));
6043 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6044 PENDING_STMT (e) = NULL;
6045 for (psi = gsi_start_phis (exit_bb);
6046 !gsi_end_p (psi);
6047 gsi_next (&psi))
6048 {
6049 phi = gsi_stmt (psi);
6050 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6051 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6052 }
6053 }
6054 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6055 PENDING_STMT (e) = NULL;
6056
6057 /* Anything that is outside of the region, but was dominated by something
6058 inside, needs its dominance info updated. */
6059 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6060 doms.release ();
6061 /* Update the SSA web. */
6062 update_ssa (TODO_update_ssa);
6063
6064 if (free_region_copy)
6065 free (region_copy);
6066
6067 free_original_copy_tables ();
6068 return true;
6069 }
6070
6071 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6072 adding blocks when the dominator traversal reaches EXIT. This
6073 function silently assumes that ENTRY strictly dominates EXIT. */
6074
6075 void
6076 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6077 vec<basic_block> *bbs_p)
6078 {
6079 basic_block son;
6080
6081 for (son = first_dom_son (CDI_DOMINATORS, entry);
6082 son;
6083 son = next_dom_son (CDI_DOMINATORS, son))
6084 {
6085 bbs_p->safe_push (son);
6086 if (son != exit)
6087 gather_blocks_in_sese_region (son, exit, bbs_p);
6088 }
6089 }
6090
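/* Illustrative usage sketch (not taken verbatim from this file, though it
   mirrors the pattern used by move_sese_region_to_fn below): a caller
   collects an SESE region by seeding the vector with ENTRY_BB and letting
   the dominator walk add the rest:

       vec<basic_block> bbs;
       bbs.create (0);
       bbs.safe_push (entry_bb);
       gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
       ... use bbs ...
       bbs.release ();

   ENTRY_BB must strictly dominate EXIT_BB for the traversal to stop where
   intended.  */
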
6091 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6092 The duplicates are recorded in VARS_MAP. */
6093
6094 static void
6095 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6096 tree to_context)
6097 {
6098 tree t = *tp, new_t;
6099 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6100 void **loc;
6101
6102 if (DECL_CONTEXT (t) == to_context)
6103 return;
6104
6105 loc = pointer_map_contains (vars_map, t);
6106
6107 if (!loc)
6108 {
6109 loc = pointer_map_insert (vars_map, t);
6110
6111 if (SSA_VAR_P (t))
6112 {
6113 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6114 add_local_decl (f, new_t);
6115 }
6116 else
6117 {
6118 gcc_assert (TREE_CODE (t) == CONST_DECL);
6119 new_t = copy_node (t);
6120 }
6121 DECL_CONTEXT (new_t) = to_context;
6122
6123 *loc = new_t;
6124 }
6125 else
6126 new_t = (tree) *loc;
6127
6128 *tp = new_t;
6129 }
6130
6131
6132 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6133 VARS_MAP maps old ssa names and var_decls to the new ones. */
6134
6135 static tree
6136 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6137 tree to_context)
6138 {
6139 void **loc;
6140 tree new_name;
6141
6142 gcc_assert (!virtual_operand_p (name));
6143
6144 loc = pointer_map_contains (vars_map, name);
6145
6146 if (!loc)
6147 {
6148 tree decl = SSA_NAME_VAR (name);
6149 if (decl)
6150 {
6151 replace_by_duplicate_decl (&decl, vars_map, to_context);
6152 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6153 decl, SSA_NAME_DEF_STMT (name));
6154 if (SSA_NAME_IS_DEFAULT_DEF (name))
6155 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6156 decl, new_name);
6157 }
6158 else
6159 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6160 name, SSA_NAME_DEF_STMT (name));
6161
6162 loc = pointer_map_insert (vars_map, name);
6163 *loc = new_name;
6164 }
6165 else
6166 new_name = (tree) *loc;
6167
6168 return new_name;
6169 }
6170
6171 struct move_stmt_d
6172 {
6173 tree orig_block;
6174 tree new_block;
6175 tree from_context;
6176 tree to_context;
6177 struct pointer_map_t *vars_map;
6178 htab_t new_label_map;
6179 struct pointer_map_t *eh_map;
6180 bool remap_decls_p;
6181 };
6182
6183 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6184 contained in *TP if it was ORIG_BLOCK previously, and change the
6185 DECL_CONTEXT of every local variable referenced in *TP. */
6186
6187 static tree
6188 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6189 {
6190 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6191 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6192 tree t = *tp;
6193
6194 if (EXPR_P (t))
6195 {
6196 tree block = TREE_BLOCK (t);
6197 if (block == p->orig_block
6198 || (p->orig_block == NULL_TREE
6199 && block != NULL_TREE))
6200 TREE_SET_BLOCK (t, p->new_block);
6201 #ifdef ENABLE_CHECKING
6202 else if (block != NULL_TREE)
6203 {
6204 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6205 block = BLOCK_SUPERCONTEXT (block);
6206 gcc_assert (block == p->orig_block);
6207 }
6208 #endif
6209 }
6210 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6211 {
6212 if (TREE_CODE (t) == SSA_NAME)
6213 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6214 else if (TREE_CODE (t) == LABEL_DECL)
6215 {
6216 if (p->new_label_map)
6217 {
6218 struct tree_map in, *out;
6219 in.base.from = t;
6220 out = (struct tree_map *)
6221 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6222 if (out)
6223 *tp = t = out->to;
6224 }
6225
6226 DECL_CONTEXT (t) = p->to_context;
6227 }
6228 else if (p->remap_decls_p)
6229 {
6230 /* Replace T with its duplicate. T should no longer appear in the
6231 parent function, so this looks wasteful; however, it may appear
6232 in referenced_vars, and more importantly, as virtual operands of
6233 statements, and in alias lists of other variables. It would be
6234 quite difficult to expunge it from all those places. ??? It might
6235 suffice to do this for addressable variables. */
6236 if ((TREE_CODE (t) == VAR_DECL
6237 && !is_global_var (t))
6238 || TREE_CODE (t) == CONST_DECL)
6239 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6240 }
6241 *walk_subtrees = 0;
6242 }
6243 else if (TYPE_P (t))
6244 *walk_subtrees = 0;
6245
6246 return NULL_TREE;
6247 }
6248
6249 /* Helper for move_stmt_r. Given an EH region number for the source
6250 function, map that to the duplicate EH region number in the dest. */
6251
6252 static int
6253 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6254 {
6255 eh_region old_r, new_r;
6256 void **slot;
6257
6258 old_r = get_eh_region_from_number (old_nr);
6259 slot = pointer_map_contains (p->eh_map, old_r);
6260 new_r = (eh_region) *slot;
6261
6262 return new_r->index;
6263 }
6264
6265 /* Similar, but operate on INTEGER_CSTs. */
6266
6267 static tree
6268 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6269 {
6270 int old_nr, new_nr;
6271
6272 old_nr = tree_low_cst (old_t_nr, 0);
6273 new_nr = move_stmt_eh_region_nr (old_nr, p);
6274
6275 return build_int_cst (integer_type_node, new_nr);
6276 }
6277
6278 /* Like move_stmt_op, but for gimple statements.
6279
6280 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6281 contained in the current statement in *GSI_P and change the
6282 DECL_CONTEXT of every local variable referenced in the current
6283 statement. */
6284
6285 static tree
6286 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6287 struct walk_stmt_info *wi)
6288 {
6289 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6290 gimple stmt = gsi_stmt (*gsi_p);
6291 tree block = gimple_block (stmt);
6292
6293 if (block == p->orig_block
6294 || (p->orig_block == NULL_TREE
6295 && block != NULL_TREE))
6296 gimple_set_block (stmt, p->new_block);
6297
6298 switch (gimple_code (stmt))
6299 {
6300 case GIMPLE_CALL:
6301 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6302 {
6303 tree r, fndecl = gimple_call_fndecl (stmt);
6304 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6305 switch (DECL_FUNCTION_CODE (fndecl))
6306 {
6307 case BUILT_IN_EH_COPY_VALUES:
6308 r = gimple_call_arg (stmt, 1);
6309 r = move_stmt_eh_region_tree_nr (r, p);
6310 gimple_call_set_arg (stmt, 1, r);
6311 /* FALLTHRU */
6312
6313 case BUILT_IN_EH_POINTER:
6314 case BUILT_IN_EH_FILTER:
6315 r = gimple_call_arg (stmt, 0);
6316 r = move_stmt_eh_region_tree_nr (r, p);
6317 gimple_call_set_arg (stmt, 0, r);
6318 break;
6319
6320 default:
6321 break;
6322 }
6323 }
6324 break;
6325
6326 case GIMPLE_RESX:
6327 {
6328 int r = gimple_resx_region (stmt);
6329 r = move_stmt_eh_region_nr (r, p);
6330 gimple_resx_set_region (stmt, r);
6331 }
6332 break;
6333
6334 case GIMPLE_EH_DISPATCH:
6335 {
6336 int r = gimple_eh_dispatch_region (stmt);
6337 r = move_stmt_eh_region_nr (r, p);
6338 gimple_eh_dispatch_set_region (stmt, r);
6339 }
6340 break;
6341
6342 case GIMPLE_OMP_RETURN:
6343 case GIMPLE_OMP_CONTINUE:
6344 break;
6345 default:
6346 if (is_gimple_omp (stmt))
6347 {
6348 /* Do not remap variables inside OMP directives. Variables
6349 referenced in clauses and directive header belong to the
6350 parent function and should not be moved into the child
6351 function. */
6352 bool save_remap_decls_p = p->remap_decls_p;
6353 p->remap_decls_p = false;
6354 *handled_ops_p = true;
6355
6356 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6357 move_stmt_op, wi);
6358
6359 p->remap_decls_p = save_remap_decls_p;
6360 }
6361 break;
6362 }
6363
6364 return NULL_TREE;
6365 }
6366
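/* A sketch of how the two callbacks above are combined (this mirrors the
   call in move_block_to_fn below): every statement of a moved block is
   walked with move_stmt_r as the statement callback and move_stmt_op as
   the operand callback, roughly:

       struct walk_stmt_info wi;
       memset (&wi, 0, sizeof (wi));
       wi.info = d;                 (d points to a struct move_stmt_d)
       walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);  */
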
6367 /* Move basic block BB from function CFUN to function DEST_FN. The
6368 block is moved out of the original linked list and placed after
6369 block AFTER in the new list. Also, the block is removed from the
6370 original array of blocks and placed in DEST_FN's array of blocks.
6371 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6372 updated to reflect the moved edges.
6373
6374 The local variables are remapped to new instances, VARS_MAP is used
6375 to record the mapping. */
6376
6377 static void
6378 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6379 basic_block after, bool update_edge_count_p,
6380 struct move_stmt_d *d)
6381 {
6382 struct control_flow_graph *cfg;
6383 edge_iterator ei;
6384 edge e;
6385 gimple_stmt_iterator si;
6386 unsigned old_len, new_len;
6387
6388 /* Remove BB from dominance structures. */
6389 delete_from_dominance_info (CDI_DOMINATORS, bb);
6390
6391 /* Move BB from its current loop to the copy in the new function. */
6392 if (current_loops)
6393 {
6394 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6395 if (new_loop)
6396 bb->loop_father = new_loop;
6397 }
6398
6399 /* Link BB to the new linked list. */
6400 move_block_after (bb, after);
6401
6402 /* Update the edge count in the corresponding flowgraphs. */
6403 if (update_edge_count_p)
6404 FOR_EACH_EDGE (e, ei, bb->succs)
6405 {
6406 cfun->cfg->x_n_edges--;
6407 dest_cfun->cfg->x_n_edges++;
6408 }
6409
6410 /* Remove BB from the original basic block array. */
6411 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6412 cfun->cfg->x_n_basic_blocks--;
6413
6414 /* Grow DEST_CFUN's basic block array if needed. */
6415 cfg = dest_cfun->cfg;
6416 cfg->x_n_basic_blocks++;
6417 if (bb->index >= cfg->x_last_basic_block)
6418 cfg->x_last_basic_block = bb->index + 1;
6419
6420 old_len = vec_safe_length (cfg->x_basic_block_info);
6421 if ((unsigned) cfg->x_last_basic_block >= old_len)
6422 {
6423 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6424 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6425 }
6426
6427 (*cfg->x_basic_block_info)[bb->index] = bb;
6428
6429 /* Remap the variables in phi nodes. */
6430 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6431 {
6432 gimple phi = gsi_stmt (si);
6433 use_operand_p use;
6434 tree op = PHI_RESULT (phi);
6435 ssa_op_iter oi;
6436 unsigned i;
6437
6438 if (virtual_operand_p (op))
6439 {
6440 /* Remove the phi nodes for virtual operands (alias analysis will be
6441 run for the new function, anyway). */
6442 remove_phi_node (&si, true);
6443 continue;
6444 }
6445
6446 SET_PHI_RESULT (phi,
6447 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6448 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6449 {
6450 op = USE_FROM_PTR (use);
6451 if (TREE_CODE (op) == SSA_NAME)
6452 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6453 }
6454
6455 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6456 {
6457 location_t locus = gimple_phi_arg_location (phi, i);
6458 tree block = LOCATION_BLOCK (locus);
6459
6460 if (locus == UNKNOWN_LOCATION)
6461 continue;
6462 if (d->orig_block == NULL_TREE || block == d->orig_block)
6463 {
6464 if (d->new_block == NULL_TREE)
6465 locus = LOCATION_LOCUS (locus);
6466 else
6467 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6468 gimple_phi_arg_set_location (phi, i, locus);
6469 }
6470 }
6471
6472 gsi_next (&si);
6473 }
6474
6475 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6476 {
6477 gimple stmt = gsi_stmt (si);
6478 struct walk_stmt_info wi;
6479
6480 memset (&wi, 0, sizeof (wi));
6481 wi.info = d;
6482 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6483
6484 if (gimple_code (stmt) == GIMPLE_LABEL)
6485 {
6486 tree label = gimple_label_label (stmt);
6487 int uid = LABEL_DECL_UID (label);
6488
6489 gcc_assert (uid > -1);
6490
6491 old_len = vec_safe_length (cfg->x_label_to_block_map);
6492 if (old_len <= (unsigned) uid)
6493 {
6494 new_len = 3 * uid / 2 + 1;
6495 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6496 }
6497
6498 (*cfg->x_label_to_block_map)[uid] = bb;
6499 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6500
6501 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6502
6503 if (uid >= dest_cfun->cfg->last_label_uid)
6504 dest_cfun->cfg->last_label_uid = uid + 1;
6505 }
6506
6507 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6508 remove_stmt_from_eh_lp_fn (cfun, stmt);
6509
6510 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6511 gimple_remove_stmt_histograms (cfun, stmt);
6512
6513 /* We cannot leave any operands allocated from the operand caches of
6514 the current function. */
6515 free_stmt_operands (stmt);
6516 push_cfun (dest_cfun);
6517 update_stmt (stmt);
6518 pop_cfun ();
6519 }
6520
6521 FOR_EACH_EDGE (e, ei, bb->succs)
6522 if (e->goto_locus != UNKNOWN_LOCATION)
6523 {
6524 tree block = LOCATION_BLOCK (e->goto_locus);
6525 if (d->orig_block == NULL_TREE
6526 || block == d->orig_block)
6527 e->goto_locus = d->new_block ?
6528 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6529 LOCATION_LOCUS (e->goto_locus);
6530 }
6531 }
6532
6533 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6534 the outermost EH region. Use REGION as the incoming base EH region. */
6535
6536 static eh_region
6537 find_outermost_region_in_block (struct function *src_cfun,
6538 basic_block bb, eh_region region)
6539 {
6540 gimple_stmt_iterator si;
6541
6542 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6543 {
6544 gimple stmt = gsi_stmt (si);
6545 eh_region stmt_region;
6546 int lp_nr;
6547
6548 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6549 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6550 if (stmt_region)
6551 {
6552 if (region == NULL)
6553 region = stmt_region;
6554 else if (stmt_region != region)
6555 {
6556 region = eh_region_outermost (src_cfun, stmt_region, region);
6557 gcc_assert (region != NULL);
6558 }
6559 }
6560 }
6561
6562 return region;
6563 }
6564
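/* Callback passed to duplicate_eh_regions by move_sese_region_to_fn.
   Map label DECL to a fresh artificial label in the current function,
   recording the mapping in the hash table DATA.  Return the new label.  */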
6565 static tree
6566 new_label_mapper (tree decl, void *data)
6567 {
6568 htab_t hash = (htab_t) data;
6569 struct tree_map *m;
6570 void **slot;
6571
6572 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6573
6574 m = XNEW (struct tree_map);
6575 m->hash = DECL_UID (decl);
6576 m->base.from = decl;
6577 m->to = create_artificial_label (UNKNOWN_LOCATION);
6578 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6579 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6580 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6581
6582 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6583 gcc_assert (*slot == NULL);
6584
6585 *slot = m;
6586
6587 return m->to;
6588 }
6589
6590 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6591 subblocks. */
6592
6593 static void
6594 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6595 tree to_context)
6596 {
6597 tree *tp, t;
6598
6599 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6600 {
6601 t = *tp;
6602 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6603 continue;
6604 replace_by_duplicate_decl (&t, vars_map, to_context);
6605 if (t != *tp)
6606 {
6607 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6608 {
6609 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6610 DECL_HAS_VALUE_EXPR_P (t) = 1;
6611 }
6612 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6613 *tp = t;
6614 }
6615 }
6616
6617 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6618 replace_block_vars_by_duplicates (block, vars_map, to_context);
6619 }
6620
6621 /* Fix up the loop arrays and numbers after moving LOOP and its subloops
6622 from FN1 to FN2. */
6623
6624 static void
6625 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6626 struct loop *loop)
6627 {
6628 /* Discard it from the old loop array. */
6629 (*get_loops (fn1))[loop->num] = NULL;
6630
6631 /* Place it in the new loop array, assigning it a new number. */
6632 loop->num = number_of_loops (fn2);
6633 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6634
6635 /* Recurse to children. */
6636 for (loop = loop->inner; loop; loop = loop->next)
6637 fixup_loop_arrays_after_move (fn1, fn2, loop);
6638 }
6639
6640 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6641 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6642 single basic block in the original CFG and the new basic block is
6643 returned. DEST_CFUN must not have a CFG yet.
6644
6645 Note that the region need not be a pure SESE region. Blocks inside
6646 the region may contain calls to abort/exit. The only restriction
6647 is that ENTRY_BB should be the only entry point and it must
6648 dominate EXIT_BB.
6649
6650 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6651 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6652 to the new function.
6653
6654 All local variables referenced in the region are assumed to be in
6655 the corresponding BLOCK_VARS and unexpanded variable lists
6656 associated with DEST_CFUN. */
6657
6658 basic_block
6659 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6660 basic_block exit_bb, tree orig_block)
6661 {
6662 vec<basic_block> bbs, dom_bbs;
6663 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6664 basic_block after, bb, *entry_pred, *exit_succ, abb;
6665 struct function *saved_cfun = cfun;
6666 int *entry_flag, *exit_flag;
6667 unsigned *entry_prob, *exit_prob;
6668 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6669 edge e;
6670 edge_iterator ei;
6671 htab_t new_label_map;
6672 struct pointer_map_t *vars_map, *eh_map;
6673 struct loop *loop = entry_bb->loop_father;
6674 struct loop *loop0 = get_loop (saved_cfun, 0);
6675 struct move_stmt_d d;
6676
6677 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6678 region. */
6679 gcc_assert (entry_bb != exit_bb
6680 && (!exit_bb
6681 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6682
6683 /* Collect all the blocks in the region. Manually add ENTRY_BB
6684 because gather_blocks_in_sese_region only adds the blocks it
6684 strictly dominates. */
6685 bbs.create (0);
6686 bbs.safe_push (entry_bb);
6687 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6688
6689 /* The blocks that used to be dominated by something in BBS will now be
6690 dominated by the new block. */
6691 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6692 bbs.address (),
6693 bbs.length ());
6694
6695 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6696 the predecessor edges to ENTRY_BB and the successor edges to
6697 EXIT_BB so that we can re-attach them to the new basic block that
6698 will replace the region. */
6699 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6700 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6701 entry_flag = XNEWVEC (int, num_entry_edges);
6702 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6703 i = 0;
6704 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6705 {
6706 entry_prob[i] = e->probability;
6707 entry_flag[i] = e->flags;
6708 entry_pred[i++] = e->src;
6709 remove_edge (e);
6710 }
6711
6712 if (exit_bb)
6713 {
6714 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6715 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6716 exit_flag = XNEWVEC (int, num_exit_edges);
6717 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6718 i = 0;
6719 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6720 {
6721 exit_prob[i] = e->probability;
6722 exit_flag[i] = e->flags;
6723 exit_succ[i++] = e->dest;
6724 remove_edge (e);
6725 }
6726 }
6727 else
6728 {
6729 num_exit_edges = 0;
6730 exit_succ = NULL;
6731 exit_flag = NULL;
6732 exit_prob = NULL;
6733 }
6734
6735 /* Switch context to the child function to initialize DEST_FN's CFG. */
6736 gcc_assert (dest_cfun->cfg == NULL);
6737 push_cfun (dest_cfun);
6738
6739 init_empty_tree_cfg ();
6740
6741 /* Initialize EH information for the new function. */
6742 eh_map = NULL;
6743 new_label_map = NULL;
6744 if (saved_cfun->eh)
6745 {
6746 eh_region region = NULL;
6747
6748 FOR_EACH_VEC_ELT (bbs, i, bb)
6749 region = find_outermost_region_in_block (saved_cfun, bb, region);
6750
6751 init_eh_for_function ();
6752 if (region != NULL)
6753 {
6754 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6755 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6756 new_label_mapper, new_label_map);
6757 }
6758 }
6759
6760 /* Initialize an empty loop tree. */
6761 struct loops *loops = ggc_alloc_cleared_loops ();
6762 init_loops_structure (dest_cfun, loops, 1);
6763 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6764 set_loops_for_fn (dest_cfun, loops);
6765
6766 /* Move the outlined loop tree part. */
6767 num_nodes = bbs.length ();
6768 FOR_EACH_VEC_ELT (bbs, i, bb)
6769 {
6770 if (bb->loop_father->header == bb)
6771 {
6772 struct loop *this_loop = bb->loop_father;
6773 struct loop *outer = loop_outer (this_loop);
6774 if (outer == loop
6775 /* If the SESE region contains some bbs ending with
6776 a noreturn call, those are considered to belong
6777 to the outermost loop in saved_cfun, rather than
6778 the entry_bb's loop_father. */
6779 || outer == loop0)
6780 {
6781 if (outer != loop)
6782 num_nodes -= this_loop->num_nodes;
6783 flow_loop_tree_node_remove (bb->loop_father);
6784 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6785 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6786 }
6787 }
6788 else if (bb->loop_father == loop0 && loop0 != loop)
6789 num_nodes--;
6790
6791 /* Remove loop exits from the outlined region. */
6792 if (loops_for_fn (saved_cfun)->exits)
6793 FOR_EACH_EDGE (e, ei, bb->succs)
6794 {
6795 void **slot = htab_find_slot_with_hash
6796 (loops_for_fn (saved_cfun)->exits, e,
6797 htab_hash_pointer (e), NO_INSERT);
6798 if (slot)
6799 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6800 }
6801 }
6802
6803
6804 /* Adjust the number of blocks in the tree root of the outlined part. */
6805 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6806
6807 /* Set up a mapping to be used by move_block_to_fn. */
6808 loop->aux = current_loops->tree_root;
6809 loop0->aux = current_loops->tree_root;
6810
6811 pop_cfun ();
6812
6813 /* Move blocks from BBS into DEST_CFUN. */
6814 gcc_assert (bbs.length () >= 2);
6815 after = dest_cfun->cfg->x_entry_block_ptr;
6816 vars_map = pointer_map_create ();
6817
6818 memset (&d, 0, sizeof (d));
6819 d.orig_block = orig_block;
6820 d.new_block = DECL_INITIAL (dest_cfun->decl);
6821 d.from_context = cfun->decl;
6822 d.to_context = dest_cfun->decl;
6823 d.vars_map = vars_map;
6824 d.new_label_map = new_label_map;
6825 d.eh_map = eh_map;
6826 d.remap_decls_p = true;
6827
6828 FOR_EACH_VEC_ELT (bbs, i, bb)
6829 {
6830 /* No need to update edge counts on the last block. It has
6831 already been updated earlier when we detached the region from
6832 the original CFG. */
6833 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6834 after = bb;
6835 }
6836
6837 loop->aux = NULL;
6838 loop0->aux = NULL;
6839 /* Loop sizes are no longer correct, fix them up. */
6840 loop->num_nodes -= num_nodes;
6841 for (struct loop *outer = loop_outer (loop);
6842 outer; outer = loop_outer (outer))
6843 outer->num_nodes -= num_nodes;
6844 loop0->num_nodes -= bbs.length () - num_nodes;
6845
6846 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6847 {
6848 struct loop *aloop;
6849 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6850 if (aloop != NULL)
6851 {
6852 if (aloop->simduid)
6853 {
6854 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6855 d.to_context);
6856 dest_cfun->has_simduid_loops = true;
6857 }
6858 if (aloop->force_vect)
6859 dest_cfun->has_force_vect_loops = true;
6860 }
6861 }
6862
6863 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6864 if (orig_block)
6865 {
6866 tree block;
6867 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6868 == NULL_TREE);
6869 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6870 = BLOCK_SUBBLOCKS (orig_block);
6871 for (block = BLOCK_SUBBLOCKS (orig_block);
6872 block; block = BLOCK_CHAIN (block))
6873 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6874 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6875 }
6876
6877 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6878 vars_map, dest_cfun->decl);
6879
6880 if (new_label_map)
6881 htab_delete (new_label_map);
6882 if (eh_map)
6883 pointer_map_destroy (eh_map);
6884 pointer_map_destroy (vars_map);
6885
6886 /* Rewire the entry and exit blocks. The successor to the entry
6887 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6888 the child function. Similarly, the predecessor of DEST_FN's
6889 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6890 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6891 various CFG manipulation functions get to the right CFG.
6892
6893 FIXME, this is silly. The CFG ought to become a parameter to
6894 these helpers. */
6895 push_cfun (dest_cfun);
6896 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6897 if (exit_bb)
6898 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6899 pop_cfun ();
6900
6901 /* Back in the original function, the SESE region has disappeared,
6902 create a new basic block in its place. */
6903 bb = create_empty_bb (entry_pred[0]);
6904 if (current_loops)
6905 add_bb_to_loop (bb, loop);
6906 for (i = 0; i < num_entry_edges; i++)
6907 {
6908 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6909 e->probability = entry_prob[i];
6910 }
6911
6912 for (i = 0; i < num_exit_edges; i++)
6913 {
6914 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6915 e->probability = exit_prob[i];
6916 }
6917
6918 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6919 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6920 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6921 dom_bbs.release ();
6922
6923 if (exit_bb)
6924 {
6925 free (exit_prob);
6926 free (exit_flag);
6927 free (exit_succ);
6928 }
6929 free (entry_prob);
6930 free (entry_flag);
6931 free (entry_pred);
6932 bbs.release ();
6933
6934 return bb;
6935 }
6936
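/* Illustrative usage sketch (an assumption for exposition; CHILD_FN,
   ENTRY_BB, EXIT_BB and BLOCK are placeholder names): an outliner such as
   the OpenMP expander moves the region making up a child function with

       struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
       basic_block new_bb
         = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   where ENTRY_BB dominates EXIT_BB and CHILD_CFUN has no CFG yet; the
   whole region collapses to NEW_BB in the original function.  */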
6937
6938 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6939 dumpfile.h). */
6940
6941 void
6942 dump_function_to_file (tree fndecl, FILE *file, int flags)
6943 {
6944 tree arg, var, old_current_fndecl = current_function_decl;
6945 struct function *dsf;
6946 bool ignore_topmost_bind = false, any_var = false;
6947 basic_block bb;
6948 tree chain;
6949 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6950 && decl_is_tm_clone (fndecl));
6951 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6952
6953 current_function_decl = fndecl;
6954 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6955
6956 arg = DECL_ARGUMENTS (fndecl);
6957 while (arg)
6958 {
6959 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6960 fprintf (file, " ");
6961 print_generic_expr (file, arg, dump_flags);
6962 if (flags & TDF_VERBOSE)
6963 print_node (file, "", arg, 4);
6964 if (DECL_CHAIN (arg))
6965 fprintf (file, ", ");
6966 arg = DECL_CHAIN (arg);
6967 }
6968 fprintf (file, ")\n");
6969
6970 if (flags & TDF_VERBOSE)
6971 print_node (file, "", fndecl, 2);
6972
6973 dsf = DECL_STRUCT_FUNCTION (fndecl);
6974 if (dsf && (flags & TDF_EH))
6975 dump_eh_tree (file, dsf);
6976
6977 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
6978 {
6979 dump_node (fndecl, TDF_SLIM | flags, file);
6980 current_function_decl = old_current_fndecl;
6981 return;
6982 }
6983
6984 /* When GIMPLE is lowered, the variables are no longer available in
6985 BIND_EXPRs, so display them separately. */
6986 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
6987 {
6988 unsigned ix;
6989 ignore_topmost_bind = true;
6990
6991 fprintf (file, "{\n");
6992 if (!vec_safe_is_empty (fun->local_decls))
6993 FOR_EACH_LOCAL_DECL (fun, ix, var)
6994 {
6995 print_generic_decl (file, var, flags);
6996 if (flags & TDF_VERBOSE)
6997 print_node (file, "", var, 4);
6998 fprintf (file, "\n");
6999
7000 any_var = true;
7001 }
7002 if (gimple_in_ssa_p (cfun))
7003 for (ix = 1; ix < num_ssa_names; ++ix)
7004 {
7005 tree name = ssa_name (ix);
7006 if (name && !SSA_NAME_VAR (name))
7007 {
7008 fprintf (file, " ");
7009 print_generic_expr (file, TREE_TYPE (name), flags);
7010 fprintf (file, " ");
7011 print_generic_expr (file, name, flags);
7012 fprintf (file, ";\n");
7013
7014 any_var = true;
7015 }
7016 }
7017 }
7018
7019 if (fun && fun->decl == fndecl
7020 && fun->cfg
7021 && basic_block_info_for_function (fun))
7022 {
7023 /* If the CFG has been built, emit a CFG-based dump. */
7024 if (!ignore_topmost_bind)
7025 fprintf (file, "{\n");
7026
7027 if (any_var && n_basic_blocks_for_function (fun))
7028 fprintf (file, "\n");
7029
7030 FOR_EACH_BB_FN (bb, fun)
7031 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7032
7033 fprintf (file, "}\n");
7034 }
7035 else if (DECL_SAVED_TREE (fndecl) == NULL)
7036 {
7037 /* The function is now in GIMPLE form but the CFG has not been
7038 built yet. Emit the single sequence of GIMPLE statements
7039 that make up its body. */
7040 gimple_seq body = gimple_body (fndecl);
7041
7042 if (gimple_seq_first_stmt (body)
7043 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7044 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7045 print_gimple_seq (file, body, 0, flags);
7046 else
7047 {
7048 if (!ignore_topmost_bind)
7049 fprintf (file, "{\n");
7050
7051 if (any_var)
7052 fprintf (file, "\n");
7053
7054 print_gimple_seq (file, body, 2, flags);
7055 fprintf (file, "}\n");
7056 }
7057 }
7058 else
7059 {
7060 int indent;
7061
7062 /* Make a tree based dump. */
7063 chain = DECL_SAVED_TREE (fndecl);
7064 if (chain && TREE_CODE (chain) == BIND_EXPR)
7065 {
7066 if (ignore_topmost_bind)
7067 {
7068 chain = BIND_EXPR_BODY (chain);
7069 indent = 2;
7070 }
7071 else
7072 indent = 0;
7073 }
7074 else
7075 {
7076 if (!ignore_topmost_bind)
7077 fprintf (file, "{\n");
7078 indent = 2;
7079 }
7080
7081 if (any_var)
7082 fprintf (file, "\n");
7083
7084 print_generic_stmt_indented (file, chain, flags, indent);
7085 if (ignore_topmost_bind)
7086 fprintf (file, "}\n");
7087 }
7088
7089 if (flags & TDF_ENUMERATE_LOCALS)
7090 dump_enumerated_decls (file, flags);
7091 fprintf (file, "\n\n");
7092
7093 current_function_decl = old_current_fndecl;
7094 }
7095
7096 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7097
7098 DEBUG_FUNCTION void
7099 debug_function (tree fn, int flags)
7100 {
7101 dump_function_to_file (fn, stderr, flags);
7102 }
7103
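/* For example, from a debugger the current function can be dumped with
   something like:

       (gdb) call debug_function (current_function_decl, 0)  */
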
7104
7105 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7106
7107 static void
7108 print_pred_bbs (FILE *file, basic_block bb)
7109 {
7110 edge e;
7111 edge_iterator ei;
7112
7113 FOR_EACH_EDGE (e, ei, bb->preds)
7114 fprintf (file, "bb_%d ", e->src->index);
7115 }
7116
7117
7118 /* Print on FILE the indexes for the successors of basic_block BB. */
7119
7120 static void
7121 print_succ_bbs (FILE *file, basic_block bb)
7122 {
7123 edge e;
7124 edge_iterator ei;
7125
7126 FOR_EACH_EDGE (e, ei, bb->succs)
7127 fprintf (file, "bb_%d ", e->dest->index);
7128 }
7129
7130 /* Print basic block BB to FILE according to the VERBOSITY level. */
7131
7132 void
7133 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7134 {
7135 char *s_indent = (char *) alloca ((size_t) indent + 1);
7136 memset ((void *) s_indent, ' ', (size_t) indent);
7137 s_indent[indent] = '\0';
7138
7139 /* Print basic_block's header. */
7140 if (verbosity >= 2)
7141 {
7142 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7143 print_pred_bbs (file, bb);
7144 fprintf (file, "}, succs = {");
7145 print_succ_bbs (file, bb);
7146 fprintf (file, "})\n");
7147 }
7148
7149 /* Print basic_block's body. */
7150 if (verbosity >= 3)
7151 {
7152 fprintf (file, "%s {\n", s_indent);
7153 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7154 fprintf (file, "%s }\n", s_indent);
7155 }
7156 }
7157
7158 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7159
7160 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
7161 the VERBOSITY level, this outputs the contents of the loop or just
7162 its structure. */
7163
7164 static void
7165 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7166 {
7167 char *s_indent;
7168 basic_block bb;
7169
7170 if (loop == NULL)
7171 return;
7172
7173 s_indent = (char *) alloca ((size_t) indent + 1);
7174 memset ((void *) s_indent, ' ', (size_t) indent);
7175 s_indent[indent] = '\0';
7176
7177 /* Print loop's header. */
7178 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7179 if (loop->header)
7180 fprintf (file, "header = %d", loop->header->index);
7181 else
7182 {
7183 fprintf (file, "deleted)\n");
7184 return;
7185 }
7186 if (loop->latch)
7187 fprintf (file, ", latch = %d", loop->latch->index);
7188 else
7189 fprintf (file, ", multiple latches");
7190 fprintf (file, ", niter = ");
7191 print_generic_expr (file, loop->nb_iterations, 0);
7192
7193 if (loop->any_upper_bound)
7194 {
7195 fprintf (file, ", upper_bound = ");
7196 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7197 }
7198
7199 if (loop->any_estimate)
7200 {
7201 fprintf (file, ", estimate = ");
7202 dump_double_int (file, loop->nb_iterations_estimate, true);
7203 }
7204 fprintf (file, ")\n");
7205
7206 /* Print loop's body. */
7207 if (verbosity >= 1)
7208 {
7209 fprintf (file, "%s{\n", s_indent);
7210 FOR_EACH_BB (bb)
7211 if (bb->loop_father == loop)
7212 print_loops_bb (file, bb, indent, verbosity);
7213
7214 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7215 fprintf (file, "%s}\n", s_indent);
7216 }
7217 }
7218
7219 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7220 spaces. Depending on the VERBOSITY level, this outputs the contents
7221 of the loop or just its structure. */
7222
7223 static void
7224 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7225 int verbosity)
7226 {
7227 if (loop == NULL)
7228 return;
7229
7230 print_loop (file, loop, indent, verbosity);
7231 print_loop_and_siblings (file, loop->next, indent, verbosity);
7232 }
7233
7234 /* Pretty print on FILE the structure of the loops of the current
7235 function, starting from the CFG entry point. */
7236
7237 void
7238 print_loops (FILE *file, int verbosity)
7239 {
7240 basic_block bb;
7241
7242 bb = ENTRY_BLOCK_PTR;
7243 if (bb && bb->loop_father)
7244 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7245 }
7246
7247 /* Dump a loop. */
7248
7249 DEBUG_FUNCTION void
7250 debug (struct loop &ref)
7251 {
7252 print_loop (stderr, &ref, 0, /*verbosity*/0);
7253 }
7254
7255 DEBUG_FUNCTION void
7256 debug (struct loop *ptr)
7257 {
7258 if (ptr)
7259 debug (*ptr);
7260 else
7261 fprintf (stderr, "<nil>\n");
7262 }
7263
7264 /* Dump a loop verbosely. */
7265
7266 DEBUG_FUNCTION void
7267 debug_verbose (struct loop &ref)
7268 {
7269 print_loop (stderr, &ref, 0, /*verbosity*/3);
7270 }
7271
7272 DEBUG_FUNCTION void
7273 debug_verbose (struct loop *ptr)
7274 {
7275 if (ptr)
7276 debug_verbose (*ptr);
7277 else
7278 fprintf (stderr, "<nil>\n");
7279 }
7280
7281
7282 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7283
7284 DEBUG_FUNCTION void
7285 debug_loops (int verbosity)
7286 {
7287 print_loops (stderr, verbosity);
7288 }
7289
7290 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7291
7292 DEBUG_FUNCTION void
7293 debug_loop (struct loop *loop, int verbosity)
7294 {
7295 print_loop (stderr, loop, 0, verbosity);
7296 }
7297
7298 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7299 level. */
7300
7301 DEBUG_FUNCTION void
7302 debug_loop_num (unsigned num, int verbosity)
7303 {
7304 debug_loop (get_loop (cfun, num), verbosity);
7305 }
7306
7307 /* Return true if BB ends with a call, possibly followed by some
7308 instructions that must stay with the call. Return false
7309 otherwise. */
7310
7311 static bool
7312 gimple_block_ends_with_call_p (basic_block bb)
7313 {
7314 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7315 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7316 }
7317
7318
7319 /* Return true if BB ends with a conditional branch. Return false
7320 otherwise. */
7321
7322 static bool
7323 gimple_block_ends_with_condjump_p (const_basic_block bb)
7324 {
7325 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7326 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7327 }
7328
7329
7330 /* Return true if we need to add fake edge to exit at statement T.
7331 Helper function for gimple_flow_call_edges_add. */
7332
7333 static bool
7334 need_fake_edge_p (gimple t)
7335 {
7336 tree fndecl = NULL_TREE;
7337 int call_flags = 0;
7338
7339 /* NORETURN and LONGJMP calls already have an edge to exit.
7340 CONST and PURE calls do not need one.
7341 We don't currently check for CONST and PURE here, although
7342 it would be a good idea, because those attributes are
7343 figured out from the RTL in mark_constant_function, and
7344 the counter incrementation code from -fprofile-arcs
7345 leads to different results from -fbranch-probabilities. */
7346 if (is_gimple_call (t))
7347 {
7348 fndecl = gimple_call_fndecl (t);
7349 call_flags = gimple_call_flags (t);
7350 }
7351
7352 if (is_gimple_call (t)
7353 && fndecl
7354 && DECL_BUILT_IN (fndecl)
7355 && (call_flags & ECF_NOTHROW)
7356 && !(call_flags & ECF_RETURNS_TWICE)
7357 /* fork() doesn't really return twice, but the effect of
7358 wrapping it in __gcov_fork() which calls __gcov_flush()
7359 and clears the counters before forking has the same
7360 effect as returning twice. Force a fake edge. */
7361 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7362 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7363 return false;
7364
7365 if (is_gimple_call (t))
7366 {
7367 edge_iterator ei;
7368 edge e;
7369 basic_block bb;
7370
7371 if (!(call_flags & ECF_NORETURN))
7372 return true;
7373
7374 bb = gimple_bb (t);
7375 FOR_EACH_EDGE (e, ei, bb->succs)
7376 if ((e->flags & EDGE_FAKE) == 0)
7377 return true;
7378 }
7379
7380 if (gimple_code (t) == GIMPLE_ASM
7381 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7382 return true;
7383
7384 return false;
7385 }
7386
7387
7388 /* Add fake edges to the function exit for any non-constant and
7389 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7390 for volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7391 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7392 that were split.
7393
7394 The goal is to expose cases in which entering a basic block does
7395 not imply that all subsequent instructions must be executed. */
7396
7397 static int
7398 gimple_flow_call_edges_add (sbitmap blocks)
7399 {
7400 int i;
7401 int blocks_split = 0;
7402 int last_bb = last_basic_block;
7403 bool check_last_block = false;
7404
7405 if (n_basic_blocks == NUM_FIXED_BLOCKS)
7406 return 0;
7407
7408 if (! blocks)
7409 check_last_block = true;
7410 else
7411 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7412
7413 /* In the last basic block, before epilogue generation, there will be
7414 a fallthru edge to EXIT. Special care is required if the last insn
7415 of the last basic block is a call because make_edge folds duplicate
7416 edges, which would result in the fallthru edge also being marked
7417 fake, which would result in the fallthru edge being removed by
7418 remove_fake_edges, which would result in an invalid CFG.
7419
7420 Moreover, we can't elide the outgoing fake edge, since the block
7421 profiler needs to take this into account in order to solve the minimal
7422 spanning tree in the case that the call doesn't return.
7423
7424 Handle this by adding a dummy instruction in a new last basic block. */
7425 if (check_last_block)
7426 {
7427 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7428 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7429 gimple t = NULL;
7430
7431 if (!gsi_end_p (gsi))
7432 t = gsi_stmt (gsi);
7433
7434 if (t && need_fake_edge_p (t))
7435 {
7436 edge e;
7437
7438 e = find_edge (bb, EXIT_BLOCK_PTR);
7439 if (e)
7440 {
7441 gsi_insert_on_edge (e, gimple_build_nop ());
7442 gsi_commit_edge_inserts ();
7443 }
7444 }
7445 }
7446
7447 /* Now add fake edges to the function exit for any non-constant
7448 calls since there is no way that we can determine if they will
7449 return or not... */
7450 for (i = 0; i < last_bb; i++)
7451 {
7452 basic_block bb = BASIC_BLOCK (i);
7453 gimple_stmt_iterator gsi;
7454 gimple stmt, last_stmt;
7455
7456 if (!bb)
7457 continue;
7458
7459 if (blocks && !bitmap_bit_p (blocks, i))
7460 continue;
7461
7462 gsi = gsi_last_nondebug_bb (bb);
7463 if (!gsi_end_p (gsi))
7464 {
7465 last_stmt = gsi_stmt (gsi);
7466 do
7467 {
7468 stmt = gsi_stmt (gsi);
7469 if (need_fake_edge_p (stmt))
7470 {
7471 edge e;
7472
7473 /* The handling above of the final block before the
7474 epilogue should be enough to verify that there is
7475 no edge to the exit block in CFG already.
7476 Calling make_edge in such case would cause us to
7477 mark that edge as fake and remove it later. */
7478 #ifdef ENABLE_CHECKING
7479 if (stmt == last_stmt)
7480 {
7481 e = find_edge (bb, EXIT_BLOCK_PTR);
7482 gcc_assert (e == NULL);
7483 }
7484 #endif
7485
7486 /* Note that the following may create a new basic block
7487 and renumber the existing basic blocks. */
7488 if (stmt != last_stmt)
7489 {
7490 e = split_block (bb, stmt);
7491 if (e)
7492 blocks_split++;
7493 }
7494 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7495 }
7496 gsi_prev (&gsi);
7497 }
7498 while (!gsi_end_p (gsi));
7499 }
7500 }
7501
7502 if (blocks_split)
7503 verify_flow_info ();
7504
7505 return blocks_split;
7506 }
7507
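/* An illustrative case (a hypothetical snippet, not from this file):
   given a block such as

       foo ();        <-- may longjmp or abort, so control may never
       x = x + 1;         reach the increment

   gimple_flow_call_edges_add splits the block after the call and adds a
   fake edge to EXIT, so that entering the block no longer implies that
   the increment executes.  */
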
7508 /* Removes edge E and all the blocks dominated by it, and updates dominance
7509 information. The IL in E->src needs to be updated separately.
7510 If dominance info is not available, only the edge E is removed. */
7511
7512 void
7513 remove_edge_and_dominated_blocks (edge e)
7514 {
7515 vec<basic_block> bbs_to_remove = vNULL;
7516 vec<basic_block> bbs_to_fix_dom = vNULL;
7517 bitmap df, df_idom;
7518 edge f;
7519 edge_iterator ei;
7520 bool none_removed = false;
7521 unsigned i;
7522 basic_block bb, dbb;
7523 bitmap_iterator bi;
7524
7525 if (!dom_info_available_p (CDI_DOMINATORS))
7526 {
7527 remove_edge (e);
7528 return;
7529 }
7530
7531 /* No updating is needed for edges to exit. */
7532 if (e->dest == EXIT_BLOCK_PTR)
7533 {
7534 if (cfgcleanup_altered_bbs)
7535 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7536 remove_edge (e);
7537 return;
7538 }
7539
7540 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7541 that is not dominated by E->dest, then this set is empty. Otherwise,
7542 all the basic blocks dominated by E->dest are removed.
7543
7544 Also, to DF_IDOM we store the immediate dominators of the blocks in
7545 the dominance frontier of E (i.e., of the successors of the
7546 removed blocks, if there are any, and of E->dest otherwise). */
7547 FOR_EACH_EDGE (f, ei, e->dest->preds)
7548 {
7549 if (f == e)
7550 continue;
7551
7552 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7553 {
7554 none_removed = true;
7555 break;
7556 }
7557 }
7558
7559 df = BITMAP_ALLOC (NULL);
7560 df_idom = BITMAP_ALLOC (NULL);
7561
7562 if (none_removed)
7563 bitmap_set_bit (df_idom,
7564 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7565 else
7566 {
7567 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7568 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7569 {
7570 FOR_EACH_EDGE (f, ei, bb->succs)
7571 {
7572 if (f->dest != EXIT_BLOCK_PTR)
7573 bitmap_set_bit (df, f->dest->index);
7574 }
7575 }
7576 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7577 bitmap_clear_bit (df, bb->index);
7578
7579 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7580 {
7581 bb = BASIC_BLOCK (i);
7582 bitmap_set_bit (df_idom,
7583 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7584 }
7585 }
7586
7587 if (cfgcleanup_altered_bbs)
7588 {
7589 /* Record the set of the altered basic blocks. */
7590 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7591 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7592 }
7593
7594 /* Remove E and the cancelled blocks. */
7595 if (none_removed)
7596 remove_edge (e);
7597 else
7598 {
7599 /* Walk backwards so as to get a chance to substitute all
7600 released DEFs into debug stmts. See
7601 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7602 details. */
7603 for (i = bbs_to_remove.length (); i-- > 0; )
7604 delete_basic_block (bbs_to_remove[i]);
7605 }
7606
7607 /* Update the dominance information. The immediate dominator may change only
7608 for blocks whose immediate dominator belongs to DF_IDOM:
7609
7610 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7611 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7612 Z dominates X after the removal. Before removal, there exists a path P
7613 from Y to X that avoids Z. Let F be the last edge on P that is
7614 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7615 dominates W, and because of P, Z does not dominate W), and W belongs to
7616 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7617 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7618 {
7619 bb = BASIC_BLOCK (i);
7620 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7621 dbb;
7622 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7623 bbs_to_fix_dom.safe_push (dbb);
7624 }
7625
7626 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7627
7628 BITMAP_FREE (df);
7629 BITMAP_FREE (df_idom);
7630 bbs_to_remove.release ();
7631 bbs_to_fix_dom.release ();
7632 }
7633
7634 /* Purge dead EH edges from basic block BB. */
7635
7636 bool
7637 gimple_purge_dead_eh_edges (basic_block bb)
7638 {
7639 bool changed = false;
7640 edge e;
7641 edge_iterator ei;
7642 gimple stmt = last_stmt (bb);
7643
7644 if (stmt && stmt_can_throw_internal (stmt))
7645 return false;
7646
7647 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7648 {
7649 if (e->flags & EDGE_EH)
7650 {
7651 remove_edge_and_dominated_blocks (e);
7652 changed = true;
7653 }
7654 else
7655 ei_next (&ei);
7656 }
7657
7658 return changed;
7659 }
7660
7661 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7662
7663 bool
7664 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7665 {
7666 bool changed = false;
7667 unsigned i;
7668 bitmap_iterator bi;
7669
7670 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7671 {
7672 basic_block bb = BASIC_BLOCK (i);
7673
7674 /* Earlier gimple_purge_dead_eh_edges could have removed
7675 this basic block already. */
7676 gcc_assert (bb || changed);
7677 if (bb != NULL)
7678 changed |= gimple_purge_dead_eh_edges (bb);
7679 }
7680
7681 return changed;
7682 }
7683
7684 /* Purge dead abnormal call edges from basic block BB. */
7685
7686 bool
7687 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7688 {
7689 bool changed = false;
7690 edge e;
7691 edge_iterator ei;
7692 gimple stmt = last_stmt (bb);
7693
7694 if (!cfun->has_nonlocal_label
7695 && !cfun->calls_setjmp)
7696 return false;
7697
7698 if (stmt && stmt_can_make_abnormal_goto (stmt))
7699 return false;
7700
7701 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7702 {
7703 if (e->flags & EDGE_ABNORMAL)
7704 {
7705 if (e->flags & EDGE_FALLTHRU)
7706 e->flags &= ~EDGE_ABNORMAL;
7707 else
7708 remove_edge_and_dominated_blocks (e);
7709 changed = true;
7710 }
7711 else
7712 ei_next (&ei);
7713 }
7714
7715 return changed;
7716 }
7717
7718 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7719
7720 bool
7721 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7722 {
7723 bool changed = false;
7724 unsigned i;
7725 bitmap_iterator bi;
7726
7727 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7728 {
7729 basic_block bb = BASIC_BLOCK (i);
7730
7731 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7732 this basic block already. */
7733 gcc_assert (bb || changed);
7734 if (bb != NULL)
7735 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7736 }
7737
7738 return changed;
7739 }
7740
7741 /* This function is called whenever a new edge is created or
7742 redirected. */
7743
7744 static void
7745 gimple_execute_on_growing_pred (edge e)
7746 {
7747 basic_block bb = e->dest;
7748
7749 if (!gimple_seq_empty_p (phi_nodes (bb)))
7750 reserve_phi_args_for_new_edge (bb);
7751 }
7752
7753 /* This function is called immediately before edge E is removed from
7754 the edge vector E->dest->preds. */
7755
7756 static void
7757 gimple_execute_on_shrinking_pred (edge e)
7758 {
7759 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7760 remove_phi_args (e);
7761 }
7762
7763 /*---------------------------------------------------------------------------
7764 Helper functions for Loop versioning
7765 ---------------------------------------------------------------------------*/
7766
7767 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7768 of 'first'. Both of them are dominated by 'new_head' basic block. When
7769 'new_head' was created by splitting 'second's incoming edge, it received
7770 phi arguments on that edge from split_edge(). Later, an additional edge
7771 'e' was created to connect 'new_head' and 'first'. This routine now adds
7772 to edge 'e' the phi args that the new_head-to-second edge received as
7773 part of the edge splitting. */
7774
7775 static void
7776 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7777 basic_block new_head, edge e)
7778 {
7779 gimple phi1, phi2;
7780 gimple_stmt_iterator psi1, psi2;
7781 tree def;
7782 edge e2 = find_edge (new_head, second);
7783
7784 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7785 edge, we should always have an edge from NEW_HEAD to SECOND. */
7786 gcc_assert (e2 != NULL);
7787
7788 /* Browse all 'second' basic block phi nodes and add phi args to
7789 edge 'e' for 'first' head. PHI args are always in correct order. */
7790
7791 for (psi2 = gsi_start_phis (second),
7792 psi1 = gsi_start_phis (first);
7793 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7794 gsi_next (&psi2), gsi_next (&psi1))
7795 {
7796 phi1 = gsi_stmt (psi1);
7797 phi2 = gsi_stmt (psi2);
7798 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7799 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7800 }
7801 }
7802
7803
7804 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7805 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7806 the destination of the ELSE part. */
7807
7808 static void
7809 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7810 basic_block second_head ATTRIBUTE_UNUSED,
7811 basic_block cond_bb, void *cond_e)
7812 {
7813 gimple_stmt_iterator gsi;
7814 gimple new_cond_expr;
7815 tree cond_expr = (tree) cond_e;
7816 edge e0;
7817
7818 /* Build the new conditional expr. */
7819 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7820 NULL_TREE, NULL_TREE);
7821
7822 /* Add new cond in cond_bb. */
7823 gsi = gsi_last_bb (cond_bb);
7824 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7825
7826 /* Adjust edges appropriately to connect new head with first head
7827 as well as second head. */
7828 e0 = single_succ_edge (cond_bb);
7829 e0->flags &= ~EDGE_FALLTHRU;
7830 e0->flags |= EDGE_FALSE_VALUE;
7831 }
7832
7833
7834 /* Do book-keeping of basic block BB for the profile consistency checker.
7835 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
7836 do post-pass accounting. Store the counts in RECORD. */
7837 static void
7838 gimple_account_profile_record (basic_block bb, int after_pass,
7839 struct profile_record *record)
7840 {
7841 gimple_stmt_iterator i;
7842 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7843 {
7844 record->size[after_pass]
7845 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7846 if (profile_status == PROFILE_READ)
7847 record->time[after_pass]
7848 += estimate_num_insns (gsi_stmt (i),
7849 &eni_time_weights) * bb->count;
7850 else if (profile_status == PROFILE_GUESSED)
7851 record->time[after_pass]
7852 += estimate_num_insns (gsi_stmt (i),
7853 &eni_time_weights) * bb->frequency;
7854 }
7855 }
7856
7857 struct cfg_hooks gimple_cfg_hooks = {
7858 "gimple",
7859 gimple_verify_flow_info,
7860 gimple_dump_bb, /* dump_bb */
7861 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7862 create_bb, /* create_basic_block */
7863 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7864 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7865 gimple_can_remove_branch_p, /* can_remove_branch_p */
7866 remove_bb, /* delete_basic_block */
7867 gimple_split_block, /* split_block */
7868 gimple_move_block_after, /* move_block_after */
7869 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7870 gimple_merge_blocks, /* merge_blocks */
7871 gimple_predict_edge, /* predict_edge */
7872 gimple_predicted_by_p, /* predicted_by_p */
7873 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7874 gimple_duplicate_bb, /* duplicate_block */
7875 gimple_split_edge, /* split_edge */
7876 gimple_make_forwarder_block, /* make_forwarder_block */
7877 NULL, /* tidy_fallthru_edge */
7878 NULL, /* force_nonfallthru */
7879 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7880 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7881 gimple_flow_call_edges_add, /* flow_call_edges_add */
7882 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7883 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7884 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7885 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7886 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7887 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7888 flush_pending_stmts, /* flush_pending_stmts */
7889 gimple_empty_block_p, /* block_empty_p */
7890 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7891 gimple_account_profile_record,
7892 };
7893
7894
7895 /* Split all critical edges, i.e. edges from a block with multiple
7895 successors to a block with multiple predecessors. */
7896
7897 static unsigned int
7898 split_critical_edges (void)
7899 {
7900 basic_block bb;
7901 edge e;
7902 edge_iterator ei;
7903
7904 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7905 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7906 mappings around the calls to split_edge. */
7907 start_recording_case_labels ();
7908 FOR_ALL_BB (bb)
7909 {
7910 FOR_EACH_EDGE (e, ei, bb->succs)
7911 {
7912 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7913 split_edge (e);
7914 /* PRE inserts statements on edges and expects that
7915 since split_critical_edges was done beforehand, committing edge
7916 insertions will not split more edges. In addition to critical
7917 edges we must split edges that have multiple successors and
7918 end by control flow statements, such as RESX.
7919 Go ahead and split them too. This matches the logic in
7920 gimple_find_edge_insert_loc. */
7921 else if ((!single_pred_p (e->dest)
7922 || !gimple_seq_empty_p (phi_nodes (e->dest))
7923 || e->dest == EXIT_BLOCK_PTR)
7924 && e->src != ENTRY_BLOCK_PTR
7925 && !(e->flags & EDGE_ABNORMAL))
7926 {
7927 gimple_stmt_iterator gsi;
7928
7929 gsi = gsi_last_bb (e->src);
7930 if (!gsi_end_p (gsi)
7931 && stmt_ends_bb_p (gsi_stmt (gsi))
7932 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7933 && !gimple_call_builtin_p (gsi_stmt (gsi),
7934 BUILT_IN_RETURN)))
7935 split_edge (e);
7936 }
7937 }
7938 }
7939 end_recording_case_labels ();
7940 return 0;
7941 }
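
/* For reference: an edge is critical when its source has more than one
   successor and its destination has more than one predecessor, so a
   statement can only be inserted "on" it by giving the edge a block of
   its own.  basic-block.h expresses this as

     #define EDGE_CRITICAL_P(e) (EDGE_COUNT ((e)->src->succs) >= 2 \
                                 && EDGE_COUNT ((e)->dest->preds) >= 2)

   (quoted here for illustration only).  */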

namespace {

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_flow, /* todo_flags_finish */
};

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}


/* Build a ternary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b, tree c)
{
  tree ret;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  ret = fold_build3_loc (loc, code, type, a, b, c);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a binary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b)
{
  tree ret;

  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}

/* Build a unary operation and gimplify it.  Emit code before GSI.
   Return the gimple_val holding the result.  */

tree
gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
                 tree a)
{
  tree ret;

  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
  STRIP_NOPS (ret);

  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
                                   GSI_SAME_STMT);
}
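
/* A hypothetical caller materializing (A + B) * C as a gimple value
   before GSI would chain these helpers:

     tree sum  = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (&gsi, MULT_EXPR, type, sum, c);

   Each call folds the expression, strips conversions and forces the
   result into a gimple operand, so PROD can be used directly as an
   operand of the statement at GSI.  (Sketch only; SUM, PROD, TYPE, A, B
   and C are made-up names.)  */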


\f
/* Emit return warnings.  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          last = last_stmt (e->src);
          if ((gimple_code (last) == GIMPLE_RETURN
               || gimple_call_builtin_p (last, BUILT_IN_RETURN))
              && (location = gimple_location (last)) != UNKNOWN_LOCATION)
            break;
        }
      if (location == UNKNOWN_LOCATION)
        location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
           && !TREE_NO_WARNING (cfun->decl)
           && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
           && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          gimple last = last_stmt (e->src);
          if (gimple_code (last) == GIMPLE_RETURN
              && gimple_return_retval (last) == NULL
              && !gimple_no_warning_p (last))
            {
              location = gimple_location (last);
              if (location == UNKNOWN_LOCATION)
                location = cfun->function_end_locus;
              warning_at (location, OPT_Wreturn_type,
                          "control reaches end of non-void function");
              TREE_NO_WARNING (cfun->decl) = 1;
              break;
            }
        }
    }
  return 0;
}
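
/* Two made-up inputs that illustrate the warnings above:

     __attribute__ ((noreturn)) void fatal (void);
     void fatal (void) { }        // warns: noreturn function does return

     int sign (int x) { if (x > 0) return 1; }
                                  // warns: control reaches end of
                                  // non-void function

   The first has a path to EXIT despite being declared noreturn; the
   second reaches EXIT through an implicit "return;" with no value.  */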


/* Given a basic block B which ends with a conditional and has
   precisely two successors, determine which of the edges is taken if
   the conditional is true and which is taken if the conditional is
   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */

void
extract_true_false_edges_from_block (basic_block b,
                                     edge *true_edge,
                                     edge *false_edge)
{
  edge e = EDGE_SUCC (b, 0);

  if (e->flags & EDGE_TRUE_VALUE)
    {
      *true_edge = e;
      *false_edge = EDGE_SUCC (b, 1);
    }
  else
    {
      *false_edge = e;
      *true_edge = EDGE_SUCC (b, 1);
    }
}
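
/* Typical use, assuming COND_BB is known to end in a GIMPLE_COND (names
   made up for illustration):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);

   The precondition matters: B must have exactly two successor edges,
   one flagged EDGE_TRUE_VALUE and the other EDGE_FALSE_VALUE, as the
   GIMPLE CFG builder creates for a GIMPLE_COND.  */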

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_warn_function_return (); }

}; // class pass_warn_function_return

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}

/* Emit noreturn warnings.  */

static unsigned int
execute_warn_function_noreturn (void)
{
  if (!TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
    warn_function_noreturn (current_function_decl);
  return 0;
}
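
/* A made-up example of what this diagnoses: with
   -Wsuggest-attribute=noreturn, a function such as

     int spin (void) { for (;;) ; }

   leaves the exit block with no predecessor edges and is not yet marked
   TREE_THIS_VOLATILE, so warn_function_noreturn suggests adding
   __attribute__ ((noreturn)) to it.  */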

static bool
gate_warn_function_noreturn (void)
{
  return warn_suggest_attribute_noreturn;
}

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_function_noreturn (); }
  unsigned int execute () { return execute_warn_function_noreturn (); }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}


/* Walk a gimplified function and warn for functions whose return value is
   ignored and attribute((warn_unused_result)) is set.  This is done before
   inlining, so we do not have to worry about calls that inlining would
   later remove.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple g = gsi_stmt (i);

      switch (gimple_code (g))
        {
        case GIMPLE_BIND:
          do_warn_unused_result (gimple_bind_body (g));
          break;
        case GIMPLE_TRY:
          do_warn_unused_result (gimple_try_eval (g));
          do_warn_unused_result (gimple_try_cleanup (g));
          break;
        case GIMPLE_CATCH:
          do_warn_unused_result (gimple_catch_handler (g));
          break;
        case GIMPLE_EH_FILTER:
          do_warn_unused_result (gimple_eh_filter_failure (g));
          break;

        case GIMPLE_CALL:
          if (gimple_call_lhs (g))
            break;
          if (gimple_call_internal_p (g))
            break;

          /* This is a naked call, as opposed to a GIMPLE_CALL with an
             LHS.  All calls whose value is ignored should be
             represented like this.  Look for the attribute.  */
          fdecl = gimple_call_fndecl (g);
          ftype = gimple_call_fntype (g);

          if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
            {
              location_t loc = gimple_location (g);

              if (fdecl)
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of %qD, "
                            "declared with attribute warn_unused_result",
                            fdecl);
              else
                warning_at (loc, OPT_Wunused_result,
                            "ignoring return value of function "
                            "declared with attribute warn_unused_result");
            }
          break;

        default:
          /* Not a container, not a call, or a call whose value is used.  */
          break;
        }
    }
}
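
/* A made-up example of what this diagnoses:

     __attribute__ ((warn_unused_result)) int must_check (void);

     void f (void)
     {
       must_check ();         // warns: naked call, value ignored
       (void) must_check ();  // still warns: the cast leaves no LHS
       int r = must_check (); // no warning: the call has an LHS
       (void) r;
     }

   Because the walk only considers GIMPLE_CALLs without an LHS, casting
   the result to void does not silence this particular warning.  */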

static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}

static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_unused_result (); }
  unsigned int execute () { return run_warn_unused_result (); }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}


/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

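/* Mark the GC-managed fields of edge E.  LOCATION_BLOCK yields the
   BLOCK, if any, carried in the ad-hoc data of E->goto_locus; the
   marking routines take their argument by reference, so that tree is
   copied into a local first.  Only one of insns.g and insns.r is live,
   selected by the IR currently in use.  */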
void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

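/* Apply OP (with COOKIE) to each GC pointer reachable from edge E for
   PCH pointer relocation, mirroring the two walkers above.  As there,
   the goto_locus BLOCK is extracted into a local before being
   processed.  */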
void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}