Daily bump.
[gcc.git] / gcc / tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010, 2011, 2012 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "flags.h"
30 #include "function.h"
31 #include "ggc.h"
32 #include "gimple-pretty-print.h"
33 #include "tree-flow.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "diagnostic-core.h"
37 #include "except.h"
38 #include "cfgloop.h"
39 #include "tree-ssa-propagate.h"
40 #include "value-prof.h"
41 #include "pointer-set.h"
42 #include "tree-inline.h"
43 #include "target.h"
44
45 /* This file contains functions for building the Control Flow Graph (CFG)
46 for a function tree. */
47
/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of labels merged into their block's leading label.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  /* The source location acting as the hash key.  */
  location_t locus;
  /* Last discriminator value handed out for LOCUS.  */
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
125
/* Initialize the CFG of function FN to a minimal, empty flowgraph:
   just the ENTRY and EXIT blocks, linked to each other, with the
   basic-block and label-to-block maps pre-allocated.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
			 initial_cfg_capacity);

  /* Register the two fixed blocks in the block array.  */
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
				ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
				EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  /* Chain ENTRY directly to EXIT; real blocks get linked in between
     as they are created.  */
  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}
153
/* Initialize an empty flowgraph for the current function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
159
160 /*---------------------------------------------------------------------------
161 Create basic blocks
162 ---------------------------------------------------------------------------*/
163
164 /* Entry point to the CFG builder for trees. SEQ is the sequence of
165 statements to be added to the flowgraph. */
166
167 static void
168 build_gimple_cfg (gimple_seq seq)
169 {
170 /* Register specific gimple functions. */
171 gimple_register_cfg_hooks ();
172
173 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
174
175 init_empty_tree_cfg ();
176
177 found_computed_goto = 0;
178 make_blocks (seq);
179
180 /* Computed gotos are hell to deal with, especially if there are
181 lots of them with a large number of destinations. So we factor
182 them to a common computed goto location before we build the
183 edge list. After we convert back to normal form, we will un-factor
184 the computed gotos since factoring introduces an unwanted jump. */
185 if (found_computed_goto)
186 factor_computed_gotos ();
187
188 /* Make sure there is always at least one block, even if it's empty. */
189 if (n_basic_blocks == NUM_FIXED_BLOCKS)
190 create_empty_bb (ENTRY_BLOCK_PTR);
191
192 /* Adjust the size of the array. */
193 if (basic_block_info->length () < (size_t) n_basic_blocks)
194 vec_safe_grow_cleared (basic_block_info, n_basic_blocks);
195
196 /* To speed up statement iterator walks, we first purge dead labels. */
197 cleanup_dead_labels ();
198
199 /* Group case nodes to reduce the number of edges.
200 We do this after cleaning up dead labels because otherwise we miss
201 a lot of obvious case merging opportunities. */
202 group_case_labels ();
203
204 /* Create the edges of the flowgraph. */
205 discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
206 free);
207 make_edges ();
208 cleanup_dead_labels ();
209 htab_delete (discriminator_per_locus);
210 }
211
/* Execute function for the pass_build_cfg pass: build the CFG for the
   current function's body and discard the now-redundant linear body.
   Always returns 0 (no extra TODO flags).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statements now live in the CFG's basic blocks; drop the
     function's linear body so it is not kept alive twice.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}
226
/* Pass descriptor for the CFG construction pass ("cfg").  Requires
   lowered EH (PROP_gimple_leh) and provides PROP_cfg.  */

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  OPTGROUP_NONE,			/* optinfo_flags */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg	/* todo_flags_finish */
 }
};
246
247
248 /* Return true if T is a computed goto. */
249
250 static bool
251 computed_goto_p (gimple t)
252 {
253 return (gimple_code (t) == GIMPLE_GOTO
254 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
255 }
256
257
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  /* Shared destination variable; created lazily on the first hit.  */
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
	continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  gimple assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (!factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
	      factored_computed_goto_label
		= gimple_build_label (factored_label_decl);
	      gsi_insert_after (&new_gsi, factored_computed_goto_label,
				GSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = gimple_build_goto (var);
	      gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = gimple_build_assign (var, gimple_goto_dest (last));
	  gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  gimple_goto_set_dest (last, factored_label_decl);
	}
    }
}
332
333
/* Build a flowgraph for the sequence of stmts SEQ.  Partitions SEQ into
   basic blocks by splitting at statements that start or end a block,
   assigning each statement to its block, and noting whether any
   computed goto was seen (in found_computed_goto).  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  /* Split the sequence so the new block owns the tail starting
	     at STMT; the very first statement keeps the whole SEQ.  */
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      /* Complex/vector temporaries must be marked as GIMPLE
		 registers to stay in SSA form.  */
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
401
402
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-NULL, is the gimple_seq the block will hold; E must be NULL
   (edge-carrying creation is not supported by this hook).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
      /* Grow by 25% to amortize repeated insertions.  */
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}
439
440
441 /*---------------------------------------------------------------------------
442 Edge creation
443 ---------------------------------------------------------------------------*/
444
/* Fold COND_EXPR_COND of each COND_EXPR.  Conditions that fold to a
   constant are rewritten to an unconditional true/false GIMPLE_COND.
   Overflow warnings are deferred so they are only emitted when the
   fold actually changes the condition.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  /* Defer overflow warnings until we know whether the fold
	     produced a constant; the undefer call below decides
	     whether to emit them.  */
	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
				  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (stmt);
	  else if (onep)
	    gimple_cond_make_true (stmt);
	}
    }
}
483
/* Join all the blocks in the flowgraph.  Walks every basic block,
   inspects its last statement and creates the outgoing edges it
   implies, tracking OpenMP region nesting in CUR_REGION along the
   way.  Finally folds constant GIMPLE_COND conditions.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	      fallthru = false;
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru = make_eh_dispatch_edges (last);
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
	      /* Some calls are known not to return.  */
	      else
		fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	       /* A GIMPLE_ASSIGN may throw internally and thus be considered
		  control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    /* These directives open a new OMP region; pop back out
	       at the matching GIMPLE_OMP_RETURN below.  */
	    case GIMPLE_OMP_PARALLEL:
	    case GIMPLE_OMP_TASK:
	    case GIMPLE_OMP_FOR:
	    case GIMPLE_OMP_SINGLE:
	    case GIMPLE_OMP_MASTER:
	    case GIMPLE_OMP_ORDERED:
	    case GIMPLE_OMP_CRITICAL:
	    case GIMPLE_OMP_SECTION:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case GIMPLE_OMP_SECTIONS_SWITCH:
	      fallthru = false;
	      break;

	    case GIMPLE_OMP_ATOMIC_LOAD:
	    case GIMPLE_OMP_ATOMIC_STORE:
	       fallthru = true;
	       break;

	    case GIMPLE_OMP_RETURN:
	      /* In the case of a GIMPLE_OMP_SECTION, the edge will go
		 somewhere other than the next block.  This will be
		 created later.  */
	      cur_region->exit = bb;
	      fallthru = cur_region->type != GIMPLE_OMP_SECTION;
	      cur_region = cur_region->outer;
	      break;

	    case GIMPLE_OMP_CONTINUE:
	      cur_region->cont = bb;
	      switch (cur_region->type)
		{
		case GIMPLE_OMP_FOR:
		  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
		     succs edges as abnormal to prevent splitting
		     them.  */
		  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
		  /* Make the loopback edge.  */
		  make_edge (bb, single_succ (cur_region->entry),
			     EDGE_ABNORMAL);

		  /* Create an edge from GIMPLE_OMP_FOR to exit, which
		     corresponds to the case that the body of the loop
		     is not executed at all.  */
		  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
		  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
		  fallthru = false;
		  break;

		case GIMPLE_OMP_SECTIONS:
		  /* Wire up the edges into and out of the nested sections.  */
		  {
		    basic_block switch_bb = single_succ (cur_region->entry);

		    struct omp_region *i;
		    for (i = cur_region->inner; i ; i = i->next)
		      {
			gcc_assert (i->type == GIMPLE_OMP_SECTION);
			make_edge (switch_bb, i->entry, 0);
			make_edge (i->exit, bb, EDGE_FALLTHRU);
		      }

		    /* Make the loopback edge to the block with
		       GIMPLE_OMP_SECTIONS_SWITCH.  */
		    make_edge (bb, switch_bb, 0);

		    /* Make the edge from the switch to exit.  */
		    make_edge (switch_bb, bb->next_bb, 0);
		    fallthru = false;
		  }
		  break;

		default:
		  gcc_unreachable ();
		}
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label = gimple_transaction_label (last);
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	{
	  make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	  if (last)
	    assign_discriminator (gimple_location (last), bb->next_bb);
	}
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
678
679 /* Trivial hash function for a location_t. ITEM is a pointer to
680 a hash table entry that maps a location_t to a discriminator. */
681
682 static unsigned int
683 locus_map_hash (const void *item)
684 {
685 return ((const struct locus_discrim_map *) item)->locus;
686 }
687
688 /* Equality function for the locus-to-discriminator map. VA and VB
689 point to the two hash table entries to compare. */
690
691 static int
692 locus_map_eq (const void *va, const void *vb)
693 {
694 const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
695 const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
696 return a->locus == b->locus;
697 }
698
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  Allocates a fresh map entry on first use of LOCUS;
   the first value returned for a locus is 1.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  /* LOCUS doubles as its own hash value (see locus_map_hash).  */
  slot = (struct locus_discrim_map **)
      htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
				(hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
726
727 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
728
729 static bool
730 same_line_p (location_t locus1, location_t locus2)
731 {
732 expanded_location from, to;
733
734 if (locus1 == locus2)
735 return true;
736
737 from = expand_location (locus1);
738 to = expand_location (locus2);
739
740 if (from.line != to.line)
741 return false;
742 if (from.file == to.file)
743 return true;
744 return (from.file != NULL
745 && to.file != NULL
746 && filename_cmp (from.file, to.file) == 0);
747 }
748
/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  No-op when LOCUS is unknown or BB
   already carries a discriminator.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  /* Only assign a discriminator when BB actually shares LOCUS's
     source line with its first or last statement.  */
  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}
766
/* Create the edges for a GIMPLE_COND starting at block BB: a
   TRUE_VALUE edge to the then-block and a FALSE_VALUE edge to the
   else-block.  The condition's labels are cleared afterwards since
   the edges now encode the targets.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  /* make_edge may return NULL if the edge already exists.  */
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
806
807
808 /* Called for each element in the hash table (P) as we delete the
809 edge to cases hash table.
810
811 Clear all the TREE_CHAINs to prevent problems with copying of
812 SWITCH_EXPRs and structure sharing rules, then free the hash table
813 element. */
814
815 static bool
816 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
817 void *data ATTRIBUTE_UNUSED)
818 {
819 tree t, next;
820
821 for (t = (tree) *value; t; t = next)
822 {
823 next = CASE_CHAIN (t);
824 CASE_CHAIN (t) = NULL;
825 }
826
827 *value = NULL;
828 return true;
829 }
830
/* Start recording information mapping edges to case labels.
   Must not already be recording (edge_to_cases must be NULL);
   paired with end_recording_case_labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
840
841 /* Return nonzero if we are recording information for case labels. */
842
843 static bool
844 recording_case_labels_p (void)
845 {
846 return (edge_to_cases != NULL);
847 }
848
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  Also re-groups case
   labels of any GIMPLE_SWITCH whose block was touched by edge
   manipulations while recording.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Clear the CASE_CHAINs before destroying the map (see
     edge_to_cases_cleanup).  */
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (stmt);
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
871
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  /* E is now guaranteed to have an entry after the loop above.  */
  return (tree) *pointer_map_contains (edge_to_cases, e);
}
913
914 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
915
916 static void
917 make_gimple_switch_edges (basic_block bb)
918 {
919 gimple entry = last_stmt (bb);
920 location_t entry_locus;
921 size_t i, n;
922
923 entry_locus = gimple_location (entry);
924
925 n = gimple_switch_num_labels (entry);
926
927 for (i = 0; i < n; ++i)
928 {
929 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
930 basic_block label_bb = label_to_block (lab);
931 make_edge (bb, label_bb, 0);
932 assign_discriminator (entry_locus, label_bb);
933 }
934 }
935
936
/* Return the basic block holding label DEST in function IFUN, or NULL
   if DEST is not in the label-to-block map.  After parse errors an
   undefined label is patched into the first real block.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Re-read the UID; inserting the label statement assigns one.  */
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
960
/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.
   An abnormal edge is made to every block whose leading labels are
   marked as potential targets (FORCED_LABEL for computed gotos,
   DECL_NONLOCAL for non-local gotos from calls).  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple label_stmt = gsi_stmt (gsi);
	tree target;

	/* Labels only appear at the start of a block; stop at the
	   first non-label statement.  */
	if (gimple_code (label_stmt) != GIMPLE_LABEL)
	  break;

	target = gimple_label_label (label_stmt);

	/* Make an edge to every label block that has been marked as a
	   potential target for a computed goto or a non-local goto.  */
	if ((FORCED_LABEL (target) && !for_call)
	    || (DECL_NONLOCAL (target) && for_call))
	  {
	    make_edge (bb, target_bb, EDGE_ABNORMAL);
	    break;
	  }
      }
}
991
/* Create edges for a goto statement at block BB.  A simple goto gets
   a single fallthru edge and the goto statement itself is removed;
   a computed goto gets abnormal edges to all potential targets.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      /* The edge now carries the target; the goto is redundant.  */
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}
1015
1016 /* Create edges for an asm statement with labels at block BB. */
1017
1018 static void
1019 make_gimple_asm_edges (basic_block bb)
1020 {
1021 gimple stmt = last_stmt (bb);
1022 location_t stmt_loc = gimple_location (stmt);
1023 int i, n = gimple_asm_nlabels (stmt);
1024
1025 for (i = 0; i < n; ++i)
1026 {
1027 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1028 basic_block label_bb = label_to_block (label);
1029 make_edge (bb, label_bb, 0);
1030 assign_discriminator (stmt_loc, label_bb);
1031 }
1032 }
1033
1034 /*---------------------------------------------------------------------------
1035 Flowgraph analysis
1036 ---------------------------------------------------------------------------*/
1037
1038 /* Cleanup useless labels in basic blocks. This is something we wish
1039 to do early because it allows us to group case labels before creating
1040 the edges for the CFG, and it speeds up block statement iterators in
1041 all passes later on.
1042 We rerun this pass after CFG is created, to get rid of the labels that
1043 are no longer referenced. After then we do not run it any more, since
1044 (almost) no new labels should be created. */
1045
/* A map from basic block index to the leading label of that block.
   Filled in by cleanup_dead_labels; indexed by bb->index.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1055
1056 /* Given LABEL return the first label in the same basic block. */
1057
1058 static tree
1059 main_block_label (tree label)
1060 {
1061 basic_block bb = label_to_block (label);
1062 tree main_label = label_for_bb[bb->index].label;
1063
1064 /* label_to_block possibly inserted undefined label into the chain. */
1065 if (!main_label)
1066 {
1067 label_for_bb[bb->index].label = label;
1068 main_label = label;
1069 }
1070
1071 label_for_bb[bb->index].used = true;
1072 return main_label;
1073 }
1074
/* Clean up redundant labels within the exception tree: redirect every
   label recorded in the EH data structures (landing pads, try/catch
   handlers, allowed-exceptions regions) to the leading label of its
   basic block, via main_block_label.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  /* Nothing to do if the function has no EH data at all.  */
  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's post-landing-pad label.  When the
     label changes, transfer the landing-pad number from the old label
     to the new one.  Index 0 of lp_array is skipped deliberately.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region types carry no label of their own.  */
	break;

      case ERT_TRY:
	{
	  /* Redirect the label of every catch handler in the region.  */
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
1125
1126
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  /* Labels appear only at the start of a block; stop at the
	     first non-label statement.  */
	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  /* Redirect both the true and the false arm.  */
	  label = gimple_cond_true_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_true_label (stmt, new_label);
	    }

	  label = gimple_cond_false_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_false_label (stmt, new_label);
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    int i, n = gimple_asm_nlabels (stmt);

	    /* Redirect every label operand of the asm.  */
	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      label = gimple_goto_dest (stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    tree label = gimple_transaction_label (stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* Keep the canonical label, plus any label that must survive:
	     user labels, non-local goto targets, address-taken labels.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
1302
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.
     Entry 0 is the default case, so start at index 1.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  Emptied slots are compacted away below.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      /* The upper bound of the current run: CASE_HIGH if the case is a
	 range, otherwise its single value.  */
      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  /* BHP1 = base_high + 1, the value the next case must start at
	     for the ranges to be consecutive.  */
	  double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      /* Skip the slots nulled out by the merging above.  */
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
1382
1383 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1384 and scan the sorted vector of cases. Combine the ones jumping to the
1385 same label. */
1386
1387 void
1388 group_case_labels (void)
1389 {
1390 basic_block bb;
1391
1392 FOR_EACH_BB (bb)
1393 {
1394 gimple stmt = last_stmt (bb);
1395 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1396 group_case_labels_stmt (stmt);
1397 }
1398 }
1399
/* Checks whether we can merge block B into block A.  Returns true only
   when A has B as its sole successor, B has A as its sole predecessor,
   and none of the statements, labels, PHIs or loop structure forbid
   the merge.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  /* A must flow unconditionally and uniquely into B.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  The
     location survives only if an adjacent statement on either side of
     the join already carries it.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
1483
1484 /* Return true if the var whose chain of uses starts at PTR has no
1485 nondebug uses. */
1486 bool
1487 has_zero_uses_1 (const ssa_use_operand_t *head)
1488 {
1489 const ssa_use_operand_t *ptr;
1490
1491 for (ptr = head->next; ptr != head; ptr = ptr->next)
1492 if (!is_gimple_debug (USE_STMT (ptr)))
1493 return false;
1494
1495 return true;
1496 }
1497
1498 /* Return true if the var whose chain of uses starts at PTR has a
1499 single nondebug use. Set USE_P and STMT to that single nondebug
1500 use, if so, or to NULL otherwise. */
1501 bool
1502 single_imm_use_1 (const ssa_use_operand_t *head,
1503 use_operand_p *use_p, gimple *stmt)
1504 {
1505 ssa_use_operand_t *ptr, *single_use = 0;
1506
1507 for (ptr = head->next; ptr != head; ptr = ptr->next)
1508 if (!is_gimple_debug (USE_STMT (ptr)))
1509 {
1510 if (single_use)
1511 {
1512 single_use = NULL;
1513 break;
1514 }
1515 single_use = ptr;
1516 }
1517
1518 if (use_p)
1519 *use_p = single_use;
1520
1521 if (stmt)
1522 *stmt = single_use ? single_use->loc.stmt : NULL;
1523
1524 return !!single_use;
1525 }
1526
/* Replaces all uses of NAME by VAL, folding and updating each affected
   non-PHI statement afterwards.  On exit NAME has no uses left.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* Mark the block if we changed the last stmt in it.  */
	  if (cfgcleanup_altered_bbs
	      && stmt_ends_bb_p (stmt))
	    bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

	  /* FIXME. It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  /* Folding may replace the statement; refetch it from the
	     iterator.  */
	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1607
/* Merge block B into block A.  The caller must have established (see
   gimple_can_merge_blocks_p) that A is B's single predecessor and B is
   A's single fallthru successor.  B's PHIs are eliminated, its labels
   removed or relocated, and its statement sequence appended to A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* If the label was a landing pad, clear its pad: the pad's
	     block is disappearing into A.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1735
1736
1737 /* Return the one of two successors of BB that is not reachable by a
1738 complex edge, if there is one. Else, return BB. We use
1739 this in optimizations that use post-dominators for their heuristics,
1740 to catch the cases in C++ where function calls are involved. */
1741
1742 basic_block
1743 single_noncomplex_succ (basic_block bb)
1744 {
1745 edge e0, e1;
1746 if (EDGE_COUNT (bb->succs) != 2)
1747 return bb;
1748
1749 e0 = EDGE_SUCC (bb, 0);
1750 e1 = EDGE_SUCC (bb, 1);
1751 if (e0->flags & EDGE_COMPLEX)
1752 return e1->dest;
1753 if (e1->flags & EDGE_COMPLEX)
1754 return e0->dest;
1755
1756 return bb;
1757 }
1758
1759 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1760
1761 void
1762 notice_special_calls (gimple call)
1763 {
1764 int flags = gimple_call_flags (call);
1765
1766 if (flags & ECF_MAY_BE_ALLOCA)
1767 cfun->calls_alloca = true;
1768 if (flags & ECF_RETURNS_TWICE)
1769 cfun->calls_setjmp = true;
1770 }
1771
1772
/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags (the calls that set them may have been
   deleted).  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}
1782
1783 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1784
1785 static void
1786 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1787 {
1788 /* Since this block is no longer reachable, we can just delete all
1789 of its PHI nodes. */
1790 remove_phi_nodes (bb);
1791
1792 /* Remove edges to BB's successors. */
1793 while (EDGE_COUNT (bb->succs) > 0)
1794 remove_edge (EDGE_SUCC (bb, 0));
1795 }
1796
1797
/* Remove statements of basic block BB, then its PHIs and outgoing
   edges.  Forced and non-local labels are relocated to the previous
   block rather than deleted, since they may still be referenced.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && (FORCED_LABEL (gimple_label_label (stmt))
		  || DECL_NONLOCAL (gimple_label_label (stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (stmt)) = 1;
		}

	      /* Move the label to the previous block instead of
		 deleting it.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* The removal invalidated the iterator position: re-anchor at
	     the new last statement, or step back one.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
1880
1881
1882 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1883 predicate VAL, return the edge that will be taken out of the block.
1884 If VAL does not match a unique edge, NULL is returned. */
1885
1886 edge
1887 find_taken_edge (basic_block bb, tree val)
1888 {
1889 gimple stmt;
1890
1891 stmt = last_stmt (bb);
1892
1893 gcc_assert (stmt);
1894 gcc_assert (is_ctrl_stmt (stmt));
1895
1896 if (val == NULL)
1897 return NULL;
1898
1899 if (!is_gimple_min_invariant (val))
1900 return NULL;
1901
1902 if (gimple_code (stmt) == GIMPLE_COND)
1903 return find_taken_edge_cond_expr (bb, val);
1904
1905 if (gimple_code (stmt) == GIMPLE_SWITCH)
1906 return find_taken_edge_switch_expr (bb, val);
1907
1908 if (computed_goto_p (stmt))
1909 {
1910 /* Only optimize if the argument is a label, if the argument is
1911 not a label then we can not construct a proper CFG.
1912
1913 It may be the case that we only need to allow the LABEL_REF to
1914 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1915 appear inside a LABEL_EXPR just to be safe. */
1916 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1917 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1918 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1919 return NULL;
1920 }
1921
1922 gcc_unreachable ();
1923 }
1924
1925 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1926 statement, determine which of the outgoing edges will be taken out of the
1927 block. Return NULL if either edge may be taken. */
1928
1929 static edge
1930 find_taken_edge_computed_goto (basic_block bb, tree val)
1931 {
1932 basic_block dest;
1933 edge e = NULL;
1934
1935 dest = label_to_block (val);
1936 if (dest)
1937 {
1938 e = find_edge (bb, dest);
1939 gcc_assert (e != NULL);
1940 }
1941
1942 return e;
1943 }
1944
1945 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1946 statement, determine which of the two edges will be taken out of the
1947 block. Return NULL if either edge may be taken. */
1948
1949 static edge
1950 find_taken_edge_cond_expr (basic_block bb, tree val)
1951 {
1952 edge true_edge, false_edge;
1953
1954 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1955
1956 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1957 return (integer_zerop (val) ? false_edge : true_edge);
1958 }
1959
1960 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1961 statement, determine which edge will be taken out of the block. Return
1962 NULL if any edge may be taken. */
1963
1964 static edge
1965 find_taken_edge_switch_expr (basic_block bb, tree val)
1966 {
1967 basic_block dest_bb;
1968 edge e;
1969 gimple switch_stmt;
1970 tree taken_case;
1971
1972 switch_stmt = last_stmt (bb);
1973 taken_case = find_case_label_for_value (switch_stmt, val);
1974 dest_bb = label_to_block (CASE_LABEL (taken_case));
1975
1976 e = find_edge (bb, dest_bb);
1977 gcc_assert (e);
1978 return e;
1979 }
1980
1981
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.
   Falls back to the default case when no case covers VAL.  */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Invariant: the matching case, if any, lies in (low, high).
     Index 0 (the default label) is never examined directly.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
2022
2023
/* Dump basic block BB on stderr, including virtual operands, memory
   symbols and block boundary annotations.  */

void
gimple_debug_bb (basic_block bb)
{
  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
}
2031
2032
2033 /* Dump basic block with index N on stderr. */
2034
2035 basic_block
2036 gimple_debug_bb_n (int n)
2037 {
2038 gimple_debug_bb (BASIC_BLOCK (n));
2039 return BASIC_BLOCK (n);
2040 }
2041
2042
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
2053
2054
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  TDF_DETAILS additionally emits a header with block/edge
   counts and a brief CFG dump; TDF_STATS appends CFG statistics.  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file, flags | TDF_COMMENT);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2078
2079
/* Dump CFG statistics on FILE: counts and approximate memory footprint
   of basic blocks and edges, plus the running maximum of coalesced
   label blocks.  SCALE and LABEL are presumably byte-count formatting
   macros defined elsewhere in this file -- confirm before relying on
   their output format.  */

void
dump_cfg_stats (FILE *file)
{
  /* Persists across calls so the "Max so far" line is a high-water
     mark over the whole compilation.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
	   SCALE (size), LABEL (size));

  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2128
2129
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2138
2139 /*---------------------------------------------------------------------------
2140 Miscellaneous helpers
2141 ---------------------------------------------------------------------------*/
2142
2143 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2144 flow. Transfers of control flow associated with EH are excluded. */
2145
2146 static bool
2147 call_can_make_abnormal_goto (gimple t)
2148 {
2149 /* If the function has no non-local labels, then a call cannot make an
2150 abnormal transfer of control. */
2151 if (!cfun->has_nonlocal_label)
2152 return false;
2153
2154 /* Likewise if the call has no side effects. */
2155 if (!gimple_has_side_effects (t))
2156 return false;
2157
2158 /* Likewise if the called function is leaf. */
2159 if (gimple_call_flags (t) & ECF_LEAF)
2160 return false;
2161
2162 return true;
2163 }
2164
2165
2166 /* Return true if T can make an abnormal transfer of control flow.
2167 Transfers of control flow associated with EH are excluded. */
2168
2169 bool
2170 stmt_can_make_abnormal_goto (gimple t)
2171 {
2172 if (computed_goto_p (t))
2173 return true;
2174 if (is_gimple_call (t))
2175 return call_can_make_abnormal_goto (t);
2176 return false;
2177 }
2178
2179
2180 /* Return true if T represents a stmt that always transfers control. */
2181
2182 bool
2183 is_ctrl_stmt (gimple t)
2184 {
2185 switch (gimple_code (t))
2186 {
2187 case GIMPLE_COND:
2188 case GIMPLE_SWITCH:
2189 case GIMPLE_GOTO:
2190 case GIMPLE_RETURN:
2191 case GIMPLE_RESX:
2192 return true;
2193 default:
2194 return false;
2195 }
2196 }
2197
2198
2199 /* Return true if T is a statement that may alter the flow of control
2200 (e.g., a call to a non-returning function). */
2201
bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      {
	int flags = gimple_call_flags (t);

	/* A call alters control flow if it can make an abnormal goto.  */
	if (call_can_make_abnormal_goto (t))
	  return true;

	/* A call also alters control flow if it does not return.  */
	if (flags & ECF_NORETURN)
	  return true;

	/* TM ending statements have backedges out of the transaction.
	   Return true so we split the basic block containing them.
	   Note that the TM_BUILTIN test is merely an optimization.  */
	if ((flags & ECF_TM_BUILTIN)
	    && is_tm_ending_fndecl (gimple_call_fndecl (t)))
	  return true;

	/* BUILT_IN_RETURN call is same as return statement.  */
	if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
	  return true;
      }
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm goto (one with output labels) may branch to any of them.  */
      if (gimple_asm_nlabels (t) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2260
2261
2262 /* Return true if T is a simple local goto. */
2263
2264 bool
2265 simple_goto_p (gimple t)
2266 {
2267 return (gimple_code (t) == GIMPLE_GOTO
2268 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2269 }
2270
2271
2272 /* Return true if STMT should start a new basic block. PREV_STMT is
2273 the statement preceding STMT. It is used when STMT is a label or a
2274 case label. Labels should only start a new basic block if their
2275 previous statement wasn't a label. Otherwise, sequence of labels
2276 would generate unnecessary basic blocks that only contain a single
2277 label. */
2278
2279 static inline bool
2280 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2281 {
2282 if (stmt == NULL)
2283 return false;
2284
2285 /* Labels start a new basic block only if the preceding statement
2286 wasn't a label of the same type. This prevents the creation of
2287 consecutive blocks that have nothing but a single label. */
2288 if (gimple_code (stmt) == GIMPLE_LABEL)
2289 {
2290 /* Nonlocal and computed GOTO targets always start a new block. */
2291 if (DECL_NONLOCAL (gimple_label_label (stmt))
2292 || FORCED_LABEL (gimple_label_label (stmt)))
2293 return true;
2294
2295 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2296 {
2297 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2298 return true;
2299
2300 cfg_stats.num_merged_labels++;
2301 return false;
2302 }
2303 else
2304 return true;
2305 }
2306
2307 return false;
2308 }
2309
2310
2311 /* Return true if T should end a basic block. */
2312
2313 bool
2314 stmt_ends_bb_p (gimple t)
2315 {
2316 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2317 }
2318
2319 /* Remove block annotations and other data structures. */
2320
void
delete_tree_cfg_annotations (void)
{
  /* Release the label -> block map; after this, label lookups via
     the map are no longer possible.  */
  vec_free (label_to_block_map);
}
2326
2327
2328 /* Return the first statement in basic block BB. */
2329
2330 gimple
2331 first_stmt (basic_block bb)
2332 {
2333 gimple_stmt_iterator i = gsi_start_bb (bb);
2334 gimple stmt = NULL;
2335
2336 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2337 {
2338 gsi_next (&i);
2339 stmt = NULL;
2340 }
2341 return stmt;
2342 }
2343
2344 /* Return the first non-label statement in basic block BB. */
2345
2346 static gimple
2347 first_non_label_stmt (basic_block bb)
2348 {
2349 gimple_stmt_iterator i = gsi_start_bb (bb);
2350 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2351 gsi_next (&i);
2352 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2353 }
2354
2355 /* Return the last statement in basic block BB. */
2356
2357 gimple
2358 last_stmt (basic_block bb)
2359 {
2360 gimple_stmt_iterator i = gsi_last_bb (bb);
2361 gimple stmt = NULL;
2362
2363 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2364 {
2365 gsi_prev (&i);
2366 stmt = NULL;
2367 }
2368 return stmt;
2369 }
2370
2371 /* Return the last statement of an otherwise empty block. Return NULL
2372 if the block is totally empty, or if it contains more than one
2373 statement. */
2374
2375 gimple
2376 last_and_only_stmt (basic_block bb)
2377 {
2378 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2379 gimple last, prev;
2380
2381 if (gsi_end_p (i))
2382 return NULL;
2383
2384 last = gsi_stmt (i);
2385 gsi_prev_nondebug (&i);
2386 if (gsi_end_p (i))
2387 return last;
2388
2389 /* Empty statements should no longer appear in the instruction stream.
2390 Everything that might have appeared before should be deleted by
2391 remove_useless_stmts, and the optimizers should just gsi_remove
2392 instead of smashing with build_empty_stmt.
2393
2394 Thus the only thing that should appear here in a block containing
2395 one executable statement is a label. */
2396 prev = gsi_stmt (i);
2397 if (gimple_code (prev) == GIMPLE_LABEL)
2398 return last;
2399 else
2400 return NULL;
2401 }
2402
2403 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2404
static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map_vector *v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator phis;

  /* Redirecting OLD_EDGE queued its PHI arguments in a var map keyed
     off the old edge; if nothing was queued there is nothing to do.  */
  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the queued (result, def) pairs and the PHI nodes of the new
     destination in lockstep; the entries were queued in PHI order.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple phi = gsi_stmt (phis);
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      /* Each queued entry must match the PHI it is re-attached to.  */
      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* The queued arguments have been consumed; drop the map.  */
  redirect_edge_var_map_clear (old_edge);
}
2432
2433 /* Returns the basic block after which the new basic block created
2434 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2435 near its "logical" location. This is of most help to humans looking
2436 at debugging dumps. */
2437
static basic_block
split_edge_bb_loc (edge edge_in)
{
  basic_block dest = edge_in->dest;
  basic_block dest_prev = dest->prev_bb;

  if (dest_prev)
    {
      /* If the block laid out just before DEST already reaches it via
	 a non-complex edge, inserting the new block between them would
	 disturb that fallthru-style layout; place the new block after
	 the split edge's source instead.  */
      edge e = find_edge (dest_prev, dest);
      if (e && !(e->flags & EDGE_COMPLEX))
	return edge_in->src;
    }
  return dest_prev;
}
2452
2453 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2454 Abort on abnormal edges. */
2455
static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  /* The new block inherits the execution frequency and profile count
     of the edge being split.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  /* The new block unconditionally falls through to the old destination.  */
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Redirect the incoming edge to the new block.  This queues DEST's
     PHI arguments for EDGE_IN, which we then re-add on NEW_EDGE.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2482
2483
2484 /* Verify properties of the address expression T with base object BASE. */
2485
2486 static tree
2487 verify_address (tree t, tree base)
2488 {
2489 bool old_constant;
2490 bool old_side_effects;
2491 bool new_constant;
2492 bool new_side_effects;
2493
2494 old_constant = TREE_CONSTANT (t);
2495 old_side_effects = TREE_SIDE_EFFECTS (t);
2496
2497 recompute_tree_invariant_for_addr_expr (t);
2498 new_side_effects = TREE_SIDE_EFFECTS (t);
2499 new_constant = TREE_CONSTANT (t);
2500
2501 if (old_constant != new_constant)
2502 {
2503 error ("constant not recomputed when ADDR_EXPR changed");
2504 return t;
2505 }
2506 if (old_side_effects != new_side_effects)
2507 {
2508 error ("side effects not recomputed when ADDR_EXPR changed");
2509 return t;
2510 }
2511
2512 if (!(TREE_CODE (base) == VAR_DECL
2513 || TREE_CODE (base) == PARM_DECL
2514 || TREE_CODE (base) == RESULT_DECL))
2515 return NULL_TREE;
2516
2517 if (DECL_GIMPLE_REG_P (base))
2518 {
2519 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2520 return base;
2521 }
2522
2523 return NULL_TREE;
2524 }
2525
2526 /* Callback for walk_tree, check that all elements with address taken are
2527 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2528 inside a PHI node. */
2529
static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types carry no addressability information; do not recurse.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* A released SSA name must not be referenced anymore.  */
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* Gimple uses MEM_REF instead; INDIRECT_REF must not survive.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      /* The offset operand is an INTEGER_CST of pointer type, which
	 also encodes the type used for alias analysis.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      /* An ASSERT_EXPR whose condition folds to false is nonsensical.  */
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      /* Assignments are GIMPLE_ASSIGN tuples, never MODIFY_EXPR trees.  */
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	/* Only decls have an ADDRESSABLE bit to check.  */
	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes are stripped/lowered during gimplification and
	 must never appear in gimple.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      /* Position and size must be host-representable unsigned
		 integers.  */
	      if (!host_integerp (TREE_OPERAND (t, 1), 1)
		  || !host_integerp (TREE_OPERAND (t, 2), 1))
		{
		  error ("invalid position or size operand to BIT_FIELD_REF");
		  return t;
		}
	      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
		  && (TYPE_PRECISION (TREE_TYPE (t))
		      != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("integral result type precision does not match "
			 "field size of BIT_FIELD_REF");
		  return t;
		}
	      else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		       && !AGGREGATE_TYPE_P (TREE_TYPE (t))
		       && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		       && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
			   != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("mode precision of non-integral result does not "
			 "match field size of BIT_FIELD_REF");
		  return t;
		}
	    }

	  t = TREE_OPERAND (t, 0);
	}

      /* T is now the base of the reference chain.  */
      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors are atomic gimple values; no need
	 to look inside.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      /* CASE_CHAIN is only used transiently while building the CFG and
	 must be cleared afterwards (see the file-head comment).  */
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
2796
2797
2798 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2799 Returns true if there is an error, otherwise false. */
2800
2801 static bool
2802 verify_types_in_gimple_min_lval (tree expr)
2803 {
2804 tree op;
2805
2806 if (is_gimple_id (expr))
2807 return false;
2808
2809 if (TREE_CODE (expr) != TARGET_MEM_REF
2810 && TREE_CODE (expr) != MEM_REF)
2811 {
2812 error ("invalid expression for min lvalue");
2813 return true;
2814 }
2815
2816 /* TARGET_MEM_REFs are strange beasts. */
2817 if (TREE_CODE (expr) == TARGET_MEM_REF)
2818 return false;
2819
2820 op = TREE_OPERAND (expr, 0);
2821 if (!is_gimple_val (op))
2822 {
2823 error ("invalid operand in indirect reference");
2824 debug_generic_stmt (op);
2825 return true;
2826 }
2827 /* Memory references now generally can involve a value conversion. */
2828
2829 return false;
2830 }
2831
2832 /* Verify if EXPR is a valid GIMPLE reference expression. If
2833 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2834 if there is an error, otherwise false. */
2835
static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Walk the reference chain outermost-in, checking each handled
     component until only the base object remains.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and stride (when present) must be
	     gimple values.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      /* The offset operand is an INTEGER_CST of pointer type.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  /* Finally the base object must be a valid minimal lvalue, unless the
     whole reference is an invariant rvalue.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
2965
2966 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
2967 list of pointer-to types that is trivially convertible to DEST. */
2968
2969 static bool
2970 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2971 {
2972 tree src;
2973
2974 if (!TYPE_POINTER_TO (src_obj))
2975 return true;
2976
2977 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2978 if (useless_type_conversion_p (dest, src))
2979 return true;
2980
2981 return false;
2982 }
2983
2984 /* Return true if TYPE1 is a fixed-point type and if conversions to and
2985 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
2986
2987 static bool
2988 valid_fixed_convert_types_p (tree type1, tree type2)
2989 {
2990 return (FIXED_POINT_TYPE_P (type1)
2991 && (INTEGRAL_TYPE_P (type2)
2992 || SCALAR_FLOAT_TYPE_P (type2)
2993 || FIXED_POINT_TYPE_P (type2)));
2994 }
2995
2996 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
2997 is a problem, otherwise false. */
2998
static bool
verify_gimple_call (gimple stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls have no callee tree; regular calls must have one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee must be a pointer to a function or method type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* DECL_LOOPING_CONST_OR_PURE_P only makes sense on const/pure
     functions.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  if (gimple_call_lhs (stmt)
      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  /* A noreturn call never produces a usable value.  */
  if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, this should not be an indirect
     call, and the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt))
    {
      if (!gimple_call_fndecl (stmt))
	{
	  error ("static chain in indirect gimple call");
	  return true;
	}
      fn = TREE_OPERAND (fn, 0);

      if (!DECL_STATIC_CHAIN (fn))
	{
	  error ("static chain with function that doesn%'t use one");
	  return true;
	}
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* Register-type arguments must be gimple values; aggregate
	 arguments must be lvalues.  */
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
3131
3132 /* Verifies the gimple comparison with the result type TYPE and
3133 the operands OP0 and OP1. */
3134
static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    {
      /* A vector comparison yields a vector mask, never a scalar
	 boolean.  */
      if (TREE_CODE (op0_type) == VECTOR_TYPE
	  || TREE_CODE (op1_type) == VECTOR_TYPE)
	{
	  error ("vector comparison returning a boolean");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}
    }
  /* Or an integer vector type with the same size and element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
	{
	  error ("non-vector operands in vector comparison");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}

      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type)))))
	{
	  error ("invalid vector comparison resulting type");
	  debug_generic_expr (type);
	  return true;
	}
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
3211
3212 /* Verify a gimple assignment statement STMT with an unary rhs.
3213 Returns true if anything is wrong. */
3214
static bool
verify_gimple_assign_unary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions from pointer type to integral type only if
	   there is no sign or zero extension involved.
	   For targets were the precision of ptrofftype doesn't match that
	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || ptrofftype_p (sizetype))))
	  return false;

	/* Allow conversion from integral to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (INTEGRAL_TYPE_P (lhs_type)
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
	/* Both sides must be pointers, and the address spaces must
	   actually differ.  */
	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	  {
	    error ("invalid types in address space conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* At least one direction of the conversion must involve a
	   fixed-point type.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer -> float, either scalar on both sides or vector on
	   both sides.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
		|| !VECTOR_FLOAT_TYPE_P(lhs_type)))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Float -> integer, either scalar on both sides or vector on
	   both sides.  */
	if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	    && (!VECTOR_INTEGER_TYPE_P (lhs_type)
		|| !VECTOR_FLOAT_TYPE_P(rhs1_type)))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case NON_LVALUE_EXPR:
    case CONJ_EXPR:
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3365
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_binary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  /* The result of a binary operation is always a register.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  /* Both operands must be gimple values (registers or constants).  */
  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* A complex value is built from two scalar components.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
		 || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
		 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	/* Whole-vector shifts: the first operand is a vector of any
	   element kind, the shift amount is a scalar or vector integer,
	   and the result type matches the shifted vector.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
		 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		&& (TREE_CODE (rhs2_type) != VECTOR_TYPE
		    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	/* For shifting a vector of non-integral components we
	   only allow shifting by a constant multiple of the element size.  */
	if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    && (TREE_CODE (rhs2) != INTEGER_CST
		|| !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
					   TYPE_SIZE (TREE_TYPE (rhs1_type)))))
	  {
	    error ("non-element sized vector shift of floating point vector");
	    return true;
	  }

	return false;
      }

    case WIDEN_LSHIFT_EXPR:
      {
	/* The result must be at least twice as wide as the shifted
	   operand, and the shift amount a constant.  */
	if (!INTEGRAL_TYPE_P (lhs_type)
	    || !INTEGRAL_TYPE_P (rhs1_type)
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
	  {
	    error ("type mismatch in widening vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      {
	/* Same as WIDEN_LSHIFT_EXPR but element-wise on vectors.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
		> TYPE_PRECISION (TREE_TYPE (lhs_type))))
	  {
	    error ("type mismatch in widening vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	/* We use regular PLUS_EXPR and MINUS_EXPR for vectors.
	   ??? This just makes the checker happy and may not be what is
	   intended.  */
	if (TREE_CODE (lhs_type) == VECTOR_TYPE
	    && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    /* Check the element types as if this were a scalar
	       POINTER_PLUS_EXPR.  */
	    lhs_type = TREE_TYPE (lhs_type);
	    rhs1_type = TREE_TYPE (rhs1_type);
	    rhs2_type = TREE_TYPE (rhs2_type);
	    /* PLUS_EXPR is commutative, so we might end up canonicalizing
	       the pointer to 2nd place.  */
	    if (POINTER_TYPE_P (rhs2_type))
	      {
		tree tem = rhs1_type;
		rhs1_type = rhs2_type;
		rhs2_type = tem;
	      }
	    goto do_pointer_plus_expr_check;
	  }
	if (POINTER_TYPE_P (lhs_type)
	    || POINTER_TYPE_P (rhs1_type)
	    || POINTER_TYPE_P (rhs2_type))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
	/* Also reached via goto from the vector PLUS_EXPR case above,
	   with the types already stripped to the element types.  */
do_pointer_plus_expr_check:
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !ptrofftype_p (rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      /* TRUTH_* codes must not appear in GIMPLE assignments.  */
      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);

    case WIDEN_MULT_EXPR:
      /* Error unless the result is an integer at least twice as wide
	 as the equal-precision operands.  */
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
	return true;
      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      /* FIXME.  */
      return false;

    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* Generic handling: both operand types must be trivially convertible
     to the result type.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
3650
/* Verify a gimple assignment statement STMT with a ternary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_ternary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);
  tree rhs3 = gimple_assign_rhs3 (stmt);
  tree rhs3_type = TREE_TYPE (rhs3);

  /* The result of a ternary operation is always a register.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of ternary operation");
      return true;
    }

  /* For [VEC_]COND_EXPR the first operand may be a condition
     expression; otherwise all operands must be gimple values.  */
  if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
       ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
      || !is_gimple_val (rhs2)
      || !is_gimple_val (rhs3))
    {
      error ("invalid operands in ternary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      /* The multiplied operands must have equal precision, at most half
	 that of the result; the accumulator matches the result type.  */
      if ((!INTEGRAL_TYPE_P (rhs1_type)
	   && !FIXED_POINT_TYPE_P (rhs1_type))
	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type)
	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
	{
	  error ("type mismatch in widening multiply-accumulate expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case FMA_EXPR:
      /* All three operands share the result type.  */
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in fused multiply-add expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Both selected values must match the result type; the condition
	 was already checked above.  */
      if (!useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in conditional expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case VEC_PERM_EXPR:
      /* The two data operands must match the result type ...  */
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type))
	{
	  error ("type mismatch in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* ... all three operands must be vectors ...  */
      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
	{
	  error ("vector types expected in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* ... with the same number of elements ...  */
      if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
	  || TYPE_VECTOR_SUBPARTS (rhs2_type)
	     != TYPE_VECTOR_SUBPARTS (rhs3_type)
	  || TYPE_VECTOR_SUBPARTS (rhs3_type)
	     != TYPE_VECTOR_SUBPARTS (lhs_type))
	{
	  error ("vectors with different element number found "
		 "in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* ... and the selector must be an integer vector whose elements
	 have the same mode size as the data elements.  */
      if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
	  || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
	     != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
	{
	  error ("invalid mask type in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      return false;

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return false;

    default:
      gcc_unreachable ();
    }
  return false;
}
3793
3794 /* Verify a gimple assignment statement STMT with a single rhs.
3795 Returns true if anything is wrong. */
3796
3797 static bool
3798 verify_gimple_assign_single (gimple stmt)
3799 {
3800 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3801 tree lhs = gimple_assign_lhs (stmt);
3802 tree lhs_type = TREE_TYPE (lhs);
3803 tree rhs1 = gimple_assign_rhs1 (stmt);
3804 tree rhs1_type = TREE_TYPE (rhs1);
3805 bool res = false;
3806
3807 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3808 {
3809 error ("non-trivial conversion at assignment");
3810 debug_generic_expr (lhs_type);
3811 debug_generic_expr (rhs1_type);
3812 return true;
3813 }
3814
3815 if (gimple_clobber_p (stmt)
3816 && !DECL_P (lhs))
3817 {
3818 error ("non-decl LHS in clobber statement");
3819 debug_generic_expr (lhs);
3820 return true;
3821 }
3822
3823 if (handled_component_p (lhs))
3824 res |= verify_types_in_gimple_reference (lhs, true);
3825
3826 /* Special codes we cannot handle via their class. */
3827 switch (rhs_code)
3828 {
3829 case ADDR_EXPR:
3830 {
3831 tree op = TREE_OPERAND (rhs1, 0);
3832 if (!is_gimple_addressable (op))
3833 {
3834 error ("invalid operand in unary expression");
3835 return true;
3836 }
3837
3838 /* Technically there is no longer a need for matching types, but
3839 gimple hygiene asks for this check. In LTO we can end up
3840 combining incompatible units and thus end up with addresses
3841 of globals that change their type to a common one. */
3842 if (!in_lto_p
3843 && !types_compatible_p (TREE_TYPE (op),
3844 TREE_TYPE (TREE_TYPE (rhs1)))
3845 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3846 TREE_TYPE (op)))
3847 {
3848 error ("type mismatch in address expression");
3849 debug_generic_stmt (TREE_TYPE (rhs1));
3850 debug_generic_stmt (TREE_TYPE (op));
3851 return true;
3852 }
3853
3854 return verify_types_in_gimple_reference (op, true);
3855 }
3856
3857 /* tcc_reference */
3858 case INDIRECT_REF:
3859 error ("INDIRECT_REF in gimple IL");
3860 return true;
3861
3862 case COMPONENT_REF:
3863 case BIT_FIELD_REF:
3864 case ARRAY_REF:
3865 case ARRAY_RANGE_REF:
3866 case VIEW_CONVERT_EXPR:
3867 case REALPART_EXPR:
3868 case IMAGPART_EXPR:
3869 case TARGET_MEM_REF:
3870 case MEM_REF:
3871 if (!is_gimple_reg (lhs)
3872 && is_gimple_reg_type (TREE_TYPE (lhs)))
3873 {
3874 error ("invalid rhs for gimple memory store");
3875 debug_generic_stmt (lhs);
3876 debug_generic_stmt (rhs1);
3877 return true;
3878 }
3879 return res || verify_types_in_gimple_reference (rhs1, false);
3880
3881 /* tcc_constant */
3882 case SSA_NAME:
3883 case INTEGER_CST:
3884 case REAL_CST:
3885 case FIXED_CST:
3886 case COMPLEX_CST:
3887 case VECTOR_CST:
3888 case STRING_CST:
3889 return res;
3890
3891 /* tcc_declaration */
3892 case CONST_DECL:
3893 return res;
3894 case VAR_DECL:
3895 case PARM_DECL:
3896 if (!is_gimple_reg (lhs)
3897 && !is_gimple_reg (rhs1)
3898 && is_gimple_reg_type (TREE_TYPE (lhs)))
3899 {
3900 error ("invalid rhs for gimple memory store");
3901 debug_generic_stmt (lhs);
3902 debug_generic_stmt (rhs1);
3903 return true;
3904 }
3905 return res;
3906
3907 case CONSTRUCTOR:
3908 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3909 {
3910 unsigned int i;
3911 tree elt_i, elt_v, elt_t = NULL_TREE;
3912
3913 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3914 return res;
3915 /* For vector CONSTRUCTORs we require that either it is empty
3916 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3917 (then the element count must be correct to cover the whole
3918 outer vector and index must be NULL on all elements, or it is
3919 a CONSTRUCTOR of scalar elements, where we as an exception allow
3920 smaller number of elements (assuming zero filling) and
3921 consecutive indexes as compared to NULL indexes (such
3922 CONSTRUCTORs can appear in the IL from FEs). */
3923 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3924 {
3925 if (elt_t == NULL_TREE)
3926 {
3927 elt_t = TREE_TYPE (elt_v);
3928 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3929 {
3930 tree elt_t = TREE_TYPE (elt_v);
3931 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3932 TREE_TYPE (elt_t)))
3933 {
3934 error ("incorrect type of vector CONSTRUCTOR"
3935 " elements");
3936 debug_generic_stmt (rhs1);
3937 return true;
3938 }
3939 else if (CONSTRUCTOR_NELTS (rhs1)
3940 * TYPE_VECTOR_SUBPARTS (elt_t)
3941 != TYPE_VECTOR_SUBPARTS (rhs1_type))
3942 {
3943 error ("incorrect number of vector CONSTRUCTOR"
3944 " elements");
3945 debug_generic_stmt (rhs1);
3946 return true;
3947 }
3948 }
3949 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3950 elt_t))
3951 {
3952 error ("incorrect type of vector CONSTRUCTOR elements");
3953 debug_generic_stmt (rhs1);
3954 return true;
3955 }
3956 else if (CONSTRUCTOR_NELTS (rhs1)
3957 > TYPE_VECTOR_SUBPARTS (rhs1_type))
3958 {
3959 error ("incorrect number of vector CONSTRUCTOR elements");
3960 debug_generic_stmt (rhs1);
3961 return true;
3962 }
3963 }
3964 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
3965 {
3966 error ("incorrect type of vector CONSTRUCTOR elements");
3967 debug_generic_stmt (rhs1);
3968 return true;
3969 }
3970 if (elt_i != NULL_TREE
3971 && (TREE_CODE (elt_t) == VECTOR_TYPE
3972 || TREE_CODE (elt_i) != INTEGER_CST
3973 || compare_tree_int (elt_i, i) != 0))
3974 {
3975 error ("vector CONSTRUCTOR with non-NULL element index");
3976 debug_generic_stmt (rhs1);
3977 return true;
3978 }
3979 }
3980 }
3981 return res;
3982 case OBJ_TYPE_REF:
3983 case ASSERT_EXPR:
3984 case WITH_SIZE_EXPR:
3985 /* FIXME. */
3986 return res;
3987
3988 default:;
3989 }
3990
3991 return res;
3992 }
3993
3994 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3995 is a problem, otherwise false. */
3996
3997 static bool
3998 verify_gimple_assign (gimple stmt)
3999 {
4000 switch (gimple_assign_rhs_class (stmt))
4001 {
4002 case GIMPLE_SINGLE_RHS:
4003 return verify_gimple_assign_single (stmt);
4004
4005 case GIMPLE_UNARY_RHS:
4006 return verify_gimple_assign_unary (stmt);
4007
4008 case GIMPLE_BINARY_RHS:
4009 return verify_gimple_assign_binary (stmt);
4010
4011 case GIMPLE_TERNARY_RHS:
4012 return verify_gimple_assign_ternary (stmt);
4013
4014 default:
4015 gcc_unreachable ();
4016 }
4017 }
4018
/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_return (gimple stmt)
{
  tree op = gimple_return_retval (stmt);
  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));

  /* We cannot test for present return values as we do not fix up missing
     return values from the original source.  */
  if (op == NULL)
    return false;

  if (!is_gimple_val (op)
      && TREE_CODE (op) != RESULT_DECL)
    {
      error ("invalid operand in return statement");
      debug_generic_stmt (op);
      return true;
    }

  /* For a result returned by invisible reference, OP (a RESULT_DECL or
     an SSA name based on one) has pointer type.  Replace OP by its type
     so that TREE_TYPE (op) below yields the pointed-to type, which is
     what must convert to the function's result type.  */
  if ((TREE_CODE (op) == RESULT_DECL
       && DECL_BY_REFERENCE (op))
      || (TREE_CODE (op) == SSA_NAME
	  && SSA_NAME_VAR (op)
	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
    op = TREE_TYPE (op);

  if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
    {
      error ("invalid conversion in return statement");
      debug_generic_stmt (restype);
      debug_generic_stmt (TREE_TYPE (op));
      return true;
    }

  return false;
}
4059
4060
4061 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4062 is a problem, otherwise false. */
4063
4064 static bool
4065 verify_gimple_goto (gimple stmt)
4066 {
4067 tree dest = gimple_goto_dest (stmt);
4068
4069 /* ??? We have two canonical forms of direct goto destinations, a
4070 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4071 if (TREE_CODE (dest) != LABEL_DECL
4072 && (!is_gimple_val (dest)
4073 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4074 {
4075 error ("goto destination is neither a label nor a pointer");
4076 return true;
4077 }
4078
4079 return false;
4080 }
4081
4082 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4083 is a problem, otherwise false. */
4084
4085 static bool
4086 verify_gimple_switch (gimple stmt)
4087 {
4088 unsigned int i, n;
4089 tree elt, prev_upper_bound = NULL_TREE;
4090 tree index_type, elt_type = NULL_TREE;
4091
4092 if (!is_gimple_val (gimple_switch_index (stmt)))
4093 {
4094 error ("invalid operand to switch statement");
4095 debug_generic_stmt (gimple_switch_index (stmt));
4096 return true;
4097 }
4098
4099 index_type = TREE_TYPE (gimple_switch_index (stmt));
4100 if (! INTEGRAL_TYPE_P (index_type))
4101 {
4102 error ("non-integral type switch statement");
4103 debug_generic_expr (index_type);
4104 return true;
4105 }
4106
4107 elt = gimple_switch_label (stmt, 0);
4108 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4109 {
4110 error ("invalid default case label in switch statement");
4111 debug_generic_expr (elt);
4112 return true;
4113 }
4114
4115 n = gimple_switch_num_labels (stmt);
4116 for (i = 1; i < n; i++)
4117 {
4118 elt = gimple_switch_label (stmt, i);
4119
4120 if (! CASE_LOW (elt))
4121 {
4122 error ("invalid case label in switch statement");
4123 debug_generic_expr (elt);
4124 return true;
4125 }
4126 if (CASE_HIGH (elt)
4127 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4128 {
4129 error ("invalid case range in switch statement");
4130 debug_generic_expr (elt);
4131 return true;
4132 }
4133
4134 if (elt_type)
4135 {
4136 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4137 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4138 {
4139 error ("type mismatch for case label in switch statement");
4140 debug_generic_expr (elt);
4141 return true;
4142 }
4143 }
4144 else
4145 {
4146 elt_type = TREE_TYPE (CASE_LOW (elt));
4147 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4148 {
4149 error ("type precision mismatch in switch statement");
4150 return true;
4151 }
4152 }
4153
4154 if (prev_upper_bound)
4155 {
4156 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4157 {
4158 error ("case labels not sorted in switch statement");
4159 return true;
4160 }
4161 }
4162
4163 prev_upper_bound = CASE_HIGH (elt);
4164 if (! prev_upper_bound)
4165 prev_upper_bound = CASE_LOW (elt);
4166 }
4167
4168 return false;
4169 }
4170
4171 /* Verify a gimple debug statement STMT.
4172 Returns true if anything is wrong. */
4173
4174 static bool
4175 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4176 {
4177 /* There isn't much that could be wrong in a gimple debug stmt. A
4178 gimple debug bind stmt, for example, maps a tree, that's usually
4179 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4180 component or member of an aggregate type, to another tree, that
4181 can be an arbitrary expression. These stmts expand into debug
4182 insns, and are converted to debug notes by var-tracking.c. */
4183 return false;
4184 }
4185
4186 /* Verify a gimple label statement STMT.
4187 Returns true if anything is wrong. */
4188
4189 static bool
4190 verify_gimple_label (gimple stmt)
4191 {
4192 tree decl = gimple_label_label (stmt);
4193 int uid;
4194 bool err = false;
4195
4196 if (TREE_CODE (decl) != LABEL_DECL)
4197 return true;
4198
4199 uid = LABEL_DECL_UID (decl);
4200 if (cfun->cfg
4201 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4202 {
4203 error ("incorrect entry in label_to_block_map");
4204 err |= true;
4205 }
4206
4207 uid = EH_LANDING_PAD_NR (decl);
4208 if (uid)
4209 {
4210 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4211 if (decl != lp->post_landing_pad)
4212 {
4213 error ("incorrect setting of landing pad number");
4214 err |= true;
4215 }
4216 }
4217
4218 return err;
4219 }
4220
/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  */

static bool
verify_gimple_stmt (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (stmt);

    case GIMPLE_LABEL:
      return verify_gimple_label (stmt);

    case GIMPLE_CALL:
      return verify_gimple_call (stmt);

    case GIMPLE_COND:
      /* The condition code must be a comparison, and both labels, if
	 present, must be LABEL_DECLs.  */
      if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
	{
	  error ("invalid comparison code in gimple cond");
	  return true;
	}
      if (!(!gimple_cond_true_label (stmt)
	    || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
	  || !(!gimple_cond_false_label (stmt)
	       || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
	{
	  error ("invalid labels in gimple cond");
	  return true;
	}

      /* The comparison itself must produce a boolean.  */
      return verify_gimple_comparison (boolean_type_node,
				       gimple_cond_lhs (stmt),
				       gimple_cond_rhs (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (stmt);

    case GIMPLE_SWITCH:
      return verify_gimple_switch (stmt);

    case GIMPLE_RETURN:
      return verify_gimple_return (stmt);

    case GIMPLE_ASM:
      /* Inline asm operands are not verified here.  */
      return false;

    case GIMPLE_TRANSACTION:
      return verify_gimple_transaction (stmt);

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}
4296
/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  */

static bool
verify_gimple_phi (gimple phi)
{
  bool err = false;
  unsigned i;
  tree phi_result = gimple_phi_result (phi);
  bool virtual_p;

  if (!phi_result)
    {
      error ("invalid PHI result");
      return true;
    }

  /* The result must be an SSA name; for a virtual PHI it must be based
     on the function's single virtual operand.  */
  virtual_p = virtual_operand_p (phi_result);
  if (TREE_CODE (phi_result) != SSA_NAME
      || (virtual_p
	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
    {
      error ("invalid PHI result");
      err = true;
    }

  /* Every argument must agree with the result in virtual-ness: virtual
     PHIs take only virtual SSA names, real PHIs take gimple values.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree t = gimple_phi_arg_def (phi, i);

      if (!t)
	{
	  error ("missing PHI def");
	  err |= true;
	  continue;
	}
      /* Addressable variables do have SSA_NAMEs but they
	 are not considered gimple values.  */
      else if ((TREE_CODE (t) == SSA_NAME
		&& virtual_p != virtual_operand_p (t))
	       || (virtual_p
		   && (TREE_CODE (t) != SSA_NAME
		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
	       || (!virtual_p
		   && !is_gimple_val (t)))
	{
	  error ("invalid PHI argument");
	  debug_generic_expr (t);
	  err |= true;
	}
#ifdef ENABLE_TYPES_CHECKING
      /* Each argument type must be trivially convertible to the
	 result type.  */
      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
	{
	  error ("incompatible types in PHI argument %u", i);
	  debug_generic_stmt (TREE_TYPE (phi_result));
	  debug_generic_stmt (TREE_TYPE (t));
	  err |= true;
	}
#endif
    }

  return err;
}
4360
/* Verify the GIMPLE statements inside the sequence STMTS.  Recurses
   into statements that contain nested sequences (binds, try blocks,
   EH constructs and transactions); everything else is handed to
   verify_gimple_stmt.  Returns true if any statement is invalid.  */

static bool
verify_gimple_in_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple stmt = gsi_stmt (ittr);

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
	  break;

	case GIMPLE_TRY:
	  /* Check both the protected body and the cleanup/handler.  */
	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
	  break;

	case GIMPLE_EH_FILTER:
	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
	  break;

	case GIMPLE_EH_ELSE:
	  err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
	  break;

	case GIMPLE_CATCH:
	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
	  break;

	case GIMPLE_TRANSACTION:
	  err |= verify_gimple_transaction (stmt);
	  break;

	default:
	  {
	    /* Leaf statement: verify it and dump it on failure.  */
	    bool err2 = verify_gimple_stmt (stmt);
	    if (err2)
	      debug_gimple_stmt (stmt);
	    err |= err2;
	  }
	}
    }

  return err;
}
4413
4414 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4415 is a problem, otherwise false. */
4416
4417 static bool
4418 verify_gimple_transaction (gimple stmt)
4419 {
4420 tree lab = gimple_transaction_label (stmt);
4421 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4422 return true;
4423 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4424 }
4425
4426
4427 /* Verify the GIMPLE statements inside the statement list STMTS. */
4428
4429 DEBUG_FUNCTION void
4430 verify_gimple_in_seq (gimple_seq stmts)
4431 {
4432 timevar_push (TV_TREE_STMT_VERIFY);
4433 if (verify_gimple_in_seq_2 (stmts))
4434 internal_error ("verify_gimple failed");
4435 timevar_pop (TV_TREE_STMT_VERIFY);
4436 }
4437
4438 /* Return true when the T can be shared. */
4439
4440 bool
4441 tree_node_can_be_shared (tree t)
4442 {
4443 if (IS_TYPE_OR_DECL_P (t)
4444 || is_gimple_min_invariant (t)
4445 || TREE_CODE (t) == SSA_NAME
4446 || t == error_mark_node
4447 || TREE_CODE (t) == IDENTIFIER_NODE)
4448 return true;
4449
4450 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4451 return true;
4452
4453 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4454 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4455 || TREE_CODE (t) == COMPONENT_REF
4456 || TREE_CODE (t) == REALPART_EXPR
4457 || TREE_CODE (t) == IMAGPART_EXPR)
4458 t = TREE_OPERAND (t, 0);
4459
4460 if (DECL_P (t))
4461 return true;
4462
4463 return false;
4464 }
4465
4466 /* Called via walk_gimple_stmt. Verify tree sharing. */
4467
4468 static tree
4469 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4470 {
4471 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4472 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4473
4474 if (tree_node_can_be_shared (*tp))
4475 {
4476 *walk_subtrees = false;
4477 return NULL;
4478 }
4479
4480 if (pointer_set_insert (visited, *tp))
4481 return *tp;
4482
4483 return NULL;
4484 }
4485
4486 static bool eh_error_found;
4487 static int
4488 verify_eh_throw_stmt_node (void **slot, void *data)
4489 {
4490 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4491 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4492
4493 if (!pointer_set_contains (visited, node->stmt))
4494 {
4495 error ("dead STMT in EH table");
4496 debug_gimple_stmt (node->stmt);
4497 eh_error_found = true;
4498 }
4499 return 1;
4500 }
4501
/* Verify the GIMPLE statements in the CFG of FN.  Checks, per basic
   block: PHI node well-formedness and bb back-pointers, statement
   well-formedness, absence of illegal tree sharing, and consistency of
   the EH throw table with the statements that can actually throw.
   Aborts via internal_error () on any failure.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn)
{
  basic_block bb;
  bool err = false;
  /* VISITED collects tree nodes for the sharing check; VISITED_STMTS
     collects every statement so dead EH table entries can be found.  */
  struct pointer_set_t *visited, *visited_stmts;

  timevar_push (TV_TREE_STMT_VERIFY);
  visited = pointer_set_create ();
  visited_stmts = pointer_set_create ();

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;

      /* First check the PHI nodes of the block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  bool err2 = false;
	  unsigned i;

	  pointer_set_insert (visited_stmts, phi);

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Every PHI argument must obey the tree-sharing rules.  */
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      tree addr = walk_tree (&arg, verify_node_sharing, visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      /* Then check the ordinary statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  pointer_set_insert (visited_stmts, stmt);

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);

	  /* Tree-sharing check over all operands of the statement.  */
	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* ??? Instead of not checking these stmts at all the walker
	     should know its context via wi.  */
	  if (!is_gimple_debug (stmt)
	      && !is_gimple_omp (stmt))
	    {
	      memset (&wi, 0, sizeof (wi));
	      addr = walk_gimple_op (stmt, verify_expr, &wi);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  inform (gimple_location (stmt), "in statement");
		  err2 |= true;
		}
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr != 0)
	    {
	      if (!stmt_could_throw_p (stmt))
		{
		  error ("statement marked for throw, but doesn%'t");
		  err2 |= true;
		}
	      else if (lp_nr > 0
		       && !gsi_one_before_end_p (gsi)
		       && stmt_can_throw_internal (stmt))
		{
		  /* An internally-throwing statement must end its block.  */
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}
    }

  /* Finally, check that the EH throw table does not reference
     statements that no longer exist.  */
  eh_error_found = false;
  if (get_eh_throw_stmt_table (cfun))
    htab_traverse (get_eh_throw_stmt_table (cfun),
		   verify_eh_throw_stmt_node,
		   visited_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  pointer_set_destroy (visited);
  pointer_set_destroy (visited_stmts);
  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
4637
4638
/* Verifies that the flow information is OK: labels only at the start of
   blocks and agreeing with label_to_block, control statements only at
   block ends, and outgoing edge flags consistent with the kind of
   statement that ends each block.  Returns nonzero on failure.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge e;
  edge_iterator ei;

  /* The artificial entry and exit blocks must carry no IL.  */
  if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  /* Only returns (and BUILT_IN_RETURN calls) may reach the exit block,
     never a plain fallthru.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB (bb)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  /* Nonlocal and EH-landing-pad labels must be the very first
	     statement of their block.  */
	  label = gimple_label_label (stmt);
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  /* The label_to_block map must agree with the block the label
	     actually appears in.  */
	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  /* Labels past the initial run were already skipped above, so
	     any label seen here is misplaced.  */
	  if (gimple_code (stmt) == GIMPLE_LABEL)
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      gsi = gsi_last_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      /* A control statement must account for all outgoing edges.  */
      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      /* Statement-kind-specific edge checks.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    /* A condition must have exactly one true and one false
	       successor, neither fallthru nor abnormal.  */
	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  /* Simple gotos are represented implicitly by edges; only a
	     computed goto may remain, and all its edges are abnormal.  */
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  /* Only __builtin_return calls are checked like returns.  */
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* ... fallthru ... */
	case GIMPLE_RETURN:
	  /* A return has exactly one, unflagged, edge to the exit block.  */
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR)
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (stmt);

	    /* Mark all the destination basic blocks.  The aux field is
	       used as a three-state marker: 0 = not a case target,
	       1 = case target, 2 = case target with a matching edge.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    /* Every outgoing edge must lead to some case target.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Reset the aux markers for subsequent blocks.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (stmt);
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
4947
4948
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  The PHI nodes currently in the forwarder
   (FALLTHRU->src) are moved to FALLTHRU->dest: for each, a new PHI is
   created in the destination taking the old result through FALLTHRU,
   and the old PHI gets a fresh SSA name as its result.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gimple_stmt_iterator gsi;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* With a single predecessor no PHI merging is needed.  */
  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi, new_phi;

      phi = gsi_stmt (gsi);
      var = gimple_phi_result (phi);
      new_phi = create_phi_node (var, bb);
      /* The old PHI keeps merging the redirected edges under a fresh
	 name; that name feeds the new PHI along FALLTHRU.  */
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
4990
4991
4992 /* Return a non-special label in the head of basic block BLOCK.
4993 Create one if it doesn't exist. */
4994
4995 tree
4996 gimple_block_label (basic_block bb)
4997 {
4998 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4999 bool first = true;
5000 tree label;
5001 gimple stmt;
5002
5003 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5004 {
5005 stmt = gsi_stmt (i);
5006 if (gimple_code (stmt) != GIMPLE_LABEL)
5007 break;
5008 label = gimple_label_label (stmt);
5009 if (!DECL_NONLOCAL (label))
5010 {
5011 if (!first)
5012 gsi_move_before (&i, &s);
5013 return label;
5014 }
5015 }
5016
5017 label = create_artificial_label (UNKNOWN_LOCATION);
5018 stmt = gimple_build_label (label);
5019 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5020 return label;
5021 }
5022
5023
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  The index
	 expression selects whichever of the two successors is not E.  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  /* Both arms of the branch go to TARGET, so the conditional or switch
     is useless: delete it and fall through.  */
  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}
5061
5062
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  Besides moving the edge in
   the CFG, this rewrites the statement ending E->src (switch case
   labels, asm goto labels, transaction abort labels, ...) so that it
   transfers control to DEST.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple stmt;

  /* Abnormal edges cannot be redirected.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  if (e->dest == dest)
    return NULL;

  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  /* Try the cheap route first: drop a now-trivial branch entirely.  */
  if (e->src != ENTRY_BLOCK_PTR)
    {
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  /* GIMPLE_ERROR_MARK stands in for "empty block" below.  */
  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	tree label = gimple_block_label (dest);
        tree cases = get_cases_for_edge (e, stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Point every case on E's chain at the new label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, stmt);

		/* Splice E's chain onto the front of E2's chain.  */
		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    /* Slow path: scan the whole case vector.  */
	    size_t i, n = gimple_switch_num_labels (stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	/* Rewrite any asm-goto label operand targeting the old
	   destination.  */
	int i, n = gimple_asm_nlabels (stmt);
	tree label = NULL;

	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (stmt, i);
	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      /* Returning somewhere other than exit: delete the return and
	 turn the edge into a fallthru.  */
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (stmt, e, dest);
      break;

    case GIMPLE_TRANSACTION:
      /* The ABORT edge has a stored label associated with it, otherwise
	 the edges are simply redirectable.  */
      if (e->flags == 0)
	gimple_transaction_set_label (stmt, gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
5210
5211 /* Returns true if it is possible to remove edge E by redirecting
5212 it to the destination of the other edge from E->src. */
5213
5214 static bool
5215 gimple_can_remove_branch_p (const_edge e)
5216 {
5217 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5218 return false;
5219
5220 return true;
5221 }
5222
5223 /* Simple wrapper, as we can always redirect fallthru edges. */
5224
5225 static basic_block
5226 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5227 {
5228 e = gimple_redirect_edge_and_branch (e, dest);
5229 gcc_assert (e);
5230
5231 return NULL;
5232 }
5233
5234
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the newly created block holding everything after the split
   point; outgoing edges are moved to it.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple act;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Splitting "after a label" means after the label run, i.e. the
     same as splitting after no statement at all.  */
  if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
    stmt = NULL;

  /* Move everything from GSI to the new basic block.  Position GSI on
     the first statement that must move: either the first non-label
     (STMT == NULL) or the statement following STMT.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      act = gsi_stmt (gsi);
      if (gimple_code (act) == GIMPLE_LABEL)
	continue;

      if (!stmt)
	break;

      if (stmt == act)
	{
	  gsi_next (&gsi);
	  break;
	}
    }

  /* Nothing to move: the new block stays empty.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
5292
5293
5294 /* Moves basic block BB after block AFTER. */
5295
5296 static bool
5297 gimple_move_block_after (basic_block bb, basic_block after)
5298 {
5299 if (bb->prev_bb == after)
5300 return true;
5301
5302 unlink_block (bb);
5303 link_block (bb, after);
5304
5305 return true;
5306 }
5307
5308
5309 /* Return TRUE if block BB has no executable statements, otherwise return
5310 FALSE. */
5311
5312 bool
5313 gimple_empty_block_p (basic_block bb)
5314 {
5315 /* BB must have no executable statements. */
5316 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5317 if (phi_nodes (bb))
5318 return false;
5319 if (gsi_end_p (gsi))
5320 return true;
5321 if (is_gimple_debug (gsi_stmt (gsi)))
5322 gsi_next_nondebug (&gsi);
5323 return gsi_end_p (gsi);
5324 }
5325
5326
/* Split a basic block if it ends with a conditional branch and if the
   other part of the block is not empty.  Returns the new block on
   success, NULL if BB does not end in a GIMPLE_COND or GIMPLE_SWITCH
   (or is empty).  */

static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
  gimple last, split_point;
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return NULL;
  last = gsi_stmt (gsi);
  if (gimple_code (last) != GIMPLE_COND
      && gimple_code (last) != GIMPLE_SWITCH)
    return NULL;
  gsi_prev_nondebug (&gsi);
  /* NOTE(review): when LAST is the only non-debug statement, GSI ends
     up before the sequence start and SPLIT_POINT is NULL; split_block
     then splits just after the labels — presumably intended, confirm.  */
  split_point = gsi_stmt (gsi);
  return split_block (bb, split_point)->dest;
}
5345
5346
/* CFG hook: return true if basic_block BB can be duplicated.  For
   GIMPLE every block is duplicable.  */

static bool
gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
{
  return true;
}
5354
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  The copy is placed just before the exit block;
   new SSA names are created for all definitions and registered as
   replacement mappings, but PHI arguments and edges are left for the
   caller to set up.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi, gsi_tgt;
  gimple_seq phis = phi_nodes (bb);
  gimple phi, stmt, copy;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);
      copy = create_phi_node (NULL_TREE, new_bb);
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;

      stmt = gsi_stmt (gsi);
      /* Labels are unique per function and are not copied.  */
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (TREE_CODE (base) == VAR_DECL
		  || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (TREE_CODE (base) != VAR_DECL
		  || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
5429
/* Adds phi node arguments for edge E_COPY after basic block duplication.
   The arguments are taken from the corresponding edge E between the
   original blocks (found via the BB_DUPLICATED flag and the
   original/copy tables).  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gimple phi, phi_copy;
  tree def;
  gimple_stmt_iterator psi, psi_copy;

  /* No PHIs in the copied destination: nothing to fill in.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the copy's endpoints back to the original blocks.  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* The PHI sequences of the original and copied destination blocks
     are parallel, so walk them in lockstep copying arguments.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = gsi_stmt (psi);
      phi_copy = gsi_stmt (psi_copy);
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
5480
5481
5482 /* Basic block BB_COPY was created by code duplication. Add phi node
5483 arguments for edges going out of BB_COPY. The blocks that were
5484 duplicated have BB_DUPLICATED set. */
5485
5486 void
5487 add_phi_args_after_copy_bb (basic_block bb_copy)
5488 {
5489 edge e_copy;
5490 edge_iterator ei;
5491
5492 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5493 {
5494 add_phi_args_after_copy_edge (e_copy);
5495 }
5496 }
5497
5498 /* Blocks in REGION_COPY array of length N_REGION were created by
5499 duplication of basic blocks. Add phi node arguments for edges
5500 going from these blocks. If E_COPY is not NULL, also add
5501 phi node arguments for its destination.*/
5502
5503 void
5504 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5505 edge e_copy)
5506 {
5507 unsigned i;
5508
5509 for (i = 0; i < n_region; i++)
5510 region_copy[i]->flags |= BB_DUPLICATED;
5511
5512 for (i = 0; i < n_region; i++)
5513 add_phi_args_after_copy_bb (region_copy[i]);
5514 if (e_copy)
5515 add_phi_args_after_copy_edge (e_copy);
5516
5517 for (i = 0; i < n_region; i++)
5518 region_copy[i]->flags &= ~BB_DUPLICATED;
5519 }
5520
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated, but not the SSA web.  The new basic blocks are stored to
   REGION_COPY in the same order as they had in REGION, provided that
   REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  /* Either block counts or frequencies are used for scaling, depending
     on whether profile counts are available.  */
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     missuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  set_loop_copy (loop, loop);

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      set_loop_copy (loop, loop_outer (loop));

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms.create (0);
  initialize_original_copy_tables ();

  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Remember the profile weight entering the region, so original and
     copy can be scaled to split it between them below.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry));
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
  doms.safe_push (get_bb_original (entry->dest));
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
5663
5664 /* Checks if BB is part of the region defined by N_REGION BBS. */
5665 static bool
5666 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5667 {
5668 unsigned int n;
5669
5670 for (n = 0; n < n_region; n++)
5671 {
5672 if (bb == bbs[n])
5673 return true;
5674 }
5675 return false;
5676 }
5677
5678 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5679 are stored to REGION_COPY in the same order in that they appear
5680 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5681 the region, EXIT an exit from it. The condition guarding EXIT
5682 is moved to ENTRY. Returns true if duplication succeeds, false
5683 otherwise.
5684
5685 For example,
5686
5687 some_code;
5688 if (cond)
5689 A;
5690 else
5691 B;
5692
5693 is transformed to
5694
5695 if (cond)
5696 {
5697 some_code;
5698 A;
5699 }
5700 else
5701 {
5702 some_code;
5703 B;
5704 }
5705 */
5706
5707 bool
5708 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5709 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5710 basic_block *region_copy ATTRIBUTE_UNUSED)
5711 {
5712 unsigned i;
5713 bool free_region_copy = false;
5714 struct loop *loop = exit->dest->loop_father;
5715 struct loop *orig_loop = entry->dest->loop_father;
5716 basic_block switch_bb, entry_bb, nentry_bb;
5717 vec<basic_block> doms;
5718 int total_freq = 0, exit_freq = 0;
5719 gcov_type total_count = 0, exit_count = 0;
5720 edge exits[2], nexits[2], e;
5721 gimple_stmt_iterator gsi;
5722 gimple cond_stmt;
5723 edge sorig, snew;
5724 basic_block exit_bb;
5725 gimple_stmt_iterator psi;
5726 gimple phi;
5727 tree def;
5728 struct loop *target, *aloop, *cloop;
5729
5730 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5731 exits[0] = exit;
5732 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5733
5734 if (!can_copy_bbs_p (region, n_region))
5735 return false;
5736
5737 initialize_original_copy_tables ();
5738 set_loop_copy (orig_loop, loop);
5739
5740 target= loop;
5741 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5742 {
5743 if (bb_part_of_region_p (aloop->header, region, n_region))
5744 {
5745 cloop = duplicate_loop (aloop, target);
5746 duplicate_subloops (aloop, cloop);
5747 }
5748 }
5749
5750 if (!region_copy)
5751 {
5752 region_copy = XNEWVEC (basic_block, n_region);
5753 free_region_copy = true;
5754 }
5755
5756 gcc_assert (!need_ssa_update_p (cfun));
5757
5758 /* Record blocks outside the region that are dominated by something
5759 inside. */
5760 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5761
5762 if (exit->src->count)
5763 {
5764 total_count = exit->src->count;
5765 exit_count = exit->count;
5766 /* Fix up corner cases, to avoid division by zero or creation of negative
5767 frequencies. */
5768 if (exit_count > total_count)
5769 exit_count = total_count;
5770 }
5771 else
5772 {
5773 total_freq = exit->src->frequency;
5774 exit_freq = EDGE_FREQUENCY (exit);
5775 /* Fix up corner cases, to avoid division by zero or creation of negative
5776 frequencies. */
5777 if (total_freq == 0)
5778 total_freq = 1;
5779 if (exit_freq > total_freq)
5780 exit_freq = total_freq;
5781 }
5782
5783 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5784 split_edge_bb_loc (exit));
5785 if (total_count)
5786 {
5787 scale_bbs_frequencies_gcov_type (region, n_region,
5788 total_count - exit_count,
5789 total_count);
5790 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5791 total_count);
5792 }
5793 else
5794 {
5795 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5796 total_freq);
5797 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5798 }
5799
5800 /* Create the switch block, and put the exit condition to it. */
5801 entry_bb = entry->dest;
5802 nentry_bb = get_bb_copy (entry_bb);
5803 if (!last_stmt (entry->src)
5804 || !stmt_ends_bb_p (last_stmt (entry->src)))
5805 switch_bb = entry->src;
5806 else
5807 switch_bb = split_edge (entry);
5808 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5809
5810 gsi = gsi_last_bb (switch_bb);
5811 cond_stmt = last_stmt (exit->src);
5812 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5813 cond_stmt = gimple_copy (cond_stmt);
5814
5815 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5816
5817 sorig = single_succ_edge (switch_bb);
5818 sorig->flags = exits[1]->flags;
5819 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5820
5821 /* Register the new edge from SWITCH_BB in loop exit lists. */
5822 rescan_loop_exit (snew, true, false);
5823
5824 /* Add the PHI node arguments. */
5825 add_phi_args_after_copy (region_copy, n_region, snew);
5826
5827 /* Get rid of now superfluous conditions and associated edges (and phi node
5828 arguments). */
5829 exit_bb = exit->dest;
5830
5831 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5832 PENDING_STMT (e) = NULL;
5833
5834 /* The latch of ORIG_LOOP was copied, and so was the backedge
5835 to the original header. We redirect this backedge to EXIT_BB. */
5836 for (i = 0; i < n_region; i++)
5837 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5838 {
5839 gcc_assert (single_succ_edge (region_copy[i]));
5840 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5841 PENDING_STMT (e) = NULL;
5842 for (psi = gsi_start_phis (exit_bb);
5843 !gsi_end_p (psi);
5844 gsi_next (&psi))
5845 {
5846 phi = gsi_stmt (psi);
5847 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5848 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5849 }
5850 }
5851 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5852 PENDING_STMT (e) = NULL;
5853
5854 /* Anything that is outside of the region, but was dominated by something
5855 inside needs to update dominance info. */
5856 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5857 doms.release ();
5858 /* Update the SSA web. */
5859 update_ssa (TODO_update_ssa);
5860
5861 if (free_region_copy)
5862 free (region_copy);
5863
5864 free_original_copy_tables ();
5865 return true;
5866 }
5867
5868 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5869 adding blocks when the dominator traversal reaches EXIT. This
5870 function silently assumes that ENTRY strictly dominates EXIT. */
5871
5872 void
5873 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5874 vec<basic_block> *bbs_p)
5875 {
5876 basic_block son;
5877
5878 for (son = first_dom_son (CDI_DOMINATORS, entry);
5879 son;
5880 son = next_dom_son (CDI_DOMINATORS, son))
5881 {
5882 bbs_p->safe_push (son);
5883 if (son != exit)
5884 gather_blocks_in_sese_region (son, exit, bbs_p);
5885 }
5886 }
5887
5888 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5889 The duplicates are recorded in VARS_MAP. */
5890
5891 static void
5892 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5893 tree to_context)
5894 {
5895 tree t = *tp, new_t;
5896 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5897 void **loc;
5898
5899 if (DECL_CONTEXT (t) == to_context)
5900 return;
5901
5902 loc = pointer_map_contains (vars_map, t);
5903
5904 if (!loc)
5905 {
5906 loc = pointer_map_insert (vars_map, t);
5907
5908 if (SSA_VAR_P (t))
5909 {
5910 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5911 add_local_decl (f, new_t);
5912 }
5913 else
5914 {
5915 gcc_assert (TREE_CODE (t) == CONST_DECL);
5916 new_t = copy_node (t);
5917 }
5918 DECL_CONTEXT (new_t) = to_context;
5919
5920 *loc = new_t;
5921 }
5922 else
5923 new_t = (tree) *loc;
5924
5925 *tp = new_t;
5926 }
5927
5928
5929 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5930 VARS_MAP maps old ssa names and var_decls to the new ones. */
5931
5932 static tree
5933 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5934 tree to_context)
5935 {
5936 void **loc;
5937 tree new_name;
5938
5939 gcc_assert (!virtual_operand_p (name));
5940
5941 loc = pointer_map_contains (vars_map, name);
5942
5943 if (!loc)
5944 {
5945 tree decl = SSA_NAME_VAR (name);
5946 if (decl)
5947 {
5948 replace_by_duplicate_decl (&decl, vars_map, to_context);
5949 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
5950 decl, SSA_NAME_DEF_STMT (name));
5951 if (SSA_NAME_IS_DEFAULT_DEF (name))
5952 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
5953 decl, new_name);
5954 }
5955 else
5956 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
5957 name, SSA_NAME_DEF_STMT (name));
5958
5959 loc = pointer_map_insert (vars_map, name);
5960 *loc = new_name;
5961 }
5962 else
5963 new_name = (tree) *loc;
5964
5965 return new_name;
5966 }
5967
/* Bag of state threaded through the move_stmt_r / move_stmt_op
   walkers while moving statements between functions.  */
struct move_stmt_d
{
  tree orig_block;	/* Scope block statements are being moved out of;
			   NULL_TREE means "move everything".  */
  tree new_block;	/* Scope block they are being moved into.  */
  tree from_context;	/* FUNCTION_DECL of the source function.  */
  tree to_context;	/* FUNCTION_DECL of the destination function.  */
  struct pointer_map_t *vars_map;	/* Old decl/SSA name -> duplicate.  */
  htab_t new_label_map;	/* Old LABEL_DECL -> new LABEL_DECL (tree_map).  */
  struct pointer_map_t *eh_map;	/* Old EH region -> duplicated region.  */
  bool remap_decls_p;	/* Whether local decls should be replaced by
			   duplicates (disabled inside OMP directives).  */
};
5979
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      /* A NULL_TREE ORIG_BLOCK means move every blockless expression;
	 otherwise only expressions in ORIG_BLOCK are retargeted.  */
      if (TREE_BLOCK (t) == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && TREE_BLOCK (t) == NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels may have been remapped while duplicating EH regions;
	     substitute the new label before fixing its context.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      /* Decls and SSA names need no further walking.  */
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
6036
6037 /* Helper for move_stmt_r. Given an EH region number for the source
6038 function, map that to the duplicate EH regio number in the dest. */
6039
6040 static int
6041 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6042 {
6043 eh_region old_r, new_r;
6044 void **slot;
6045
6046 old_r = get_eh_region_from_number (old_nr);
6047 slot = pointer_map_contains (p->eh_map, old_r);
6048 new_r = (eh_region) *slot;
6049
6050 return new_r->index;
6051 }
6052
6053 /* Similar, but operate on INTEGER_CSTs. */
6054
6055 static tree
6056 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6057 {
6058 int old_nr, new_nr;
6059
6060 old_nr = tree_low_cst (old_t_nr, 0);
6061 new_nr = move_stmt_eh_region_nr (old_nr, p);
6062
6063 return build_int_cst (integer_type_node, new_nr);
6064 }
6065
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (p->orig_block == NULL_TREE
      || block == p->orig_block
      || block == NULL_TREE)
    gimple_set_block (stmt, p->new_block);
#ifdef ENABLE_CHECKING
  else if (block != p->new_block)
    {
      /* Verify the statement's block is nested inside ORIG_BLOCK by
	 walking up the BLOCK_SUPERCONTEXT chain.  */
      while (block && block != p->orig_block)
	block = BLOCK_SUPERCONTEXT (block);
      gcc_assert (block);
    }
#endif

  /* Statements that refer to EH regions by number must have those
     numbers remapped to the duplicated regions in the destination.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	int r = gimple_resx_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	int r = gimple_eh_dispatch_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
6162
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);
  if (current_loops)
    remove_bb_from_loops (bb);

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its old
     index, so the array must cover it.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by roughly 25% headroom beyond the required size.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
    {
      gimple phi = gsi_stmt (si);
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&si, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Retarget the scope block recorded in each phi argument's
	 location, mirroring what move_stmt_op does for expressions.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      if (d->new_block == NULL_TREE)
		locus = LOCATION_LOCUS (locus);
	      else
		locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&si);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Move the label's uid->block mapping from the old function's
	     table to the new one, growing the latter if necessary.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Transfer EH and histogram information to the destination,
	 removing it from the source.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Retarget the scope block of each outgoing edge's goto locus, the
     same way statement and phi-argument locations were handled.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = d->new_block ?
	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	      LOCATION_LOCUS (e->goto_locus);
#ifdef ENABLE_CHECKING
	else if (block != d->new_block)
	  {
	    while (block && block != d->orig_block)
	      block = BLOCK_SUPERCONTEXT (block);
	    gcc_assert (block);
	  }
#endif
      }
}
6330
6331 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6332 the outermost EH region. Use REGION as the incoming base EH region. */
6333
6334 static eh_region
6335 find_outermost_region_in_block (struct function *src_cfun,
6336 basic_block bb, eh_region region)
6337 {
6338 gimple_stmt_iterator si;
6339
6340 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6341 {
6342 gimple stmt = gsi_stmt (si);
6343 eh_region stmt_region;
6344 int lp_nr;
6345
6346 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6347 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6348 if (stmt_region)
6349 {
6350 if (region == NULL)
6351 region = stmt_region;
6352 else if (stmt_region != region)
6353 {
6354 region = eh_region_outermost (src_cfun, stmt_region, region);
6355 gcc_assert (region != NULL);
6356 }
6357 }
6358 }
6359
6360 return region;
6361 }
6362
6363 static tree
6364 new_label_mapper (tree decl, void *data)
6365 {
6366 htab_t hash = (htab_t) data;
6367 struct tree_map *m;
6368 void **slot;
6369
6370 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6371
6372 m = XNEW (struct tree_map);
6373 m->hash = DECL_UID (decl);
6374 m->base.from = decl;
6375 m->to = create_artificial_label (UNKNOWN_LOCATION);
6376 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6377 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6378 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6379
6380 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6381 gcc_assert (*slot == NULL);
6382
6383 *slot = m;
6384
6385 return m->to;
6386 }
6387
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  */

static void
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
				  tree to_context)
{
  tree *tp, t;

  /* TP points at the chain slot holding the current decl, so a
     duplicate can be spliced into the BLOCK_VARS list in place.  */
  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  /* Carry over any DECL_VALUE_EXPR to the duplicate.  */
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain in place of the
	     original.  */
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into nested scope blocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
6418
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   functions outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
			basic_block exit_bb, tree orig_block)
{
  vec<basic_block> bbs, dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  struct pointer_map_t *vars_map, *eh_map;
  struct loop *loop = entry_bb->loop_father;
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs.create (0);
  bbs.safe_push (entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     bbs.address (),
				     bbs.length ());

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = XNEWVEC (basic_block, num_entry_edges);
  entry_flag = XNEWVEC (int, num_entry_edges);
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = XNEWVEC (basic_block, num_exit_edges);
      exit_flag = XNEWVEC (int, num_exit_edges);
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_map = NULL;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      eh_region region = NULL;

      /* Find the single outermost EH region covering the whole region,
	 then duplicate it (and everything nested in it) into DEST_CFUN.  */
      FOR_EACH_VEC_ELT (bbs, i, bb)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != NULL)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
					 new_label_mapper, new_label_map);
	}
    }

  pop_cfun ();

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (bbs.length () >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  vars_map = pointer_map_create ();

  memset (&d, 0, sizeof (d));
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.vars_map = vars_map;
  d.new_label_map = new_label_map;
  d.eh_map = eh_map;
  d.remap_decls_p = true;

  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
      after = bb;
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  if (eh_map)
    pointer_map_destroy (eh_map);
  pointer_map_destroy (vars_map);

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  /* The replacement block takes over both ENTRY_BB's dominator and
     all the blocks previously dominated by the region.  */
  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  FOR_EACH_VEC_ELT (dom_bbs, i, abb)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  dom_bbs.release ();

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  bbs.release ();

  return bb;
}
6637

/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
   */

void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  current_function_decl = fndecl;
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  /* Print the argument list: "type name, type name, ...".  */
  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* With TDF_RAW and no gimple body, fall back to a raw tree dump.  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* Also list SSA names that have no underlying variable decl.  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, "  ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_function (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_function (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  /* Restore the function decl that was current on entry.  */
  current_function_decl = old_current_fndecl;
}
6796
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).
   Convenience wrapper around dump_function_to_file for calling from a
   debugger.  */

DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  dump_function_to_file (fn, stderr, flags);
}
6804
6805
6806 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6807
6808 static void
6809 print_pred_bbs (FILE *file, basic_block bb)
6810 {
6811 edge e;
6812 edge_iterator ei;
6813
6814 FOR_EACH_EDGE (e, ei, bb->preds)
6815 fprintf (file, "bb_%d ", e->src->index);
6816 }
6817
6818
6819 /* Print on FILE the indexes for the successors of basic_block BB. */
6820
6821 static void
6822 print_succ_bbs (FILE *file, basic_block bb)
6823 {
6824 edge e;
6825 edge_iterator ei;
6826
6827 FOR_EACH_EDGE (e, ei, bb->succs)
6828 fprintf (file, "bb_%d ", e->dest->index);
6829 }
6830
6831 /* Print to FILE the basic block BB following the VERBOSITY level. */
6832
6833 void
6834 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6835 {
6836 char *s_indent = (char *) alloca ((size_t) indent + 1);
6837 memset ((void *) s_indent, ' ', (size_t) indent);
6838 s_indent[indent] = '\0';
6839
6840 /* Print basic_block's header. */
6841 if (verbosity >= 2)
6842 {
6843 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6844 print_pred_bbs (file, bb);
6845 fprintf (file, "}, succs = {");
6846 print_succ_bbs (file, bb);
6847 fprintf (file, "})\n");
6848 }
6849
6850 /* Print basic_block's body. */
6851 if (verbosity >= 3)
6852 {
6853 fprintf (file, "%s {\n", s_indent);
6854 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6855 fprintf (file, "%s }\n", s_indent);
6856 }
6857 }
6858
6859 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6860
6861 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
6862 VERBOSITY level this outputs the contents of the loop, or just its
6863 structure. */
6864
6865 static void
6866 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6867 {
6868 char *s_indent;
6869 basic_block bb;
6870
6871 if (loop == NULL)
6872 return;
6873
6874 s_indent = (char *) alloca ((size_t) indent + 1);
6875 memset ((void *) s_indent, ' ', (size_t) indent);
6876 s_indent[indent] = '\0';
6877
6878 /* Print loop's header. */
6879 fprintf (file, "%sloop_%d (", s_indent, loop->num);
6880 if (loop->header)
6881 fprintf (file, "header = %d", loop->header->index);
6882 else
6883 {
6884 fprintf (file, "deleted)\n");
6885 return;
6886 }
6887 if (loop->latch)
6888 fprintf (file, ", latch = %d", loop->latch->index);
6889 else
6890 fprintf (file, ", multiple latches");
6891 fprintf (file, ", niter = ");
6892 print_generic_expr (file, loop->nb_iterations, 0);
6893
6894 if (loop->any_upper_bound)
6895 {
6896 fprintf (file, ", upper_bound = ");
6897 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6898 }
6899
6900 if (loop->any_estimate)
6901 {
6902 fprintf (file, ", estimate = ");
6903 dump_double_int (file, loop->nb_iterations_estimate, true);
6904 }
6905 fprintf (file, ")\n");
6906
6907 /* Print loop's body. */
6908 if (verbosity >= 1)
6909 {
6910 fprintf (file, "%s{\n", s_indent);
6911 FOR_EACH_BB (bb)
6912 if (bb->loop_father == loop)
6913 print_loops_bb (file, bb, indent, verbosity);
6914
6915 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6916 fprintf (file, "%s}\n", s_indent);
6917 }
6918 }
6919
6920 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6921 spaces. Following VERBOSITY level this outputs the contents of the
6922 loop, or just its structure. */
6923
6924 static void
6925 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6926 {
6927 if (loop == NULL)
6928 return;
6929
6930 print_loop (file, loop, indent, verbosity);
6931 print_loop_and_siblings (file, loop->next, indent, verbosity);
6932 }
6933
6934 /* Follow a CFG edge from the entry point of the program, and on entry
6935 of a loop, pretty print the loop structure on FILE. */
6936
6937 void
6938 print_loops (FILE *file, int verbosity)
6939 {
6940 basic_block bb;
6941
6942 bb = ENTRY_BLOCK_PTR;
6943 if (bb && bb->loop_father)
6944 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6945 }
6946
6947
/* Debugging loops structure at tree level, at some VERBOSITY level.
   Debugger-callable wrapper: dumps the whole loop tree to stderr.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
6955
/* Print on stderr the code of LOOP, at some VERBOSITY level.
   Debugger-callable wrapper around print_loop.  */

DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
6963
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  Looks the loop up by its index in the current function's
   loop tree, then dumps it.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (num), verbosity);
}
6972
6973 /* Return true if BB ends with a call, possibly followed by some
6974 instructions that must stay with the call. Return false,
6975 otherwise. */
6976
6977 static bool
6978 gimple_block_ends_with_call_p (basic_block bb)
6979 {
6980 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6981 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6982 }
6983
6984
6985 /* Return true if BB ends with a conditional branch. Return false,
6986 otherwise. */
6987
6988 static bool
6989 gimple_block_ends_with_condjump_p (const_basic_block bb)
6990 {
6991 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6992 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6993 }
6994
6995
6996 /* Return true if we need to add fake edge to exit at statement T.
6997 Helper function for gimple_flow_call_edges_add. */
6998
6999 static bool
7000 need_fake_edge_p (gimple t)
7001 {
7002 tree fndecl = NULL_TREE;
7003 int call_flags = 0;
7004
7005 /* NORETURN and LONGJMP calls already have an edge to exit.
7006 CONST and PURE calls do not need one.
7007 We don't currently check for CONST and PURE here, although
7008 it would be a good idea, because those attributes are
7009 figured out from the RTL in mark_constant_function, and
7010 the counter incrementation code from -fprofile-arcs
7011 leads to different results from -fbranch-probabilities. */
7012 if (is_gimple_call (t))
7013 {
7014 fndecl = gimple_call_fndecl (t);
7015 call_flags = gimple_call_flags (t);
7016 }
7017
7018 if (is_gimple_call (t)
7019 && fndecl
7020 && DECL_BUILT_IN (fndecl)
7021 && (call_flags & ECF_NOTHROW)
7022 && !(call_flags & ECF_RETURNS_TWICE)
7023 /* fork() doesn't really return twice, but the effect of
7024 wrapping it in __gcov_fork() which calls __gcov_flush()
7025 and clears the counters before forking has the same
7026 effect as returning twice. Force a fake edge. */
7027 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7028 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7029 return false;
7030
7031 if (is_gimple_call (t))
7032 {
7033 edge_iterator ei;
7034 edge e;
7035 basic_block bb;
7036
7037 if (!(call_flags & ECF_NORETURN))
7038 return true;
7039
7040 bb = gimple_bb (t);
7041 FOR_EACH_EDGE (e, ei, bb->succs)
7042 if ((e->flags & EDGE_FAKE) == 0)
7043 return true;
7044 }
7045
7046 if (gimple_code (t) == GIMPLE_ASM
7047 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7048 return true;
7049
7050 return false;
7051 }
7052
7053
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  /* Snapshot the block count up front: split_block below can create
     new blocks, and those must not be revisited by the loop.  */
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Nothing to do when only the fixed entry/exit blocks exist.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* Inserting a nop on the edge splits it, so the call is
		 no longer in the block falling through to EXIT.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the block's statements backwards so splits below the
	 current statement do not disturb the walk.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR);
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
7173
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* Without dominator info we cannot tell which blocks die; just
     remove the edge.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR)
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* Collect in DF the surviving successors of the removed blocks;
	 this approximates the dominance frontier of E.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR)
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK (i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
7299
7300 /* Purge dead EH edges from basic block BB. */
7301
7302 bool
7303 gimple_purge_dead_eh_edges (basic_block bb)
7304 {
7305 bool changed = false;
7306 edge e;
7307 edge_iterator ei;
7308 gimple stmt = last_stmt (bb);
7309
7310 if (stmt && stmt_can_throw_internal (stmt))
7311 return false;
7312
7313 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7314 {
7315 if (e->flags & EDGE_EH)
7316 {
7317 remove_edge_and_dominated_blocks (e);
7318 changed = true;
7319 }
7320 else
7321 ei_next (&ei);
7322 }
7323
7324 return changed;
7325 }
7326
7327 /* Purge dead EH edges from basic block listed in BLOCKS. */
7328
7329 bool
7330 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7331 {
7332 bool changed = false;
7333 unsigned i;
7334 bitmap_iterator bi;
7335
7336 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7337 {
7338 basic_block bb = BASIC_BLOCK (i);
7339
7340 /* Earlier gimple_purge_dead_eh_edges could have removed
7341 this basic block already. */
7342 gcc_assert (bb || changed);
7343 if (bb != NULL)
7344 changed |= gimple_purge_dead_eh_edges (bb);
7345 }
7346
7347 return changed;
7348 }
7349
7350 /* Purge dead abnormal call edges from basic block BB. */
7351
7352 bool
7353 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7354 {
7355 bool changed = false;
7356 edge e;
7357 edge_iterator ei;
7358 gimple stmt = last_stmt (bb);
7359
7360 if (!cfun->has_nonlocal_label)
7361 return false;
7362
7363 if (stmt && stmt_can_make_abnormal_goto (stmt))
7364 return false;
7365
7366 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7367 {
7368 if (e->flags & EDGE_ABNORMAL)
7369 {
7370 remove_edge_and_dominated_blocks (e);
7371 changed = true;
7372 }
7373 else
7374 ei_next (&ei);
7375 }
7376
7377 return changed;
7378 }
7379
7380 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7381
7382 bool
7383 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7384 {
7385 bool changed = false;
7386 unsigned i;
7387 bitmap_iterator bi;
7388
7389 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7390 {
7391 basic_block bb = BASIC_BLOCK (i);
7392
7393 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7394 this basic block already. */
7395 gcc_assert (bb || changed);
7396 if (bb != NULL)
7397 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7398 }
7399
7400 return changed;
7401 }
7402
7403 /* This function is called whenever a new edge is created or
7404 redirected. */
7405
7406 static void
7407 gimple_execute_on_growing_pred (edge e)
7408 {
7409 basic_block bb = e->dest;
7410
7411 if (!gimple_seq_empty_p (phi_nodes (bb)))
7412 reserve_phi_args_for_new_edge (bb);
7413 }
7414
7415 /* This function is called immediately before edge E is removed from
7416 the edge vector E->dest->preds. */
7417
7418 static void
7419 gimple_execute_on_shrinking_pred (edge e)
7420 {
7421 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7422 remove_phi_args (e);
7423 }
7424
7425 /*---------------------------------------------------------------------------
7426 Helper functions for Loop versioning
7427 ---------------------------------------------------------------------------*/
7428
7429 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7430 of 'first'. Both of them are dominated by 'new_head' basic block. When
7431 'new_head' was created by 'second's incoming edge it received phi arguments
7432 on the edge by split_edge(). Later, additional edge 'e' was created to
7433 connect 'new_head' and 'first'. Now this routine adds phi args on this
7434 additional edge 'e' that new_head to second edge received as part of edge
7435 splitting. */
7436
7437 static void
7438 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7439 basic_block new_head, edge e)
7440 {
7441 gimple phi1, phi2;
7442 gimple_stmt_iterator psi1, psi2;
7443 tree def;
7444 edge e2 = find_edge (new_head, second);
7445
7446 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7447 edge, we should always have an edge from NEW_HEAD to SECOND. */
7448 gcc_assert (e2 != NULL);
7449
7450 /* Browse all 'second' basic block phi nodes and add phi args to
7451 edge 'e' for 'first' head. PHI args are always in correct order. */
7452
7453 for (psi2 = gsi_start_phis (second),
7454 psi1 = gsi_start_phis (first);
7455 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7456 gsi_next (&psi2), gsi_next (&psi1))
7457 {
7458 phi1 = gsi_stmt (psi1);
7459 phi2 = gsi_stmt (psi2);
7460 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7461 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7462 }
7463 }
7464
7465
7466 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7467 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7468 the destination of the ELSE part. */
7469
7470 static void
7471 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7472 basic_block second_head ATTRIBUTE_UNUSED,
7473 basic_block cond_bb, void *cond_e)
7474 {
7475 gimple_stmt_iterator gsi;
7476 gimple new_cond_expr;
7477 tree cond_expr = (tree) cond_e;
7478 edge e0;
7479
7480 /* Build new conditional expr */
7481 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7482 NULL_TREE, NULL_TREE);
7483
7484 /* Add new cond in cond_bb. */
7485 gsi = gsi_last_bb (cond_bb);
7486 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7487
7488 /* Adjust edges appropriately to connect new head with first head
7489 as well as second head. */
7490 e0 = single_succ_edge (cond_bb);
7491 e0->flags &= ~EDGE_FALLTHRU;
7492 e0->flags |= EDGE_FALSE_VALUE;
7493 }
7494
7495
7496 /* Do book-keeping of basic block BB for the profile consistency checker.
7497 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7498 then do post-pass accounting. Store the counting in RECORD. */
7499 static void
7500 gimple_account_profile_record (basic_block bb, int after_pass,
7501 struct profile_record *record)
7502 {
7503 gimple_stmt_iterator i;
7504 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7505 {
7506 record->size[after_pass]
7507 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7508 if (profile_status == PROFILE_READ)
7509 record->time[after_pass]
7510 += estimate_num_insns (gsi_stmt (i),
7511 &eni_time_weights) * bb->count;
7512 else if (profile_status == PROFILE_GUESSED)
7513 record->time[after_pass]
7514 += estimate_num_insns (gsi_stmt (i),
7515 &eni_time_weights) * bb->frequency;
7516 }
7517 }
7518
/* The set of CFG hook implementations used when the IL is GIMPLE.
   Field order follows struct cfg_hooks.  */

struct cfg_hooks gimple_cfg_hooks = {
  "gimple",				/* name */
  gimple_verify_flow_info,		/* verify_flow_info */
  gimple_dump_bb,		/* dump_bb */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph */
  create_bb,			/* create_basic_block */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  gimple_can_remove_branch_p,	/* can_remove_branch_p */
  remove_bb,			/* delete_basic_block */
  gimple_split_block,		/* split_block */
  gimple_move_block_after,	/* move_block_after */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p */
  gimple_merge_blocks,		/* merge_blocks */
  gimple_predict_edge,		/* predict_edge */
  gimple_predicted_by_p,	/* predicted_by_p */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p */
  gimple_duplicate_bb,		/* duplicate_block */
  gimple_split_edge,		/* split_edge */
  gimple_make_forwarder_block,	/* make_forward_block */
  NULL,				/* tidy_fallthru_edge */
  NULL,				/* force_nonfallthru */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,   /* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts,		/* flush_pending_stmts */
  gimple_empty_block_p,           /* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record,	/* account_profile_record */
};
7555
7556
/* Split all critical edges.  Returns 0 (no TODO flags).  Abnormal
   edges are never split; some non-critical edges are also split to
   match the requirements of gimple_find_edge_insert_loc.  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
	            || !gimple_seq_empty_p (phi_nodes (e->dest))
	            || e->dest == EXIT_BLOCK_PTR)
		   && e->src != ENTRY_BLOCK_PTR
	           && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      /* Split only if the source block ends in a control-flow
		 statement other than a plain return.  */
	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
7604
/* Pass descriptor for split_critical_edges; provides the
   PROP_no_crit_edges property required by passes such as PRE.  */

struct gimple_opt_pass pass_split_crit_edges =
{
 {
  GIMPLE_PASS,
  "crited",                          /* name */
  OPTGROUP_NONE,                 /* optinfo_flags */
  NULL,                          /* gate */
  split_critical_edges,          /* execute */
  NULL,                          /* sub */
  NULL,                          /* next */
  0,                             /* static_pass_number */
  TV_TREE_SPLIT_EDGES,           /* tv_id */
  PROP_cfg,                      /* properties_required */
  PROP_no_crit_edges,            /* properties_provided */
  0,                             /* properties_destroyed */
  0,                             /* todo_flags_start */
  TODO_verify_flow               /* todo_flags_finish */
 }
};
7624
7625
7626 /* Build a ternary operation and gimplify it. Emit code before GSI.
7627 Return the gimple_val holding the result. */
7628
7629 tree
7630 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7631 tree type, tree a, tree b, tree c)
7632 {
7633 tree ret;
7634 location_t loc = gimple_location (gsi_stmt (*gsi));
7635
7636 ret = fold_build3_loc (loc, code, type, a, b, c);
7637 STRIP_NOPS (ret);
7638
7639 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7640 GSI_SAME_STMT);
7641 }
7642
7643 /* Build a binary operation and gimplify it. Emit code before GSI.
7644 Return the gimple_val holding the result. */
7645
7646 tree
7647 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7648 tree type, tree a, tree b)
7649 {
7650 tree ret;
7651
7652 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7653 STRIP_NOPS (ret);
7654
7655 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7656 GSI_SAME_STMT);
7657 }
7658
7659 /* Build a unary operation and gimplify it. Emit code before GSI.
7660 Return the gimple_val holding the result. */
7661
7662 tree
7663 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7664 tree a)
7665 {
7666 tree ret;
7667
7668 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7669 STRIP_NOPS (ret);
7670
7671 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7672 GSI_SAME_STMT);
7673 }
7674
7675
7676 \f
/* Emit return warnings: warn when a noreturn function actually
   returns, and (with -Wreturn-type) when control reaches the end of a
   non-void function without a value.  Always returns 0.  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      /* Point the warning at some return statement with a known
	 location, if there is one.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  gimple last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		  location = cfun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Only warn once per function.  */
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
7733
7734
7735 /* Given a basic block B which ends with a conditional and has
7736 precisely two successors, determine which of the edges is taken if
7737 the conditional is true and which is taken if the conditional is
7738 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7739
7740 void
7741 extract_true_false_edges_from_block (basic_block b,
7742 edge *true_edge,
7743 edge *false_edge)
7744 {
7745 edge e = EDGE_SUCC (b, 0);
7746
7747 if (e->flags & EDGE_TRUE_VALUE)
7748 {
7749 *true_edge = e;
7750 *false_edge = EDGE_SUCC (b, 1);
7751 }
7752 else
7753 {
7754 *false_edge = e;
7755 *true_edge = EDGE_SUCC (b, 1);
7756 }
7757 }
7758
/* Pass descriptor for execute_warn_function_return; runs whenever the
   CFG is available (no gate).  */

struct gimple_opt_pass pass_warn_function_return =
{
 {
  GIMPLE_PASS,
  "*warn_function_return",		/* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,					/* gate */
  execute_warn_function_return,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
7778
7779 /* Emit noreturn warnings. */
7780
7781 static unsigned int
7782 execute_warn_function_noreturn (void)
7783 {
7784 if (!TREE_THIS_VOLATILE (current_function_decl)
7785 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7786 warn_function_noreturn (current_function_decl);
7787 return 0;
7788 }
7789
7790 static bool
7791 gate_warn_function_noreturn (void)
7792 {
7793 return warn_suggest_attribute_noreturn;
7794 }
7795
/* Pass descriptor for execute_warn_function_noreturn, gated on
   -Wsuggest-attribute=noreturn.  */

struct gimple_opt_pass pass_warn_function_noreturn =
{
 {
  GIMPLE_PASS,
  "*warn_function_noreturn",		/* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_warn_function_noreturn,		/* gate */
  execute_warn_function_noreturn,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
7815
7816
7817 /* Walk a gimplified function and warn for functions whose return value is
7818 ignored and attribute((warn_unused_result)) is set. This is done before
7819 inlining, so we don't have to worry about that. */
7820
7821 static void
7822 do_warn_unused_result (gimple_seq seq)
7823 {
7824 tree fdecl, ftype;
7825 gimple_stmt_iterator i;
7826
7827 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7828 {
7829 gimple g = gsi_stmt (i);
7830
7831 switch (gimple_code (g))
7832 {
7833 case GIMPLE_BIND:
7834 do_warn_unused_result (gimple_bind_body (g));
7835 break;
7836 case GIMPLE_TRY:
7837 do_warn_unused_result (gimple_try_eval (g));
7838 do_warn_unused_result (gimple_try_cleanup (g));
7839 break;
7840 case GIMPLE_CATCH:
7841 do_warn_unused_result (gimple_catch_handler (g));
7842 break;
7843 case GIMPLE_EH_FILTER:
7844 do_warn_unused_result (gimple_eh_filter_failure (g));
7845 break;
7846
7847 case GIMPLE_CALL:
7848 if (gimple_call_lhs (g))
7849 break;
7850 if (gimple_call_internal_p (g))
7851 break;
7852
7853 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7854 LHS. All calls whose value is ignored should be
7855 represented like this. Look for the attribute. */
7856 fdecl = gimple_call_fndecl (g);
7857 ftype = gimple_call_fntype (g);
7858
7859 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7860 {
7861 location_t loc = gimple_location (g);
7862
7863 if (fdecl)
7864 warning_at (loc, OPT_Wunused_result,
7865 "ignoring return value of %qD, "
7866 "declared with attribute warn_unused_result",
7867 fdecl);
7868 else
7869 warning_at (loc, OPT_Wunused_result,
7870 "ignoring return value of function "
7871 "declared with attribute warn_unused_result");
7872 }
7873 break;
7874
7875 default:
7876 /* Not a container, not a call, or a call whose value is used. */
7877 break;
7878 }
7879 }
7880 }
7881
7882 static unsigned int
7883 run_warn_unused_result (void)
7884 {
7885 do_warn_unused_result (gimple_body (current_function_decl));
7886 return 0;
7887 }
7888
7889 static bool
7890 gate_warn_unused_result (void)
7891 {
7892 return flag_warn_unused_result;
7893 }
7894
/* Pass descriptor for the -Wunused-result diagnostic.  Requires only
   PROP_gimple_any -- it runs on the gimplified body before the CFG is
   built (and before inlining, per do_warn_unused_result's comment).
   Positional initializers for struct opt_pass -- field order must
   match the struct declaration.  */
struct gimple_opt_pass pass_warn_unused_result =
{
  {
    GIMPLE_PASS,
    "*warn_unused_result",              /* name */
    OPTGROUP_NONE,                      /* optinfo_flags */
    gate_warn_unused_result,            /* gate */
    run_warn_unused_result,             /* execute */
    NULL,                               /* sub */
    NULL,                               /* next */
    0,                                  /* static_pass_number */
    TV_NONE,                            /* tv_id */
    PROP_gimple_any,                    /* properties_required */
    0,                                  /* properties_provided */
    0,                                  /* properties_destroyed */
    0,                                  /* todo_flags_start */
    0,                                  /* todo_flags_finish */
  }
};
7914
7915
7916 /* Garbage collection support for edge_def. */
7917
7918 extern void gt_ggc_mx (tree&);
7919 extern void gt_ggc_mx (gimple&);
7920 extern void gt_ggc_mx (rtx&);
7921 extern void gt_ggc_mx (basic_block&);
7922
/* Garbage-collector marking routine for an edge: mark everything the
   edge keeps live -- its source and destination blocks, any pending
   statements/insns queued for insertion on the edge, and the lexical
   BLOCK encoded in the edge's goto locus.  */

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  /* e->insns is a union; which member is live depends on whether we
     are currently in GIMPLE or RTL.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}
7935
7936 /* PCH support for edge_def. */
7937
7938 extern void gt_pch_nx (tree&);
7939 extern void gt_pch_nx (gimple&);
7940 extern void gt_pch_nx (rtx&);
7941 extern void gt_pch_nx (basic_block&);
7942
/* PCH note-pointer routine for an edge: register with the PCH
   machinery the same set of pointers that gt_ggc_mx marks -- the two
   block endpoints, the live member of the insns union, and the
   lexical BLOCK from the goto locus.  */

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  /* e->insns is a union; pick the member matching the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}
7955
/* PCH pointer-relocation routine for an edge: apply OP (with COOKIE)
   to each pointer field, mirroring the walk done by the gt_ggc_mx /
   gt_pch_nx overloads above.  */

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  /* e->insns is a union; operate on the member matching the current IR.  */
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  /* NOTE(review): OP is applied to the local copy BLOCK; any update it
     makes is not written back into e->goto_locus.  Presumably OP only
     needs to visit/record the pointer here -- confirm against the PCH
     relocation machinery.  */
  op (&(block), cookie);
}