Correcting transform_to_exit_first_loop + fix to PR tree-optimization/46886
gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
static void group_case_labels_stmt (gimple);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",                                /* name */
  NULL,                                 /* gate */
  execute_build_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CFG,                          /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg  /* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

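/* As an illustration (an editorial example, not part of this file): in C
   source, a computed goto uses GCC's label-as-value extension,

       void *targets[2] = { &&lab_a, &&lab_b };
       goto *targets[i];

   where lab_a, lab_b and i are hypothetical locals.  The destination of
   the resulting GIMPLE_GOTO is not a LABEL_DECL, which is exactly what
   computed_goto_p tests for; a plain "goto lab_a;" fails the test.  */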

/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for its goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}
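
/* A sketch of the factoring done above, with illustrative block and
   variable names only.  Given computed gotos ending blocks BB1 and BB2:

       BB1: goto *p1;                 BB1: gotovar = p1;
                                           goto factored_lab;
       BB2: goto *p2;       ==>      BB2: gotovar = p2;
                                           goto factored_lab;
                                      factored_lab:
                                           goto *gotovar;

   Only the factored block then needs edges to every potential
   destination, rather than each computed-goto block carrying them.  */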


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            seq = gsi_split_seq_before (&i);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = ggc_alloc_cleared_gimple_bb_info ();
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), 0);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                              (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}
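
/* For example (an illustrative source line, not from this file), in

       if (p) x = 1; else x = 2;

   the then- and else-blocks may share a single locus, so line numbers
   alone cannot separate their execution counts.  Assigning each block a
   distinct discriminator lets a sample-based profiler attribute samples
   to the individual blocks.  */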

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
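
/* A sketch with hypothetical SSA names and labels: a GIMPLE_COND such as

       if (a_1 < b_2) goto <L_then>; else goto <L_else>;

   gets an EDGE_TRUE_VALUE edge to the block holding <L_then> and an
   EDGE_FALSE_VALUE edge to the block holding <L_else>; once the edges
   carry this information, the labels are cleared from the statement,
   as done at the end of make_cond_expr_edges above.  */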


/* Called for each element in the hash table (P) as we delete the
   edge-to-cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block; this will hopefully make even the dataflow
     and undefined variable warnings come out quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  location_t stmt_loc = gimple_location (stmt);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
      assign_discriminator (stmt_loc, label_bb);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user-defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  tree default_case = NULL_TREE;
  tree default_label = NULL_TREE;
  bool has_default;

  /* The default label is always the first case in a switch
     statement after gimplification if it was not optimized
     away.  */
  if (!CASE_LOW (gimple_switch_default_label (stmt))
      && !CASE_HIGH (gimple_switch_default_label (stmt)))
    {
      default_case = gimple_switch_default_label (stmt);
      default_label = CASE_LABEL (default_case);
      has_default = true;
    }
  else
    has_default = false;

  /* Look for possible opportunities to merge cases.  */
  if (has_default)
    i = 1;
  else
    i = 0;
  while (i < old_size)
    {
      tree base_case, base_label, base_high;
      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_label = CASE_LABEL (base_case);

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_label == default_label)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          tree merge_label = CASE_LABEL (merge_case);
          double_int bhp1 = double_int_add (tree_to_double_int (base_high),
                                            double_int_one);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_label == base_label
              && double_int_equal_p (tree_to_double_int (CASE_LOW (merge_case)),
                                     bhp1))
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
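
/* An illustrative run on a hypothetical switch: for

       switch (x) { case 1: case 2: case 3: goto A;
                    case 4: goto D; default: goto D; }

   the code above first discards case 4, which shares the default
   destination D, then merges cases 1, 2 and 3 into the single range
   entry 1..3 (same label, consecutive values), and finally compresses
   the label vector to its new size.  */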

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH | EDGE_PRESERVE))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user-forced labels, or at -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Return true if the var whose chain of uses starts at PTR has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}

/* Return true if the var whose chain of uses starts at PTR has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
                  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
        if (single_use)
          {
            single_use = NULL;
            break;
          }
        single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return !!single_use;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement.  */
                  gcc_checking_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change something from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
                              || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a CALL_EXPR.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (bb, dump_file, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
1954 /* Only optimize if the argument is a label; if the argument is
1955 not a label then we cannot construct a proper CFG.
1956
1957 It may be the case that we only need to allow the LABEL_REF to
1958 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1959 appear inside a LABEL_EXPR just to be safe. */
1960 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1961 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1962 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1963 return NULL;
1964 }
1965
1966 gcc_unreachable ();
1967 }
1968
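/* Illustrative usage sketch (an editorial addition, not part of the
   original source): a pass that has folded the controlling predicate of
   BB to the constant false can ask which successor survives and prune
   the rest.  The helper remove_other_edges is hypothetical.

     edge taken = find_taken_edge (bb, integer_zero_node);
     if (taken)
       remove_other_edges (bb, taken);
*/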
1969 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1970 statement, determine which of the outgoing edges will be taken out of the
1971 block. Return NULL if any edge may be taken. */
1972
1973 static edge
1974 find_taken_edge_computed_goto (basic_block bb, tree val)
1975 {
1976 basic_block dest;
1977 edge e = NULL;
1978
1979 dest = label_to_block (val);
1980 if (dest)
1981 {
1982 e = find_edge (bb, dest);
1983 gcc_assert (e != NULL);
1984 }
1985
1986 return e;
1987 }
1988
1989 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1990 statement, determine which of the two edges will be taken out of the
1991 block. Return NULL if either edge may be taken. */
1992
1993 static edge
1994 find_taken_edge_cond_expr (basic_block bb, tree val)
1995 {
1996 edge true_edge, false_edge;
1997
1998 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1999
2000 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2001 return (integer_zerop (val) ? false_edge : true_edge);
2002 }
2003
2004 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2005 statement, determine which edge will be taken out of the block. Return
2006 NULL if any edge may be taken. */
2007
2008 static edge
2009 find_taken_edge_switch_expr (basic_block bb, tree val)
2010 {
2011 basic_block dest_bb;
2012 edge e;
2013 gimple switch_stmt;
2014 tree taken_case;
2015
2016 switch_stmt = last_stmt (bb);
2017 taken_case = find_case_label_for_value (switch_stmt, val);
2018 dest_bb = label_to_block (CASE_LABEL (taken_case));
2019
2020 e = find_edge (bb, dest_bb);
2021 gcc_assert (e);
2022 return e;
2023 }
2024
2025
2026 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2027 We can make optimal use here of the fact that the case labels are
2028 sorted: We can do a binary search for a case matching VAL. */
2029
2030 static tree
2031 find_case_label_for_value (gimple switch_stmt, tree val)
2032 {
2033 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2034 tree default_case = gimple_switch_default_label (switch_stmt);
2035
2036 for (low = 0, high = n; high - low > 1; )
2037 {
2038 size_t i = (high + low) / 2;
2039 tree t = gimple_switch_label (switch_stmt, i);
2040 int cmp;
2041
2042 /* Cache the result of comparing CASE_LOW and val. */
2043 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2044
2045 if (cmp > 0)
2046 high = i;
2047 else
2048 low = i;
2049
2050 if (CASE_HIGH (t) == NULL)
2051 {
2052 /* A single-valued case label. */
2053 if (cmp == 0)
2054 return t;
2055 }
2056 else
2057 {
2058 /* A case range. We can only handle integer ranges. */
2059 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2060 return t;
2061 }
2062 }
2063
2064 return default_case;
2065 }
2066
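/* Worked example (editorial illustration): for a switch whose sorted
   label vector is { default, case 1, case 5 ... 7, case 10 } and
   VAL == 10, the search above proceeds as

     low=0 high=4:  i=2, t = case 5 ... 7, cmp(5,10) < 0, so low=2;
                    CASE_HIGH is 7 < 10, so no match yet
     low=2 high=4:  i=3, t = case 10, cmp(10,10) == 0, return case 10

   If no label matches, the loop exits with high - low == 1 and the
   default label is returned.  */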
2067
2068 /* Dump a basic block on stderr. */
2069
2070 void
2071 gimple_debug_bb (basic_block bb)
2072 {
2073 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2074 }
2075
2076
2077 /* Dump basic block with index N on stderr. */
2078
2079 basic_block
2080 gimple_debug_bb_n (int n)
2081 {
2082 gimple_debug_bb (BASIC_BLOCK (n));
2083 return BASIC_BLOCK (n);
2084 }
2085
2086
2087 /* Dump the CFG on stderr.
2088
2089 FLAGS are the same as those used by the tree dumping functions
2090 (see TDF_* in tree-pass.h). */
2091
2092 void
2093 gimple_debug_cfg (int flags)
2094 {
2095 gimple_dump_cfg (stderr, flags);
2096 }
2097
2098
2099 /* Dump the program showing basic block boundaries on the given FILE.
2100
2101 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2102 tree.h). */
2103
2104 void
2105 gimple_dump_cfg (FILE *file, int flags)
2106 {
2107 if (flags & TDF_DETAILS)
2108 {
2109 dump_function_header (file, current_function_decl, flags);
2110 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2111 n_basic_blocks, n_edges, last_basic_block);
2112
2113 brief_dump_cfg (file);
2114 fprintf (file, "\n");
2115 }
2116
2117 if (flags & TDF_STATS)
2118 dump_cfg_stats (file);
2119
2120 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2121 }
2122
2123
2124 /* Dump CFG statistics on FILE. */
2125
2126 void
2127 dump_cfg_stats (FILE *file)
2128 {
2129 static long max_num_merged_labels = 0;
2130 unsigned long size, total = 0;
2131 long num_edges;
2132 basic_block bb;
2133 const char * const fmt_str = "%-30s%-13s%12s\n";
2134 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2135 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2136 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2137 const char *funcname
2138 = lang_hooks.decl_printable_name (current_function_decl, 2);
2139
2140
2141 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2142
2143 fprintf (file, "---------------------------------------------------------\n");
2144 fprintf (file, fmt_str, "", " Number of ", "Memory");
2145 fprintf (file, fmt_str, "", " instances ", "used ");
2146 fprintf (file, "---------------------------------------------------------\n");
2147
2148 size = n_basic_blocks * sizeof (struct basic_block_def);
2149 total += size;
2150 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2151 SCALE (size), LABEL (size));
2152
2153 num_edges = 0;
2154 FOR_EACH_BB (bb)
2155 num_edges += EDGE_COUNT (bb->succs);
2156 size = num_edges * sizeof (struct edge_def);
2157 total += size;
2158 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2159
2160 fprintf (file, "---------------------------------------------------------\n");
2161 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2162 LABEL (total));
2163 fprintf (file, "---------------------------------------------------------\n");
2164 fprintf (file, "\n");
2165
2166 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2167 max_num_merged_labels = cfg_stats.num_merged_labels;
2168
2169 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2170 cfg_stats.num_merged_labels, max_num_merged_labels);
2171
2172 fprintf (file, "\n");
2173 }
2174
2175
2176 /* Dump CFG statistics on stderr. Keep extern so that it's always
2177 linked in the final executable. */
2178
2179 DEBUG_FUNCTION void
2180 debug_cfg_stats (void)
2181 {
2182 dump_cfg_stats (stderr);
2183 }
2184
2185
2186 /* Dump the flowgraph to a .vcg FILE. */
2187
2188 static void
2189 gimple_cfg2vcg (FILE *file)
2190 {
2191 edge e;
2192 edge_iterator ei;
2193 basic_block bb;
2194 const char *funcname
2195 = lang_hooks.decl_printable_name (current_function_decl, 2);
2196
2197 /* Write the file header. */
2198 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2199 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2200 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2201
2202 /* Write blocks and edges. */
2203 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2204 {
2205 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2206 e->dest->index);
2207
2208 if (e->flags & EDGE_FAKE)
2209 fprintf (file, " linestyle: dotted priority: 10");
2210 else
2211 fprintf (file, " linestyle: solid priority: 100");
2212
2213 fprintf (file, " }\n");
2214 }
2215 fputc ('\n', file);
2216
2217 FOR_EACH_BB (bb)
2218 {
2219 enum gimple_code head_code, end_code;
2220 const char *head_name, *end_name;
2221 int head_line = 0;
2222 int end_line = 0;
2223 gimple first = first_stmt (bb);
2224 gimple last = last_stmt (bb);
2225
2226 if (first)
2227 {
2228 head_code = gimple_code (first);
2229 head_name = gimple_code_name[head_code];
2230 head_line = get_lineno (first);
2231 }
2232 else
2233 head_name = "no-statement";
2234
2235 if (last)
2236 {
2237 end_code = gimple_code (last);
2238 end_name = gimple_code_name[end_code];
2239 end_line = get_lineno (last);
2240 }
2241 else
2242 end_name = "no-statement";
2243
2244 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2245 bb->index, bb->index, head_name, head_line, end_name,
2246 end_line);
2247
2248 FOR_EACH_EDGE (e, ei, bb->succs)
2249 {
2250 if (e->dest == EXIT_BLOCK_PTR)
2251 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2252 else
2253 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2254
2255 if (e->flags & EDGE_FAKE)
2256 fprintf (file, " priority: 10 linestyle: dotted");
2257 else
2258 fprintf (file, " priority: 100 linestyle: solid");
2259
2260 fprintf (file, " }\n");
2261 }
2262
2263 if (bb->next_bb != EXIT_BLOCK_PTR)
2264 fputc ('\n', file);
2265 }
2266
2267 fputs ("}\n\n", file);
2268 }
2269
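/* For illustration (editorial addition), the VCG text emitted for a
   small function looks roughly like

     graph: { title: "foo"
     node: { title: "ENTRY" label: "ENTRY" }
     node: { title: "EXIT" label: "EXIT" }
     edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }
     node: { title: "2" label: "#2\nGIMPLE_COND (3)\nGIMPLE_COND (3)"}
     ...
     }

   which VCG-aware viewers can render as a drawing of the CFG.  */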
2270
2271
2272 /*---------------------------------------------------------------------------
2273 Miscellaneous helpers
2274 ---------------------------------------------------------------------------*/
2275
2276 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2277 flow. Transfers of control flow associated with EH are excluded. */
2278
2279 static bool
2280 call_can_make_abnormal_goto (gimple t)
2281 {
2282 /* If the function has no non-local labels, then a call cannot make an
2283 abnormal transfer of control. */
2284 if (!cfun->has_nonlocal_label)
2285 return false;
2286
2287 /* Likewise if the call has no side effects. */
2288 if (!gimple_has_side_effects (t))
2289 return false;
2290
2291 /* Likewise if the called function is leaf. */
2292 if (gimple_call_flags (t) & ECF_LEAF)
2293 return false;
2294
2295 return true;
2296 }
2297
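/* Illustrative GNU C source (editorial addition) for which this
   predicate matters; the names f, g and h are made up:

     void f (void (*h) (void (*) (void)))
     {
       __label__ out;
       void g (void) { goto out; }
       h (g);
      out:;
     }

   The nonlocal goto in the nested function g sets
   cfun->has_nonlocal_label for f, so the call h (g), which has side
   effects and is not leaf, may transfer control abnormally to out.  */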
2298
2299 /* Return true if T can make an abnormal transfer of control flow.
2300 Transfers of control flow associated with EH are excluded. */
2301
2302 bool
2303 stmt_can_make_abnormal_goto (gimple t)
2304 {
2305 if (computed_goto_p (t))
2306 return true;
2307 if (is_gimple_call (t))
2308 return call_can_make_abnormal_goto (t);
2309 return false;
2310 }
2311
2312
2313 /* Return true if T represents a stmt that always transfers control. */
2314
2315 bool
2316 is_ctrl_stmt (gimple t)
2317 {
2318 switch (gimple_code (t))
2319 {
2320 case GIMPLE_COND:
2321 case GIMPLE_SWITCH:
2322 case GIMPLE_GOTO:
2323 case GIMPLE_RETURN:
2324 case GIMPLE_RESX:
2325 return true;
2326 default:
2327 return false;
2328 }
2329 }
2330
2331
2332 /* Return true if T is a statement that may alter the flow of control
2333 (e.g., a call to a non-returning function). */
2334
2335 bool
2336 is_ctrl_altering_stmt (gimple t)
2337 {
2338 gcc_assert (t);
2339
2340 switch (gimple_code (t))
2341 {
2342 case GIMPLE_CALL:
2343 {
2344 int flags = gimple_call_flags (t);
2345
2346 /* A call alters control flow if it can make an abnormal goto. */
2347 if (call_can_make_abnormal_goto (t))
2348 return true;
2349
2350 /* A call also alters control flow if it does not return. */
2351 if (flags & ECF_NORETURN)
2352 return true;
2353
2354 /* TM ending statements have backedges out of the transaction.
2355 Return true so we split the basic block containing them.
2356 Note that the TM_BUILTIN test is merely an optimization. */
2357 if ((flags & ECF_TM_BUILTIN)
2358 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2359 return true;
2360
2361 /* A BUILT_IN_RETURN call is the same as a return statement. */
2362 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2363 return true;
2364 }
2365 break;
2366
2367 case GIMPLE_EH_DISPATCH:
2368 /* EH_DISPATCH branches to the individual catch handlers at
2369 this level of a try or allowed-exceptions region. It can
2370 fallthru to the next statement as well. */
2371 return true;
2372
2373 case GIMPLE_ASM:
2374 if (gimple_asm_nlabels (t) > 0)
2375 return true;
2376 break;
2377
2378 CASE_GIMPLE_OMP:
2379 /* OpenMP directives alter control flow. */
2380 return true;
2381
2382 case GIMPLE_TRANSACTION:
2383 /* A transaction start alters control flow. */
2384 return true;
2385
2386 default:
2387 break;
2388 }
2389
2390 /* If a statement can throw, it alters control flow. */
2391 return stmt_can_throw_internal (t);
2392 }
2393
2394
2395 /* Return true if T is a simple local goto. */
2396
2397 bool
2398 simple_goto_p (gimple t)
2399 {
2400 return (gimple_code (t) == GIMPLE_GOTO
2401 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2402 }
2403
2404
2405 /* Return true if STMT should start a new basic block. PREV_STMT is
2406 the statement preceding STMT. It is used when STMT is a label or a
2407 case label. Labels should only start a new basic block if their
2408 previous statement wasn't a label. Otherwise, a sequence of labels
2409 would generate unnecessary basic blocks that only contain a single
2410 label. */
2411
2412 static inline bool
2413 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2414 {
2415 if (stmt == NULL)
2416 return false;
2417
2418 /* Labels start a new basic block only if the preceding statement
2419 wasn't a label of the same type. This prevents the creation of
2420 consecutive blocks that have nothing but a single label. */
2421 if (gimple_code (stmt) == GIMPLE_LABEL)
2422 {
2423 /* Nonlocal and computed GOTO targets always start a new block. */
2424 if (DECL_NONLOCAL (gimple_label_label (stmt))
2425 || FORCED_LABEL (gimple_label_label (stmt)))
2426 return true;
2427
2428 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2429 {
2430 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2431 return true;
2432
2433 cfg_stats.num_merged_labels++;
2434 return false;
2435 }
2436 else
2437 return true;
2438 }
2439
2440 return false;
2441 }
2442
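/* Example (editorial illustration): in the GIMPLE sequence

     L1:
     L2:
       x_1 = a_2 + 1;

   L2 does not start a new block; both labels end up in the same basic
   block and cfg_stats.num_merged_labels is incremented.  Had L1 been a
   nonlocal label, L2 would have been forced to start its own block.  */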
2443
2444 /* Return true if T should end a basic block. */
2445
2446 bool
2447 stmt_ends_bb_p (gimple t)
2448 {
2449 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2450 }
2451
2452 /* Remove block annotations and other data structures. */
2453
2454 void
2455 delete_tree_cfg_annotations (void)
2456 {
2457 label_to_block_map = NULL;
2458 }
2459
2460
2461 /* Return the first statement in basic block BB. */
2462
2463 gimple
2464 first_stmt (basic_block bb)
2465 {
2466 gimple_stmt_iterator i = gsi_start_bb (bb);
2467 gimple stmt = NULL;
2468
2469 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2470 {
2471 gsi_next (&i);
2472 stmt = NULL;
2473 }
2474 return stmt;
2475 }
2476
2477 /* Return the first non-label statement in basic block BB. */
2478
2479 static gimple
2480 first_non_label_stmt (basic_block bb)
2481 {
2482 gimple_stmt_iterator i = gsi_start_bb (bb);
2483 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2484 gsi_next (&i);
2485 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2486 }
2487
2488 /* Return the last statement in basic block BB. */
2489
2490 gimple
2491 last_stmt (basic_block bb)
2492 {
2493 gimple_stmt_iterator i = gsi_last_bb (bb);
2494 gimple stmt = NULL;
2495
2496 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2497 {
2498 gsi_prev (&i);
2499 stmt = NULL;
2500 }
2501 return stmt;
2502 }
2503
2504 /* Return the last statement of an otherwise empty block. Return NULL
2505 if the block is totally empty, or if it contains more than one
2506 statement. */
2507
2508 gimple
2509 last_and_only_stmt (basic_block bb)
2510 {
2511 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2512 gimple last, prev;
2513
2514 if (gsi_end_p (i))
2515 return NULL;
2516
2517 last = gsi_stmt (i);
2518 gsi_prev_nondebug (&i);
2519 if (gsi_end_p (i))
2520 return last;
2521
2522 /* Empty statements should no longer appear in the instruction stream.
2523 Everything that might have appeared before should be deleted by
2524 remove_useless_stmts, and the optimizers should just gsi_remove
2525 instead of smashing with build_empty_stmt.
2526
2527 Thus the only thing that should appear here in a block containing
2528 one executable statement is a label. */
2529 prev = gsi_stmt (i);
2530 if (gimple_code (prev) == GIMPLE_LABEL)
2531 return last;
2532 else
2533 return NULL;
2534 }
2535
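/* Example (editorial illustration): for a block containing only

     L1:
       return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN.  Had the block also
   held an assignment before the return, NULL would be returned.  Debug
   stmts are skipped by the nondebug iterators and so do not count
   against the "only" requirement.  */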
2536 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2537
2538 static void
2539 reinstall_phi_args (edge new_edge, edge old_edge)
2540 {
2541 edge_var_map_vector v;
2542 edge_var_map *vm;
2543 int i;
2544 gimple_stmt_iterator phis;
2545
2546 v = redirect_edge_var_map_vector (old_edge);
2547 if (!v)
2548 return;
2549
2550 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2551 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2552 i++, gsi_next (&phis))
2553 {
2554 gimple phi = gsi_stmt (phis);
2555 tree result = redirect_edge_var_map_result (vm);
2556 tree arg = redirect_edge_var_map_def (vm);
2557
2558 gcc_assert (result == gimple_phi_result (phi));
2559
2560 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2561 }
2562
2563 redirect_edge_var_map_clear (old_edge);
2564 }
2565
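/* Example (editorial illustration): if DEST had the PHI node

     x_3 = PHI <x_1(A), x_2(B)>

   and the edge A->DEST was redirected, the pair (x_3, x_1) is queued on
   the old edge by the redirection machinery; reinstall_phi_args then
   re-adds x_1 as the argument of x_3 for the replacement edge.  */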
2566 /* Returns the basic block after which the new basic block created
2567 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2568 near its "logical" location. This is of most help to humans looking
2569 at debugging dumps. */
2570
2571 static basic_block
2572 split_edge_bb_loc (edge edge_in)
2573 {
2574 basic_block dest = edge_in->dest;
2575 basic_block dest_prev = dest->prev_bb;
2576
2577 if (dest_prev)
2578 {
2579 edge e = find_edge (dest_prev, dest);
2580 if (e && !(e->flags & EDGE_COMPLEX))
2581 return edge_in->src;
2582 }
2583 return dest_prev;
2584 }
2585
2586 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2587 Abort on abnormal edges. */
2588
2589 static basic_block
2590 gimple_split_edge (edge edge_in)
2591 {
2592 basic_block new_bb, after_bb, dest;
2593 edge new_edge, e;
2594
2595 /* Abnormal edges cannot be split. */
2596 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2597
2598 dest = edge_in->dest;
2599
2600 after_bb = split_edge_bb_loc (edge_in);
2601
2602 new_bb = create_empty_bb (after_bb);
2603 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2604 new_bb->count = edge_in->count;
2605 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2606 new_edge->probability = REG_BR_PROB_BASE;
2607 new_edge->count = edge_in->count;
2608
2609 e = redirect_edge_and_branch (edge_in, new_bb);
2610 gcc_assert (e == edge_in);
2611 reinstall_phi_args (new_edge, e);
2612
2613 return new_bb;
2614 }
2615
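/* Illustration (editorial addition): if blocks A and B both branch to C,
   the edge A->C is critical (A has two successors, C two predecessors).
   gimple_split_edge inserts a new empty block N so the CFG contains
   A->N->C instead, and reinstall_phi_args moves the PHI argument that C
   received from A onto the new N->C edge.  The new block inherits the
   frequency and count of the split edge, keeping profile data
   consistent.  */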
2616
2617 /* Verify properties of the address expression T with base object BASE. */
2618
2619 static tree
2620 verify_address (tree t, tree base)
2621 {
2622 bool old_constant;
2623 bool old_side_effects;
2624 bool new_constant;
2625 bool new_side_effects;
2626
2627 old_constant = TREE_CONSTANT (t);
2628 old_side_effects = TREE_SIDE_EFFECTS (t);
2629
2630 recompute_tree_invariant_for_addr_expr (t);
2631 new_side_effects = TREE_SIDE_EFFECTS (t);
2632 new_constant = TREE_CONSTANT (t);
2633
2634 if (old_constant != new_constant)
2635 {
2636 error ("constant not recomputed when ADDR_EXPR changed");
2637 return t;
2638 }
2639 if (old_side_effects != new_side_effects)
2640 {
2641 error ("side effects not recomputed when ADDR_EXPR changed");
2642 return t;
2643 }
2644
2645 if (!(TREE_CODE (base) == VAR_DECL
2646 || TREE_CODE (base) == PARM_DECL
2647 || TREE_CODE (base) == RESULT_DECL))
2648 return NULL_TREE;
2649
2650 if (DECL_GIMPLE_REG_P (base))
2651 {
2652 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2653 return base;
2654 }
2655
2656 return NULL_TREE;
2657 }
2658
2659 /* Callback for walk_tree, check that all elements with address taken are
2660 properly noticed as such. The DATA argument is unused (note the
2661 ATTRIBUTE_UNUSED on the parameter). */
2662
2663 static tree
2664 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2665 {
2666 tree t = *tp, x;
2667
2668 if (TYPE_P (t))
2669 *walk_subtrees = 0;
2670
2671 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2672 #define CHECK_OP(N, MSG) \
2673 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2674 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2675
2676 switch (TREE_CODE (t))
2677 {
2678 case SSA_NAME:
2679 if (SSA_NAME_IN_FREE_LIST (t))
2680 {
2681 error ("SSA name in freelist but still referenced");
2682 return *tp;
2683 }
2684 break;
2685
2686 case INDIRECT_REF:
2687 error ("INDIRECT_REF in gimple IL");
2688 return t;
2689
2690 case MEM_REF:
2691 x = TREE_OPERAND (t, 0);
2692 if (!POINTER_TYPE_P (TREE_TYPE (x))
2693 || !is_gimple_mem_ref_addr (x))
2694 {
2695 error ("invalid first operand of MEM_REF");
2696 return x;
2697 }
2698 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2699 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2700 {
2701 error ("invalid offset operand of MEM_REF");
2702 return TREE_OPERAND (t, 1);
2703 }
2704 if (TREE_CODE (x) == ADDR_EXPR
2705 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2706 return x;
2707 *walk_subtrees = 0;
2708 break;
2709
2710 case ASSERT_EXPR:
2711 x = fold (ASSERT_EXPR_COND (t));
2712 if (x == boolean_false_node)
2713 {
2714 error ("ASSERT_EXPR with an always-false condition");
2715 return *tp;
2716 }
2717 break;
2718
2719 case MODIFY_EXPR:
2720 error ("MODIFY_EXPR not expected while having tuples");
2721 return *tp;
2722
2723 case ADDR_EXPR:
2724 {
2725 tree tem;
2726
2727 gcc_assert (is_gimple_address (t));
2728
2729 /* Skip any references (they will be checked when we recurse down the
2730 tree) and ensure that any variable used as a prefix is marked
2731 addressable. */
2732 for (x = TREE_OPERAND (t, 0);
2733 handled_component_p (x);
2734 x = TREE_OPERAND (x, 0))
2735 ;
2736
2737 if ((tem = verify_address (t, x)))
2738 return tem;
2739
2740 if (!(TREE_CODE (x) == VAR_DECL
2741 || TREE_CODE (x) == PARM_DECL
2742 || TREE_CODE (x) == RESULT_DECL))
2743 return NULL;
2744
2745 if (!TREE_ADDRESSABLE (x))
2746 {
2747 error ("address taken, but ADDRESSABLE bit not set");
2748 return x;
2749 }
2750
2751 break;
2752 }
2753
2754 case COND_EXPR:
2755 x = COND_EXPR_COND (t);
2756 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2757 {
2758 error ("non-integral used in condition");
2759 return x;
2760 }
2761 if (!is_gimple_condexpr (x))
2762 {
2763 error ("invalid conditional operand");
2764 return x;
2765 }
2766 break;
2767
2768 case NON_LVALUE_EXPR:
2769 case TRUTH_NOT_EXPR:
2770 gcc_unreachable ();
2771
2772 CASE_CONVERT:
2773 case FIX_TRUNC_EXPR:
2774 case FLOAT_EXPR:
2775 case NEGATE_EXPR:
2776 case ABS_EXPR:
2777 case BIT_NOT_EXPR:
2778 CHECK_OP (0, "invalid operand to unary operator");
2779 break;
2780
2781 case REALPART_EXPR:
2782 case IMAGPART_EXPR:
2783 case COMPONENT_REF:
2784 case ARRAY_REF:
2785 case ARRAY_RANGE_REF:
2786 case BIT_FIELD_REF:
2787 case VIEW_CONVERT_EXPR:
2788 /* We have a nest of references. Verify that each of the operands
2789 that determine where to reference is either a constant or a variable,
2790 verify that the base is valid, and then show we've already checked
2791 the subtrees. */
2792 while (handled_component_p (t))
2793 {
2794 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2795 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2796 else if (TREE_CODE (t) == ARRAY_REF
2797 || TREE_CODE (t) == ARRAY_RANGE_REF)
2798 {
2799 CHECK_OP (1, "invalid array index");
2800 if (TREE_OPERAND (t, 2))
2801 CHECK_OP (2, "invalid array lower bound");
2802 if (TREE_OPERAND (t, 3))
2803 CHECK_OP (3, "invalid array stride");
2804 }
2805 else if (TREE_CODE (t) == BIT_FIELD_REF)
2806 {
2807 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2808 || !host_integerp (TREE_OPERAND (t, 2), 1))
2809 {
2810 error ("invalid position or size operand to BIT_FIELD_REF");
2811 return t;
2812 }
2813 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2814 && (TYPE_PRECISION (TREE_TYPE (t))
2815 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2816 {
2817 error ("integral result type precision does not match "
2818 "field size of BIT_FIELD_REF");
2819 return t;
2820 }
2821 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2822 && !AGGREGATE_TYPE_P (TREE_TYPE (t))
2823 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2824 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2825 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2826 {
2827 error ("mode precision of non-integral result does not "
2828 "match field size of BIT_FIELD_REF");
2829 return t;
2830 }
2831 }
2832
2833 t = TREE_OPERAND (t, 0);
2834 }
2835
2836 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2837 {
2838 error ("invalid reference prefix");
2839 return t;
2840 }
2841 *walk_subtrees = 0;
2842 break;
2843 case PLUS_EXPR:
2844 case MINUS_EXPR:
2845 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2846 should be done using POINTER_PLUS_EXPR. */
2847 if (POINTER_TYPE_P (TREE_TYPE (t)))
2848 {
2849 error ("invalid operand to plus/minus, type is a pointer");
2850 return t;
2851 }
2852 CHECK_OP (0, "invalid operand to binary operator");
2853 CHECK_OP (1, "invalid operand to binary operator");
2854 break;
2855
2856 case POINTER_PLUS_EXPR:
2857 /* Check to make sure the first operand is a pointer or reference type. */
2858 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2859 {
2860 error ("invalid operand to pointer plus, first operand is not a pointer");
2861 return t;
2862 }
2863 /* Check to make sure the second operand is a ptrofftype. */
2864 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2865 {
2866 error ("invalid operand to pointer plus, second operand is not an "
2867 "integer type of appropriate width");
2868 return t;
2869 }
2870 /* FALLTHROUGH */
2871 case LT_EXPR:
2872 case LE_EXPR:
2873 case GT_EXPR:
2874 case GE_EXPR:
2875 case EQ_EXPR:
2876 case NE_EXPR:
2877 case UNORDERED_EXPR:
2878 case ORDERED_EXPR:
2879 case UNLT_EXPR:
2880 case UNLE_EXPR:
2881 case UNGT_EXPR:
2882 case UNGE_EXPR:
2883 case UNEQ_EXPR:
2884 case LTGT_EXPR:
2885 case MULT_EXPR:
2886 case TRUNC_DIV_EXPR:
2887 case CEIL_DIV_EXPR:
2888 case FLOOR_DIV_EXPR:
2889 case ROUND_DIV_EXPR:
2890 case TRUNC_MOD_EXPR:
2891 case CEIL_MOD_EXPR:
2892 case FLOOR_MOD_EXPR:
2893 case ROUND_MOD_EXPR:
2894 case RDIV_EXPR:
2895 case EXACT_DIV_EXPR:
2896 case MIN_EXPR:
2897 case MAX_EXPR:
2898 case LSHIFT_EXPR:
2899 case RSHIFT_EXPR:
2900 case LROTATE_EXPR:
2901 case RROTATE_EXPR:
2902 case BIT_IOR_EXPR:
2903 case BIT_XOR_EXPR:
2904 case BIT_AND_EXPR:
2905 CHECK_OP (0, "invalid operand to binary operator");
2906 CHECK_OP (1, "invalid operand to binary operator");
2907 break;
2908
2909 case CONSTRUCTOR:
2910 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2911 *walk_subtrees = 0;
2912 break;
2913
2914 case CASE_LABEL_EXPR:
2915 if (CASE_CHAIN (t))
2916 {
2917 error ("invalid CASE_CHAIN");
2918 return t;
2919 }
2920 break;
2921
2922 default:
2923 break;
2924 }
2925 return NULL;
2926
2927 #undef CHECK_OP
2928 }
2929
2930
2931 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2932 Returns true if there is an error, otherwise false. */
2933
2934 static bool
2935 verify_types_in_gimple_min_lval (tree expr)
2936 {
2937 tree op;
2938
2939 if (is_gimple_id (expr))
2940 return false;
2941
2942 if (TREE_CODE (expr) != TARGET_MEM_REF
2943 && TREE_CODE (expr) != MEM_REF)
2944 {
2945 error ("invalid expression for min lvalue");
2946 return true;
2947 }
2948
2949 /* TARGET_MEM_REFs are strange beasts. */
2950 if (TREE_CODE (expr) == TARGET_MEM_REF)
2951 return false;
2952
2953 op = TREE_OPERAND (expr, 0);
2954 if (!is_gimple_val (op))
2955 {
2956 error ("invalid operand in indirect reference");
2957 debug_generic_stmt (op);
2958 return true;
2959 }
2960 /* Memory references now generally can involve a value conversion. */
2961
2962 return false;
2963 }
2964
2965 /* Verify if EXPR is a valid GIMPLE reference expression. If
2966 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2967 if there is an error, otherwise false. */
2968
2969 static bool
2970 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2971 {
2972 while (handled_component_p (expr))
2973 {
2974 tree op = TREE_OPERAND (expr, 0);
2975
2976 if (TREE_CODE (expr) == ARRAY_REF
2977 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2978 {
2979 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2980 || (TREE_OPERAND (expr, 2)
2981 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2982 || (TREE_OPERAND (expr, 3)
2983 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2984 {
2985 error ("invalid operands to array reference");
2986 debug_generic_stmt (expr);
2987 return true;
2988 }
2989 }
2990
2991 /* Verify if the reference array element types are compatible. */
2992 if (TREE_CODE (expr) == ARRAY_REF
2993 && !useless_type_conversion_p (TREE_TYPE (expr),
2994 TREE_TYPE (TREE_TYPE (op))))
2995 {
2996 error ("type mismatch in array reference");
2997 debug_generic_stmt (TREE_TYPE (expr));
2998 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2999 return true;
3000 }
3001 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3002 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3003 TREE_TYPE (TREE_TYPE (op))))
3004 {
3005 error ("type mismatch in array range reference");
3006 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3007 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3008 return true;
3009 }
3010
3011 if ((TREE_CODE (expr) == REALPART_EXPR
3012 || TREE_CODE (expr) == IMAGPART_EXPR)
3013 && !useless_type_conversion_p (TREE_TYPE (expr),
3014 TREE_TYPE (TREE_TYPE (op))))
3015 {
3016 error ("type mismatch in real/imagpart reference");
3017 debug_generic_stmt (TREE_TYPE (expr));
3018 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3019 return true;
3020 }
3021
3022 if (TREE_CODE (expr) == COMPONENT_REF
3023 && !useless_type_conversion_p (TREE_TYPE (expr),
3024 TREE_TYPE (TREE_OPERAND (expr, 1))))
3025 {
3026 error ("type mismatch in component reference");
3027 debug_generic_stmt (TREE_TYPE (expr));
3028 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3029 return true;
3030 }
3031
3032 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3033 {
3034 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3035 that their operand is not an SSA name or an invariant when
3036 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3037 bug). Otherwise there is nothing to verify, gross mismatches at
3038 most invoke undefined behavior. */
3039 if (require_lvalue
3040 && (TREE_CODE (op) == SSA_NAME
3041 || is_gimple_min_invariant (op)))
3042 {
3043 error ("conversion of an SSA_NAME on the left hand side");
3044 debug_generic_stmt (expr);
3045 return true;
3046 }
3047 else if (TREE_CODE (op) == SSA_NAME
3048 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3049 {
3050 error ("conversion of register to a different size");
3051 debug_generic_stmt (expr);
3052 return true;
3053 }
3054 else if (!handled_component_p (op))
3055 return false;
3056 }
3057
3058 expr = op;
3059 }
3060
3061 if (TREE_CODE (expr) == MEM_REF)
3062 {
3063 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3064 {
3065 error ("invalid address operand in MEM_REF");
3066 debug_generic_stmt (expr);
3067 return true;
3068 }
3069 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3070 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3071 {
3072 error ("invalid offset operand in MEM_REF");
3073 debug_generic_stmt (expr);
3074 return true;
3075 }
3076 }
3077 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3078 {
3079 if (!TMR_BASE (expr)
3080 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3081 {
3082 error ("invalid address operand in TARGET_MEM_REF");
3083 return true;
3084 }
3085 if (!TMR_OFFSET (expr)
3086 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3087 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3088 {
3089 error ("invalid offset operand in TARGET_MEM_REF");
3090 debug_generic_stmt (expr);
3091 return true;
3092 }
3093 }
3094
3095 return ((require_lvalue || !is_gimple_min_invariant (expr))
3096 && verify_types_in_gimple_min_lval (expr));
3097 }
3098
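/* Example (editorial illustration): verifying the reference a.b[i_1].c
   walks the chain COMPONENT_REF -> ARRAY_REF -> COMPONENT_REF, checking
   at each step that the indices are gimple values and that the type of
   each reference matches the type derived from its operand, and finally
   vets the base "a" via verify_types_in_gimple_min_lval.  */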
3099 /* Returns true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ) list
3100 is trivially convertible to DEST, or if that list is empty. */
3101
3102 static bool
3103 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3104 {
3105 tree src;
3106
3107 if (!TYPE_POINTER_TO (src_obj))
3108 return true;
3109
3110 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3111 if (useless_type_conversion_p (dest, src))
3112 return true;
3113
3114 return false;
3115 }
3116
3117 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3118 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3119
3120 static bool
3121 valid_fixed_convert_types_p (tree type1, tree type2)
3122 {
3123 return (FIXED_POINT_TYPE_P (type1)
3124 && (INTEGRAL_TYPE_P (type2)
3125 || SCALAR_FLOAT_TYPE_P (type2)
3126 || FIXED_POINT_TYPE_P (type2)));
3127 }
3128
3129 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3130 is a problem, otherwise false. */
3131
3132 static bool
3133 verify_gimple_call (gimple stmt)
3134 {
3135 tree fn = gimple_call_fn (stmt);
3136 tree fntype, fndecl;
3137 unsigned i;
3138
3139 if (gimple_call_internal_p (stmt))
3140 {
3141 if (fn)
3142 {
3143 error ("gimple call has two targets");
3144 debug_generic_stmt (fn);
3145 return true;
3146 }
3147 }
3148 else
3149 {
3150 if (!fn)
3151 {
3152 error ("gimple call has no target");
3153 return true;
3154 }
3155 }
3156
3157 if (fn && !is_gimple_call_addr (fn))
3158 {
3159 error ("invalid function in gimple call");
3160 debug_generic_stmt (fn);
3161 return true;
3162 }
3163
3164 if (fn
3165 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3166 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3167 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3168 {
3169 error ("non-function in gimple call");
3170 return true;
3171 }
3172
3173 fndecl = gimple_call_fndecl (stmt);
3174 if (fndecl
3175 && TREE_CODE (fndecl) == FUNCTION_DECL
3176 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3177 && !DECL_PURE_P (fndecl)
3178 && !TREE_READONLY (fndecl))
3179 {
3180 error ("invalid pure const state for function");
3181 return true;
3182 }
3183
3184 if (gimple_call_lhs (stmt)
3185 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3186 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3187 {
3188 error ("invalid LHS in gimple call");
3189 return true;
3190 }
3191
3192 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3193 {
3194 error ("LHS in noreturn call");
3195 return true;
3196 }
3197
3198 fntype = gimple_call_fntype (stmt);
3199 if (fntype
3200 && gimple_call_lhs (stmt)
3201 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3202 TREE_TYPE (fntype))
3203 /* ??? At least C++ misses conversions at assignments from
3204 void * call results.
3205 ??? Java is completely off. Especially with functions
3206 returning java.lang.Object.
3207 For now simply allow arbitrary pointer type conversions. */
3208 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3209 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3210 {
3211 error ("invalid conversion in gimple call");
3212 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3213 debug_generic_stmt (TREE_TYPE (fntype));
3214 return true;
3215 }
3216
3217 if (gimple_call_chain (stmt)
3218 && !is_gimple_val (gimple_call_chain (stmt)))
3219 {
3220 error ("invalid static chain in gimple call");
3221 debug_generic_stmt (gimple_call_chain (stmt));
3222 return true;
3223 }
3224
3225 /* If there is a static chain argument, this should not be an indirect
3226 call, and the decl should have DECL_STATIC_CHAIN set. */
3227 if (gimple_call_chain (stmt))
3228 {
3229 if (!gimple_call_fndecl (stmt))
3230 {
3231 error ("static chain in indirect gimple call");
3232 return true;
3233 }
3234 fn = TREE_OPERAND (fn, 0);
3235
3236 if (!DECL_STATIC_CHAIN (fn))
3237 {
3238 error ("static chain with function that doesn%'t use one");
3239 return true;
3240 }
3241 }
3242
3243 /* ??? The C frontend passes unpromoted arguments in case it
3244 didn't see a function declaration before the call. So for now
3245 leave the call arguments mostly unverified. Once we gimplify
3246 unit-at-a-time we have a chance to fix this. */
3247
3248 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3249 {
3250 tree arg = gimple_call_arg (stmt, i);
3251 if ((is_gimple_reg_type (TREE_TYPE (arg))
3252 && !is_gimple_val (arg))
3253 || (!is_gimple_reg_type (TREE_TYPE (arg))
3254 && !is_gimple_lvalue (arg)))
3255 {
3256 error ("invalid argument to gimple call");
3257 debug_generic_expr (arg);
3258 return true;
3259 }
3260 }
3261
3262 return false;
3263 }
3264
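/* Example (editorial illustration): GNU C nested functions are the
   usual source of static chains,

     void f (void) { int i = 0; void g (void) { i++; } g (); }

   where the call g () passes f's frame as the static chain and g's
   FUNCTION_DECL has DECL_STATIC_CHAIN set.  An indirect call carrying a
   static chain would be rejected above, since the callee cannot be
   checked.  */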
3265 /* Verifies the gimple comparison with the result type TYPE and
3266 the operands OP0 and OP1. */
3267
3268 static bool
3269 verify_gimple_comparison (tree type, tree op0, tree op1)
3270 {
3271 tree op0_type = TREE_TYPE (op0);
3272 tree op1_type = TREE_TYPE (op1);
3273
3274 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3275 {
3276 error ("invalid operands in gimple comparison");
3277 return true;
3278 }
3279
3280 /* For comparisons we do not record the effective type the
3281 comparison is carried out in. Instead we require that either
3282 the first operand is trivially convertible into the second,
3283 or the other way around. Because we special-case pointers to
3284 void, we also allow comparisons of pointers that share the
3285 same mode. */
3286 if (!useless_type_conversion_p (op0_type, op1_type)
3287 && !useless_type_conversion_p (op1_type, op0_type)
3288 && (!POINTER_TYPE_P (op0_type)
3289 || !POINTER_TYPE_P (op1_type)
3290 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3291 {
3292 error ("mismatching comparison operand types");
3293 debug_generic_expr (op0_type);
3294 debug_generic_expr (op1_type);
3295 return true;
3296 }
3297
3298 /* The resulting type of a comparison may be an effective boolean type. */
3299 if (INTEGRAL_TYPE_P (type)
3300 && (TREE_CODE (type) == BOOLEAN_TYPE
3301 || TYPE_PRECISION (type) == 1))
3302 ;
3303 /* Or an integer vector type with the same size and element count
3304 as the comparison operand types. */
3305 else if (TREE_CODE (type) == VECTOR_TYPE
3306 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3307 {
3308 if (TREE_CODE (op0_type) != VECTOR_TYPE
3309 || TREE_CODE (op1_type) != VECTOR_TYPE)
3310 {
3311 error ("non-vector operands in vector comparison");
3312 debug_generic_expr (op0_type);
3313 debug_generic_expr (op1_type);
3314 return true;
3315 }
3316
3317 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3318 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3319 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type)))))
3320 {
3321 error ("invalid vector comparison resulting type");
3322 debug_generic_expr (type);
3323 return true;
3324 }
3325 }
3326 else
3327 {
3328 error ("bogus comparison result type");
3329 debug_generic_expr (type);
3330 return true;
3331 }
3332
3333 return false;
3334 }
3335
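/* Examples (editorial illustration) of result types this accepts:

     _Bool b;          b = x_1 < y_2;
     vector(4) int m;  m = v_1 == w_2;

   where the vector result has the same number of subparts and element
   size as the vector operands.  A scalar result such as double, or a
   vector with a different element count, would be rejected by the
   checks above.  */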
3336 /* Verify a gimple assignment statement STMT with a unary rhs.
3337 Returns true if anything is wrong. */
3338
3339 static bool
3340 verify_gimple_assign_unary (gimple stmt)
3341 {
3342 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3343 tree lhs = gimple_assign_lhs (stmt);
3344 tree lhs_type = TREE_TYPE (lhs);
3345 tree rhs1 = gimple_assign_rhs1 (stmt);
3346 tree rhs1_type = TREE_TYPE (rhs1);
3347
3348 if (!is_gimple_reg (lhs))
3349 {
3350 error ("non-register as LHS of unary operation");
3351 return true;
3352 }
3353
3354 if (!is_gimple_val (rhs1))
3355 {
3356 error ("invalid operand in unary operation");
3357 return true;
3358 }
3359
3360 /* First handle conversions. */
3361 switch (rhs_code)
3362 {
3363 CASE_CONVERT:
3364 {
3365 /* Allow conversions from pointer type to integral type only if
3366 there is no sign or zero extension involved.
3367 For targets where the precision of ptrofftype doesn't match that
3368 of pointers we need to allow arbitrary conversions to ptrofftype. */
3369 if ((POINTER_TYPE_P (lhs_type)
3370 && INTEGRAL_TYPE_P (rhs1_type))
3371 || (POINTER_TYPE_P (rhs1_type)
3372 && INTEGRAL_TYPE_P (lhs_type)
3373 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3374 || ptrofftype_p (sizetype))))
3375 return false;
3376
3377 /* Allow conversion from integer to offset type and vice versa. */
3378 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3379 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3380 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3381 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3382 return false;
3383
3384 /* Otherwise assert we are converting between types of the
3385 same kind. */
3386 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3387 {
3388 error ("invalid types in nop conversion");
3389 debug_generic_expr (lhs_type);
3390 debug_generic_expr (rhs1_type);
3391 return true;
3392 }
3393
3394 return false;
3395 }
3396
3397 case ADDR_SPACE_CONVERT_EXPR:
3398 {
3399 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3400 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3401 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3402 {
3403 error ("invalid types in address space conversion");
3404 debug_generic_expr (lhs_type);
3405 debug_generic_expr (rhs1_type);
3406 return true;
3407 }
3408
3409 return false;
3410 }
3411
3412 case FIXED_CONVERT_EXPR:
3413 {
3414 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3415 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3416 {
3417 error ("invalid types in fixed-point conversion");
3418 debug_generic_expr (lhs_type);
3419 debug_generic_expr (rhs1_type);
3420 return true;
3421 }
3422
3423 return false;
3424 }
3425
3426 case FLOAT_EXPR:
3427 {
3428 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3429 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3430 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3431 {
3432 error ("invalid types in conversion to floating point");
3433 debug_generic_expr (lhs_type);
3434 debug_generic_expr (rhs1_type);
3435 return true;
3436 }
3437
3438 return false;
3439 }
3440
3441 case FIX_TRUNC_EXPR:
3442 {
3443 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3444 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3445 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3446 {
3447 error ("invalid types in conversion to integer");
3448 debug_generic_expr (lhs_type);
3449 debug_generic_expr (rhs1_type);
3450 return true;
3451 }
3452
3453 return false;
3454 }
3455
3456 case VEC_UNPACK_HI_EXPR:
3457 case VEC_UNPACK_LO_EXPR:
3458 case REDUC_MAX_EXPR:
3459 case REDUC_MIN_EXPR:
3460 case REDUC_PLUS_EXPR:
3461 case VEC_UNPACK_FLOAT_HI_EXPR:
3462 case VEC_UNPACK_FLOAT_LO_EXPR:
3463 /* FIXME. */
3464 return false;
3465
3466 case NEGATE_EXPR:
3467 case ABS_EXPR:
3468 case BIT_NOT_EXPR:
3469 case PAREN_EXPR:
3470 case NON_LVALUE_EXPR:
3471 case CONJ_EXPR:
3472 break;
3473
3474 default:
3475 gcc_unreachable ();
3476 }
3477
3478 /* For the remaining codes assert there is no conversion involved. */
3479 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3480 {
3481 error ("non-trivial conversion in unary operation");
3482 debug_generic_expr (lhs_type);
3483 debug_generic_expr (rhs1_type);
3484 return true;
3485 }
3486
3487 return false;
3488 }
3489
3490 /* Verify a gimple assignment statement STMT with a binary rhs.
3491 Returns true if anything is wrong. */
3492
3493 static bool
3494 verify_gimple_assign_binary (gimple stmt)
3495 {
3496 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3497 tree lhs = gimple_assign_lhs (stmt);
3498 tree lhs_type = TREE_TYPE (lhs);
3499 tree rhs1 = gimple_assign_rhs1 (stmt);
3500 tree rhs1_type = TREE_TYPE (rhs1);
3501 tree rhs2 = gimple_assign_rhs2 (stmt);
3502 tree rhs2_type = TREE_TYPE (rhs2);
3503
3504 if (!is_gimple_reg (lhs))
3505 {
3506 error ("non-register as LHS of binary operation");
3507 return true;
3508 }
3509
3510 if (!is_gimple_val (rhs1)
3511 || !is_gimple_val (rhs2))
3512 {
3513 error ("invalid operands in binary operation");
3514 return true;
3515 }
3516
3517 /* First handle operations that involve different types. */
3518 switch (rhs_code)
3519 {
3520 case COMPLEX_EXPR:
3521 {
3522 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3523 || !(INTEGRAL_TYPE_P (rhs1_type)
3524 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3525 || !(INTEGRAL_TYPE_P (rhs2_type)
3526 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3527 {
3528 error ("type mismatch in complex expression");
3529 debug_generic_expr (lhs_type);
3530 debug_generic_expr (rhs1_type);
3531 debug_generic_expr (rhs2_type);
3532 return true;
3533 }
3534
3535 return false;
3536 }
3537
3538 case LSHIFT_EXPR:
3539 case RSHIFT_EXPR:
3540 case LROTATE_EXPR:
3541 case RROTATE_EXPR:
3542 {
3543 /* Shifts and rotates are ok on integral types, fixed point
3544 types and integer vector types. */
3545 if ((!INTEGRAL_TYPE_P (rhs1_type)
3546 && !FIXED_POINT_TYPE_P (rhs1_type)
3547 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3548 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3549 || (!INTEGRAL_TYPE_P (rhs2_type)
3550 /* Vector shifts of vectors are also ok. */
3551 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3552 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3553 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3554 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3555 || !useless_type_conversion_p (lhs_type, rhs1_type))
3556 {
3557 error ("type mismatch in shift expression");
3558 debug_generic_expr (lhs_type);
3559 debug_generic_expr (rhs1_type);
3560 debug_generic_expr (rhs2_type);
3561 return true;
3562 }
3563
3564 return false;
3565 }
3566
3567 case VEC_LSHIFT_EXPR:
3568 case VEC_RSHIFT_EXPR:
3569 {
3570 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3571 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3572 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3573 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3574 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3575 || (!INTEGRAL_TYPE_P (rhs2_type)
3576 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3577 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3578 || !useless_type_conversion_p (lhs_type, rhs1_type))
3579 {
3580 error ("type mismatch in vector shift expression");
3581 debug_generic_expr (lhs_type);
3582 debug_generic_expr (rhs1_type);
3583 debug_generic_expr (rhs2_type);
3584 return true;
3585 }
3586 /* For shifting a vector of non-integral components we
3587 only allow shifting by a constant multiple of the element size. */
3588 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3589 && (TREE_CODE (rhs2) != INTEGER_CST
3590 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3591 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3592 {
3593 error ("non-element sized vector shift of floating point vector");
3594 return true;
3595 }
3596
3597 return false;
3598 }
3599
3600 case WIDEN_LSHIFT_EXPR:
3601 {
3602 if (!INTEGRAL_TYPE_P (lhs_type)
3603 || !INTEGRAL_TYPE_P (rhs1_type)
3604 || TREE_CODE (rhs2) != INTEGER_CST
3605 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3606 {
3607 error ("type mismatch in widening vector shift expression");
3608 debug_generic_expr (lhs_type);
3609 debug_generic_expr (rhs1_type);
3610 debug_generic_expr (rhs2_type);
3611 return true;
3612 }
3613
3614 return false;
3615 }
3616
3617 case VEC_WIDEN_LSHIFT_HI_EXPR:
3618 case VEC_WIDEN_LSHIFT_LO_EXPR:
3619 {
3620 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3621 || TREE_CODE (lhs_type) != VECTOR_TYPE
3622 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3623 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3624 || TREE_CODE (rhs2) != INTEGER_CST
3625 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3626 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3627 {
3628 error ("type mismatch in widening vector shift expression");
3629 debug_generic_expr (lhs_type);
3630 debug_generic_expr (rhs1_type);
3631 debug_generic_expr (rhs2_type);
3632 return true;
3633 }
3634
3635 return false;
3636 }
3637
3638 case PLUS_EXPR:
3639 case MINUS_EXPR:
3640 {
3641 /* We use regular PLUS_EXPR and MINUS_EXPR for vectors.
3642 ??? This just makes the checker happy and may not be what is
3643 intended. */
3644 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3645 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3646 {
3647 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3648 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3649 {
3650 error ("invalid non-vector operands to vector valued plus");
3651 return true;
3652 }
3653 lhs_type = TREE_TYPE (lhs_type);
3654 rhs1_type = TREE_TYPE (rhs1_type);
3655 rhs2_type = TREE_TYPE (rhs2_type);
3656 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3657 the pointer into the second operand. */
3658 if (POINTER_TYPE_P (rhs2_type))
3659 {
3660 tree tem = rhs1_type;
3661 rhs1_type = rhs2_type;
3662 rhs2_type = tem;
3663 }
3664 goto do_pointer_plus_expr_check;
3665 }
3666 if (POINTER_TYPE_P (lhs_type)
3667 || POINTER_TYPE_P (rhs1_type)
3668 || POINTER_TYPE_P (rhs2_type))
3669 {
3670 error ("invalid (pointer) operands to plus/minus");
3671 return true;
3672 }
3673
3674 /* Continue with generic binary expression handling. */
3675 break;
3676 }
3677
3678 case POINTER_PLUS_EXPR:
3679 {
3680 do_pointer_plus_expr_check:
3681 if (!POINTER_TYPE_P (rhs1_type)
3682 || !useless_type_conversion_p (lhs_type, rhs1_type)
3683 || !ptrofftype_p (rhs2_type))
3684 {
3685 error ("type mismatch in pointer plus expression");
3686 debug_generic_stmt (lhs_type);
3687 debug_generic_stmt (rhs1_type);
3688 debug_generic_stmt (rhs2_type);
3689 return true;
3690 }
3691
3692 return false;
3693 }
3694
3695 case TRUTH_ANDIF_EXPR:
3696 case TRUTH_ORIF_EXPR:
3697 case TRUTH_AND_EXPR:
3698 case TRUTH_OR_EXPR:
3699 case TRUTH_XOR_EXPR:
3700
3701 gcc_unreachable ();
3702
3703 case LT_EXPR:
3704 case LE_EXPR:
3705 case GT_EXPR:
3706 case GE_EXPR:
3707 case EQ_EXPR:
3708 case NE_EXPR:
3709 case UNORDERED_EXPR:
3710 case ORDERED_EXPR:
3711 case UNLT_EXPR:
3712 case UNLE_EXPR:
3713 case UNGT_EXPR:
3714 case UNGE_EXPR:
3715 case UNEQ_EXPR:
3716 case LTGT_EXPR:
3717 /* Comparisons are also binary, but the result type is not
3718 connected to the operand types. */
3719 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3720
3721 case WIDEN_MULT_EXPR:
3722 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3723 return true;
3724 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3725 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3726
3727 case WIDEN_SUM_EXPR:
3728 case VEC_WIDEN_MULT_HI_EXPR:
3729 case VEC_WIDEN_MULT_LO_EXPR:
3730 case VEC_PACK_TRUNC_EXPR:
3731 case VEC_PACK_SAT_EXPR:
3732 case VEC_PACK_FIX_TRUNC_EXPR:
3733 /* FIXME. */
3734 return false;
3735
3736 case MULT_EXPR:
3737 case TRUNC_DIV_EXPR:
3738 case CEIL_DIV_EXPR:
3739 case FLOOR_DIV_EXPR:
3740 case ROUND_DIV_EXPR:
3741 case TRUNC_MOD_EXPR:
3742 case CEIL_MOD_EXPR:
3743 case FLOOR_MOD_EXPR:
3744 case ROUND_MOD_EXPR:
3745 case RDIV_EXPR:
3746 case EXACT_DIV_EXPR:
3747 case MIN_EXPR:
3748 case MAX_EXPR:
3749 case BIT_IOR_EXPR:
3750 case BIT_XOR_EXPR:
3751 case BIT_AND_EXPR:
3752 /* Continue with generic binary expression handling. */
3753 break;
3754
3755 default:
3756 gcc_unreachable ();
3757 }
3758
3759 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3760 || !useless_type_conversion_p (lhs_type, rhs2_type))
3761 {
3762 error ("type mismatch in binary expression");
3763 debug_generic_stmt (lhs_type);
3764 debug_generic_stmt (rhs1_type);
3765 debug_generic_stmt (rhs2_type);
3766 return true;
3767 }
3768
3769 return false;
3770 }
3771
3772 /* Verify a gimple assignment statement STMT with a ternary rhs.
3773 Returns true if anything is wrong. */
3774
3775 static bool
3776 verify_gimple_assign_ternary (gimple stmt)
3777 {
3778 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3779 tree lhs = gimple_assign_lhs (stmt);
3780 tree lhs_type = TREE_TYPE (lhs);
3781 tree rhs1 = gimple_assign_rhs1 (stmt);
3782 tree rhs1_type = TREE_TYPE (rhs1);
3783 tree rhs2 = gimple_assign_rhs2 (stmt);
3784 tree rhs2_type = TREE_TYPE (rhs2);
3785 tree rhs3 = gimple_assign_rhs3 (stmt);
3786 tree rhs3_type = TREE_TYPE (rhs3);
3787
3788 if (!is_gimple_reg (lhs))
3789 {
3790 error ("non-register as LHS of ternary operation");
3791 return true;
3792 }
3793
3794 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3795 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3796 || !is_gimple_val (rhs2)
3797 || !is_gimple_val (rhs3))
3798 {
3799 error ("invalid operands in ternary operation");
3800 return true;
3801 }
3802
3803 /* First handle operations that involve different types. */
3804 switch (rhs_code)
3805 {
3806 case WIDEN_MULT_PLUS_EXPR:
3807 case WIDEN_MULT_MINUS_EXPR:
3808 if ((!INTEGRAL_TYPE_P (rhs1_type)
3809 && !FIXED_POINT_TYPE_P (rhs1_type))
3810 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3811 || !useless_type_conversion_p (lhs_type, rhs3_type)
3812 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3813 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3814 {
3815 error ("type mismatch in widening multiply-accumulate expression");
3816 debug_generic_expr (lhs_type);
3817 debug_generic_expr (rhs1_type);
3818 debug_generic_expr (rhs2_type);
3819 debug_generic_expr (rhs3_type);
3820 return true;
3821 }
3822 break;
3823
3824 case FMA_EXPR:
3825 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3826 || !useless_type_conversion_p (lhs_type, rhs2_type)
3827 || !useless_type_conversion_p (lhs_type, rhs3_type))
3828 {
3829 error ("type mismatch in fused multiply-add expression");
3830 debug_generic_expr (lhs_type);
3831 debug_generic_expr (rhs1_type);
3832 debug_generic_expr (rhs2_type);
3833 debug_generic_expr (rhs3_type);
3834 return true;
3835 }
3836 break;
3837
3838 case COND_EXPR:
3839 case VEC_COND_EXPR:
3840 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3841 || !useless_type_conversion_p (lhs_type, rhs3_type))
3842 {
3843 error ("type mismatch in conditional expression");
3844 debug_generic_expr (lhs_type);
3845 debug_generic_expr (rhs2_type);
3846 debug_generic_expr (rhs3_type);
3847 return true;
3848 }
3849 break;
3850
3851 case VEC_PERM_EXPR:
3852 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3853 || !useless_type_conversion_p (lhs_type, rhs2_type))
3854 {
3855 error ("type mismatch in vector permute expression");
3856 debug_generic_expr (lhs_type);
3857 debug_generic_expr (rhs1_type);
3858 debug_generic_expr (rhs2_type);
3859 debug_generic_expr (rhs3_type);
3860 return true;
3861 }
3862
3863 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3864 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3865 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3866 {
3867 error ("vector types expected in vector permute expression");
3868 debug_generic_expr (lhs_type);
3869 debug_generic_expr (rhs1_type);
3870 debug_generic_expr (rhs2_type);
3871 debug_generic_expr (rhs3_type);
3872 return true;
3873 }
3874
3875 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3876 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3877 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3878 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3879 != TYPE_VECTOR_SUBPARTS (lhs_type))
3880 {
3881 error ("vectors with different element number found "
3882 "in vector permute expression");
3883 debug_generic_expr (lhs_type);
3884 debug_generic_expr (rhs1_type);
3885 debug_generic_expr (rhs2_type);
3886 debug_generic_expr (rhs3_type);
3887 return true;
3888 }
3889
3890 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3891 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3892 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3893 {
3894 error ("invalid mask type in vector permute expression");
3895 debug_generic_expr (lhs_type);
3896 debug_generic_expr (rhs1_type);
3897 debug_generic_expr (rhs2_type);
3898 debug_generic_expr (rhs3_type);
3899 return true;
3900 }
3901
3902 return false;
3903
3904 case DOT_PROD_EXPR:
3905 case REALIGN_LOAD_EXPR:
3906 /* FIXME. */
3907 return false;
3908
3909 default:
3910 gcc_unreachable ();
3911 }
3912 return false;
3913 }
3914
3915 /* Verify a gimple assignment statement STMT with a single rhs.
3916 Returns true if anything is wrong. */
3917
3918 static bool
3919 verify_gimple_assign_single (gimple stmt)
3920 {
3921 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3922 tree lhs = gimple_assign_lhs (stmt);
3923 tree lhs_type = TREE_TYPE (lhs);
3924 tree rhs1 = gimple_assign_rhs1 (stmt);
3925 tree rhs1_type = TREE_TYPE (rhs1);
3926 bool res = false;
3927
3928 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3929 {
3930 error ("non-trivial conversion at assignment");
3931 debug_generic_expr (lhs_type);
3932 debug_generic_expr (rhs1_type);
3933 return true;
3934 }
3935
3936 if (handled_component_p (lhs))
3937 res |= verify_types_in_gimple_reference (lhs, true);
3938
3939 /* Special codes we cannot handle via their class. */
3940 switch (rhs_code)
3941 {
3942 case ADDR_EXPR:
3943 {
3944 tree op = TREE_OPERAND (rhs1, 0);
3945 if (!is_gimple_addressable (op))
3946 {
3947 error ("invalid operand in unary expression");
3948 return true;
3949 }
3950
3951 /* Technically there is no longer a need for matching types, but
3952 gimple hygiene asks for this check. In LTO we can end up
3953 combining incompatible units and thus end up with addresses
3954 of globals that change their type to a common one. */
3955 if (!in_lto_p
3956 && !types_compatible_p (TREE_TYPE (op),
3957 TREE_TYPE (TREE_TYPE (rhs1)))
3958 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3959 TREE_TYPE (op)))
3960 {
3961 error ("type mismatch in address expression");
3962 debug_generic_stmt (TREE_TYPE (rhs1));
3963 debug_generic_stmt (TREE_TYPE (op));
3964 return true;
3965 }
3966
3967 return verify_types_in_gimple_reference (op, true);
3968 }
3969
3970 /* tcc_reference */
3971 case INDIRECT_REF:
3972 error ("INDIRECT_REF in gimple IL");
3973 return true;
3974
3975 case COMPONENT_REF:
3976 case BIT_FIELD_REF:
3977 case ARRAY_REF:
3978 case ARRAY_RANGE_REF:
3979 case VIEW_CONVERT_EXPR:
3980 case REALPART_EXPR:
3981 case IMAGPART_EXPR:
3982 case TARGET_MEM_REF:
3983 case MEM_REF:
3984 if (!is_gimple_reg (lhs)
3985 && is_gimple_reg_type (TREE_TYPE (lhs)))
3986 {
3987 error ("invalid rhs for gimple memory store");
3988 debug_generic_stmt (lhs);
3989 debug_generic_stmt (rhs1);
3990 return true;
3991 }
3992 return res || verify_types_in_gimple_reference (rhs1, false);
3993
3994 /* tcc_constant */
3995 case SSA_NAME:
3996 case INTEGER_CST:
3997 case REAL_CST:
3998 case FIXED_CST:
3999 case COMPLEX_CST:
4000 case VECTOR_CST:
4001 case STRING_CST:
4002 return res;
4003
4004 /* tcc_declaration */
4005 case CONST_DECL:
4006 return res;
4007 case VAR_DECL:
4008 case PARM_DECL:
4009 if (!is_gimple_reg (lhs)
4010 && !is_gimple_reg (rhs1)
4011 && is_gimple_reg_type (TREE_TYPE (lhs)))
4012 {
4013 error ("invalid rhs for gimple memory store");
4014 debug_generic_stmt (lhs);
4015 debug_generic_stmt (rhs1);
4016 return true;
4017 }
4018 return res;
4019
4020 case CONSTRUCTOR:
4021 case OBJ_TYPE_REF:
4022 case ASSERT_EXPR:
4023 case WITH_SIZE_EXPR:
4024 /* FIXME. */
4025 return res;
4026
4027 default:;
4028 }
4029
4030 return res;
4031 }
4032
4033 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4034 is a problem, otherwise false. */
4035
4036 static bool
4037 verify_gimple_assign (gimple stmt)
4038 {
4039 switch (gimple_assign_rhs_class (stmt))
4040 {
4041 case GIMPLE_SINGLE_RHS:
4042 return verify_gimple_assign_single (stmt);
4043
4044 case GIMPLE_UNARY_RHS:
4045 return verify_gimple_assign_unary (stmt);
4046
4047 case GIMPLE_BINARY_RHS:
4048 return verify_gimple_assign_binary (stmt);
4049
4050 case GIMPLE_TERNARY_RHS:
4051 return verify_gimple_assign_ternary (stmt);
4052
4053 default:
4054 gcc_unreachable ();
4055 }
4056 }
4057
4058 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4059 is a problem, otherwise false. */
4060
4061 static bool
4062 verify_gimple_return (gimple stmt)
4063 {
4064 tree op = gimple_return_retval (stmt);
4065 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4066
4067 /* We cannot test for the presence of return values, as we do not fix
4068 up missing return values from the original source. */
4069 if (op == NULL)
4070 return false;
4071
4072 if (!is_gimple_val (op)
4073 && TREE_CODE (op) != RESULT_DECL)
4074 {
4075 error ("invalid operand in return statement");
4076 debug_generic_stmt (op);
4077 return true;
4078 }
4079
4080 if ((TREE_CODE (op) == RESULT_DECL
4081 && DECL_BY_REFERENCE (op))
4082 || (TREE_CODE (op) == SSA_NAME
4083 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4084 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4085 op = TREE_TYPE (op);
4086
4087 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4088 {
4089 error ("invalid conversion in return statement");
4090 debug_generic_stmt (restype);
4091 debug_generic_stmt (TREE_TYPE (op));
4092 return true;
4093 }
4094
4095 return false;
4096 }
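
/* A hypothetical fragment that verify_gimple_return rejects: in a
   function whose TREE_TYPE (TREE_TYPE (cfun->decl)) is int, the
   statement

     return p_1;   <-- p_1 has pointer type

   fails the useless_type_conversion_p check and is reported as
   "invalid conversion in return statement".  */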
4097
4098
4099 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4100 is a problem, otherwise false. */
4101
4102 static bool
4103 verify_gimple_goto (gimple stmt)
4104 {
4105 tree dest = gimple_goto_dest (stmt);
4106
4107 /* ??? We have two canonical forms of direct goto destinations, a
4108 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4109 if (TREE_CODE (dest) != LABEL_DECL
4110 && (!is_gimple_val (dest)
4111 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4112 {
4113 error ("goto destination is neither a label nor a pointer");
4114 return true;
4115 }
4116
4117 return false;
4118 }
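
/* Schematically, in C-like notation, the accepted destinations are
   (names hypothetical):

     goto lab;       <-- bare LABEL_DECL
     goto &&lab;     <-- ADDR_EXPR of a LABEL_DECL, a pointer value
     goto *p_1;      <-- any gimple value of pointer type

   A destination that is neither a LABEL_DECL nor a pointer-typed
   gimple value is diagnosed above.  */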
4119
4120 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4121 is a problem, otherwise false. */
4122
4123 static bool
4124 verify_gimple_switch (gimple stmt)
4125 {
4126 if (!is_gimple_val (gimple_switch_index (stmt)))
4127 {
4128 error ("invalid operand to switch statement");
4129 debug_generic_stmt (gimple_switch_index (stmt));
4130 return true;
4131 }
4132
4133 return false;
4134 }
4135
4136 /* Verify a gimple debug statement STMT.
4137 Returns true if anything is wrong. */
4138
4139 static bool
4140 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4141 {
4142 /* There isn't much that could be wrong in a gimple debug stmt. A
4143 gimple debug bind stmt, for example, maps a tree that is usually
4144 a VAR_DECL or a PARM_DECL (but could also be some scalarized
4145 component or member of an aggregate type) to another tree that
4146 can be an arbitrary expression. These stmts expand into debug
4147 insns, and are converted to debug notes by var-tracking.c. */
4148 return false;
4149 }
4150
4151 /* Verify a gimple label statement STMT.
4152 Returns true if anything is wrong. */
4153
4154 static bool
4155 verify_gimple_label (gimple stmt)
4156 {
4157 tree decl = gimple_label_label (stmt);
4158 int uid;
4159 bool err = false;
4160
4161 if (TREE_CODE (decl) != LABEL_DECL)
4162 return true;
4163
4164 uid = LABEL_DECL_UID (decl);
4165 if (cfun->cfg
4166 && (uid == -1
4167 || VEC_index (basic_block,
4168 label_to_block_map, uid) != gimple_bb (stmt)))
4169 {
4170 error ("incorrect entry in label_to_block_map");
4171 err |= true;
4172 }
4173
4174 uid = EH_LANDING_PAD_NR (decl);
4175 if (uid)
4176 {
4177 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4178 if (decl != lp->post_landing_pad)
4179 {
4180 error ("incorrect setting of landing pad number");
4181 err |= true;
4182 }
4183 }
4184
4185 return err;
4186 }
4187
4188 /* Verify the GIMPLE statement STMT. Returns true if there is an
4189 error, otherwise false. */
4190
4191 static bool
4192 verify_gimple_stmt (gimple stmt)
4193 {
4194 switch (gimple_code (stmt))
4195 {
4196 case GIMPLE_ASSIGN:
4197 return verify_gimple_assign (stmt);
4198
4199 case GIMPLE_LABEL:
4200 return verify_gimple_label (stmt);
4201
4202 case GIMPLE_CALL:
4203 return verify_gimple_call (stmt);
4204
4205 case GIMPLE_COND:
4206 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4207 {
4208 error ("invalid comparison code in gimple cond");
4209 return true;
4210 }
4211 if (!(!gimple_cond_true_label (stmt)
4212 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4213 || !(!gimple_cond_false_label (stmt)
4214 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4215 {
4216 error ("invalid labels in gimple cond");
4217 return true;
4218 }
4219
4220 return verify_gimple_comparison (boolean_type_node,
4221 gimple_cond_lhs (stmt),
4222 gimple_cond_rhs (stmt));
4223
4224 case GIMPLE_GOTO:
4225 return verify_gimple_goto (stmt);
4226
4227 case GIMPLE_SWITCH:
4228 return verify_gimple_switch (stmt);
4229
4230 case GIMPLE_RETURN:
4231 return verify_gimple_return (stmt);
4232
4233 case GIMPLE_ASM:
4234 return false;
4235
4236 case GIMPLE_TRANSACTION:
4237 return verify_gimple_transaction (stmt);
4238
4239 /* Tuples that do not have tree operands. */
4240 case GIMPLE_NOP:
4241 case GIMPLE_PREDICT:
4242 case GIMPLE_RESX:
4243 case GIMPLE_EH_DISPATCH:
4244 case GIMPLE_EH_MUST_NOT_THROW:
4245 return false;
4246
4247 CASE_GIMPLE_OMP:
4248 /* OpenMP directives are validated by the FE and never operated
4249 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4250 non-gimple expressions when the main index variable has had
4251 its address taken. This does not affect the loop itself
4252 because the header of a GIMPLE_OMP_FOR is merely used to determine
4253 how to set up the parallel iteration. */
4254 return false;
4255
4256 case GIMPLE_DEBUG:
4257 return verify_gimple_debug (stmt);
4258
4259 default:
4260 gcc_unreachable ();
4261 }
4262 }
4263
4264 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4265 and false otherwise. */
4266
4267 static bool
4268 verify_gimple_phi (gimple phi)
4269 {
4270 bool err = false;
4271 unsigned i;
4272 tree phi_result = gimple_phi_result (phi);
4273 bool virtual_p;
4274
4275 if (!phi_result)
4276 {
4277 error ("invalid PHI result");
4278 return true;
4279 }
4280
4281 virtual_p = !is_gimple_reg (phi_result);
4282 if (TREE_CODE (phi_result) != SSA_NAME
4283 || (virtual_p
4284 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4285 {
4286 error ("invalid PHI result");
4287 err = true;
4288 }
4289
4290 for (i = 0; i < gimple_phi_num_args (phi); i++)
4291 {
4292 tree t = gimple_phi_arg_def (phi, i);
4293
4294 if (!t)
4295 {
4296 error ("missing PHI def");
4297 err |= true;
4298 continue;
4299 }
4300 /* Addressable variables do have SSA_NAMEs but they
4301 are not considered gimple values. */
4302 else if ((TREE_CODE (t) == SSA_NAME
4303 && virtual_p != !is_gimple_reg (t))
4304 || (virtual_p
4305 && (TREE_CODE (t) != SSA_NAME
4306 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4307 || (!virtual_p
4308 && !is_gimple_val (t)))
4309 {
4310 error ("invalid PHI argument");
4311 debug_generic_expr (t);
4312 err |= true;
4313 }
4314 #ifdef ENABLE_TYPES_CHECKING
4315 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4316 {
4317 error ("incompatible types in PHI argument %u", i);
4318 debug_generic_stmt (TREE_TYPE (phi_result));
4319 debug_generic_stmt (TREE_TYPE (t));
4320 err |= true;
4321 }
4322 #endif
4323 }
4324
4325 return err;
4326 }
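
/* An illustrative pair of PHIs that pass the checks above (SSA names
   hypothetical).  A non-virtual PHI takes gimple values of a type
   trivially convertible to the result:

     # x_4 = PHI <x_2(3), 7(4)>

   while a virtual PHI uses only SSA names of the single virtual
   operand gimple_vop (cfun):

     # .MEM_5 = PHI <.MEM_1(3), .MEM_3(4)>
*/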
4327
4328 /* Verify the GIMPLE statements inside the sequence STMTS. */
4329
4330 static bool
4331 verify_gimple_in_seq_2 (gimple_seq stmts)
4332 {
4333 gimple_stmt_iterator ittr;
4334 bool err = false;
4335
4336 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4337 {
4338 gimple stmt = gsi_stmt (ittr);
4339
4340 switch (gimple_code (stmt))
4341 {
4342 case GIMPLE_BIND:
4343 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4344 break;
4345
4346 case GIMPLE_TRY:
4347 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4348 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4349 break;
4350
4351 case GIMPLE_EH_FILTER:
4352 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4353 break;
4354
4355 case GIMPLE_EH_ELSE:
4356 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4357 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4358 break;
4359
4360 case GIMPLE_CATCH:
4361 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4362 break;
4363
4364 case GIMPLE_TRANSACTION:
4365 err |= verify_gimple_transaction (stmt);
4366 break;
4367
4368 default:
4369 {
4370 bool err2 = verify_gimple_stmt (stmt);
4371 if (err2)
4372 debug_gimple_stmt (stmt);
4373 err |= err2;
4374 }
4375 }
4376 }
4377
4378 return err;
4379 }
4380
4381 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4382 is a problem, otherwise false. */
4383
4384 static bool
4385 verify_gimple_transaction (gimple stmt)
4386 {
4387 tree lab = gimple_transaction_label (stmt);
4388 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4389 return true;
4390 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4391 }
4392
4393
4394 /* Verify the GIMPLE statements inside the statement list STMTS. */
4395
4396 DEBUG_FUNCTION void
4397 verify_gimple_in_seq (gimple_seq stmts)
4398 {
4399 timevar_push (TV_TREE_STMT_VERIFY);
4400 if (verify_gimple_in_seq_2 (stmts))
4401 internal_error ("verify_gimple failed");
4402 timevar_pop (TV_TREE_STMT_VERIFY);
4403 }
4404
4405 /* Return true when T can be shared. */
4406
4407 bool
4408 tree_node_can_be_shared (tree t)
4409 {
4410 if (IS_TYPE_OR_DECL_P (t)
4411 || is_gimple_min_invariant (t)
4412 || TREE_CODE (t) == SSA_NAME
4413 || t == error_mark_node
4414 || TREE_CODE (t) == IDENTIFIER_NODE)
4415 return true;
4416
4417 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4418 return true;
4419
4420 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4421 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4422 || TREE_CODE (t) == COMPONENT_REF
4423 || TREE_CODE (t) == REALPART_EXPR
4424 || TREE_CODE (t) == IMAGPART_EXPR)
4425 t = TREE_OPERAND (t, 0);
4426
4427 if (DECL_P (t))
4428 return true;
4429
4430 return false;
4431 }
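
/* To illustrate the rules above: constants like 42, type and decl
   nodes, SSA names and CASE_LABEL_EXPRs may be shared freely; a
   reference such as a[1] or s.f is shareable because stripping the
   invariant handled components reaches a DECL; a[i_3], whose index
   is not gimple invariant, must remain unshared.  (The names here
   are only examples.)  */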
4432
4433 /* Called via walk_tree and walk_gimple_op. Verify tree sharing. */
4434
4435 static tree
4436 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4437 {
4438 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4439 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4440
4441 if (tree_node_can_be_shared (*tp))
4442 {
4443 *walk_subtrees = false;
4444 return NULL;
4445 }
4446
4447 if (pointer_set_insert (visited, *tp))
4448 return *tp;
4449
4450 return NULL;
4451 }
4452
4453 static bool eh_error_found;
4454 static int
4455 verify_eh_throw_stmt_node (void **slot, void *data)
4456 {
4457 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4458 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4459
4460 if (!pointer_set_contains (visited, node->stmt))
4461 {
4462 error ("dead STMT in EH table");
4463 debug_gimple_stmt (node->stmt);
4464 eh_error_found = true;
4465 }
4466 return 1;
4467 }
4468
4469 /* Verify the GIMPLE statements in the CFG of FN. */
4470
4471 DEBUG_FUNCTION void
4472 verify_gimple_in_cfg (struct function *fn)
4473 {
4474 basic_block bb;
4475 bool err = false;
4476 struct pointer_set_t *visited, *visited_stmts;
4477
4478 timevar_push (TV_TREE_STMT_VERIFY);
4479 visited = pointer_set_create ();
4480 visited_stmts = pointer_set_create ();
4481
4482 FOR_EACH_BB_FN (bb, fn)
4483 {
4484 gimple_stmt_iterator gsi;
4485
4486 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4487 {
4488 gimple phi = gsi_stmt (gsi);
4489 bool err2 = false;
4490 unsigned i;
4491
4492 pointer_set_insert (visited_stmts, phi);
4493
4494 if (gimple_bb (phi) != bb)
4495 {
4496 error ("gimple_bb (phi) is set to a wrong basic block");
4497 err2 = true;
4498 }
4499
4500 err2 |= verify_gimple_phi (phi);
4501
4502 for (i = 0; i < gimple_phi_num_args (phi); i++)
4503 {
4504 tree arg = gimple_phi_arg_def (phi, i);
4505 tree addr = walk_tree (&arg, verify_node_sharing, visited, NULL);
4506 if (addr)
4507 {
4508 error ("incorrect sharing of tree nodes");
4509 debug_generic_expr (addr);
4510 err2 |= true;
4511 }
4512 }
4513
4514 if (err2)
4515 debug_gimple_stmt (phi);
4516 err |= err2;
4517 }
4518
4519 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4520 {
4521 gimple stmt = gsi_stmt (gsi);
4522 bool err2 = false;
4523 struct walk_stmt_info wi;
4524 tree addr;
4525 int lp_nr;
4526
4527 pointer_set_insert (visited_stmts, stmt);
4528
4529 if (gimple_bb (stmt) != bb)
4530 {
4531 error ("gimple_bb (stmt) is set to a wrong basic block");
4532 err2 = true;
4533 }
4534
4535 err2 |= verify_gimple_stmt (stmt);
4536
4537 memset (&wi, 0, sizeof (wi));
4538 wi.info = (void *) visited;
4539 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4540 if (addr)
4541 {
4542 error ("incorrect sharing of tree nodes");
4543 debug_generic_expr (addr);
4544 err2 |= true;
4545 }
4546
4547 /* ??? Instead of not checking these stmts at all, the walker
4548 should know its context via wi. */
4549 if (!is_gimple_debug (stmt)
4550 && !is_gimple_omp (stmt))
4551 {
4552 memset (&wi, 0, sizeof (wi));
4553 addr = walk_gimple_op (stmt, verify_expr, &wi);
4554 if (addr)
4555 {
4556 debug_generic_expr (addr);
4557 inform (gimple_location (stmt), "in statement");
4558 err2 |= true;
4559 }
4560 }
4561
4562 /* If the statement is marked as part of an EH region, then it is
4563 expected that the statement could throw. Verify that when we
4564 have optimizations that simplify statements such that we prove
4565 that they cannot throw, that we update other data structures
4566 to match. */
4567 lp_nr = lookup_stmt_eh_lp (stmt);
4568 if (lp_nr != 0)
4569 {
4570 if (!stmt_could_throw_p (stmt))
4571 {
4572 error ("statement marked for throw, but doesn%'t");
4573 err2 |= true;
4574 }
4575 else if (lp_nr > 0
4576 && !gsi_one_before_end_p (gsi)
4577 && stmt_can_throw_internal (stmt))
4578 {
4579 error ("statement marked for throw in middle of block");
4580 err2 |= true;
4581 }
4582 }
4583
4584 if (err2)
4585 debug_gimple_stmt (stmt);
4586 err |= err2;
4587 }
4588 }
4589
4590 eh_error_found = false;
4591 if (get_eh_throw_stmt_table (cfun))
4592 htab_traverse (get_eh_throw_stmt_table (cfun),
4593 verify_eh_throw_stmt_node,
4594 visited_stmts);
4595
4596 if (err || eh_error_found)
4597 internal_error ("verify_gimple failed");
4598
4599 pointer_set_destroy (visited);
4600 pointer_set_destroy (visited_stmts);
4601 verify_histograms ();
4602 timevar_pop (TV_TREE_STMT_VERIFY);
4603 }
4604
4605
4606 /* Verifies that the flow information is OK. */
4607
4608 static int
4609 gimple_verify_flow_info (void)
4610 {
4611 int err = 0;
4612 basic_block bb;
4613 gimple_stmt_iterator gsi;
4614 gimple stmt;
4615 edge e;
4616 edge_iterator ei;
4617
4618 if (ENTRY_BLOCK_PTR->il.gimple)
4619 {
4620 error ("ENTRY_BLOCK has IL associated with it");
4621 err = 1;
4622 }
4623
4624 if (EXIT_BLOCK_PTR->il.gimple)
4625 {
4626 error ("EXIT_BLOCK has IL associated with it");
4627 err = 1;
4628 }
4629
4630 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4631 if (e->flags & EDGE_FALLTHRU)
4632 {
4633 error ("fallthru to exit from bb %d", e->src->index);
4634 err = 1;
4635 }
4636
4637 FOR_EACH_BB (bb)
4638 {
4639 bool found_ctrl_stmt = false;
4640
4641 stmt = NULL;
4642
4643 /* Skip labels on the start of basic block. */
4644 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4645 {
4646 tree label;
4647 gimple prev_stmt = stmt;
4648
4649 stmt = gsi_stmt (gsi);
4650
4651 if (gimple_code (stmt) != GIMPLE_LABEL)
4652 break;
4653
4654 label = gimple_label_label (stmt);
4655 if (prev_stmt && DECL_NONLOCAL (label))
4656 {
4657 error ("nonlocal label ");
4658 print_generic_expr (stderr, label, 0);
4659 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4660 bb->index);
4661 err = 1;
4662 }
4663
4664 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4665 {
4666 error ("EH landing pad label ");
4667 print_generic_expr (stderr, label, 0);
4668 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4669 bb->index);
4670 err = 1;
4671 }
4672
4673 if (label_to_block (label) != bb)
4674 {
4675 error ("label ");
4676 print_generic_expr (stderr, label, 0);
4677 fprintf (stderr, " to block does not match in bb %d",
4678 bb->index);
4679 err = 1;
4680 }
4681
4682 if (decl_function_context (label) != current_function_decl)
4683 {
4684 error ("label ");
4685 print_generic_expr (stderr, label, 0);
4686 fprintf (stderr, " has incorrect context in bb %d",
4687 bb->index);
4688 err = 1;
4689 }
4690 }
4691
4692 /* Verify that body of basic block BB is free of control flow. */
4693 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4694 {
4695 gimple stmt = gsi_stmt (gsi);
4696
4697 if (found_ctrl_stmt)
4698 {
4699 error ("control flow in the middle of basic block %d",
4700 bb->index);
4701 err = 1;
4702 }
4703
4704 if (stmt_ends_bb_p (stmt))
4705 found_ctrl_stmt = true;
4706
4707 if (gimple_code (stmt) == GIMPLE_LABEL)
4708 {
4709 error ("label ");
4710 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4711 fprintf (stderr, " in the middle of basic block %d", bb->index);
4712 err = 1;
4713 }
4714 }
4715
4716 gsi = gsi_last_bb (bb);
4717 if (gsi_end_p (gsi))
4718 continue;
4719
4720 stmt = gsi_stmt (gsi);
4721
4722 if (gimple_code (stmt) == GIMPLE_LABEL)
4723 continue;
4724
4725 err |= verify_eh_edges (stmt);
4726
4727 if (is_ctrl_stmt (stmt))
4728 {
4729 FOR_EACH_EDGE (e, ei, bb->succs)
4730 if (e->flags & EDGE_FALLTHRU)
4731 {
4732 error ("fallthru edge after a control statement in bb %d",
4733 bb->index);
4734 err = 1;
4735 }
4736 }
4737
4738 if (gimple_code (stmt) != GIMPLE_COND)
4739 {
4740 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4741 after anything other than a GIMPLE_COND statement. */
4742 FOR_EACH_EDGE (e, ei, bb->succs)
4743 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4744 {
4745 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4746 bb->index);
4747 err = 1;
4748 }
4749 }
4750
4751 switch (gimple_code (stmt))
4752 {
4753 case GIMPLE_COND:
4754 {
4755 edge true_edge;
4756 edge false_edge;
4757
4758 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4759
4760 if (!true_edge
4761 || !false_edge
4762 || !(true_edge->flags & EDGE_TRUE_VALUE)
4763 || !(false_edge->flags & EDGE_FALSE_VALUE)
4764 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4765 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4766 || EDGE_COUNT (bb->succs) >= 3)
4767 {
4768 error ("wrong outgoing edge flags at end of bb %d",
4769 bb->index);
4770 err = 1;
4771 }
4772 }
4773 break;
4774
4775 case GIMPLE_GOTO:
4776 if (simple_goto_p (stmt))
4777 {
4778 error ("explicit goto at end of bb %d", bb->index);
4779 err = 1;
4780 }
4781 else
4782 {
4783 /* FIXME. We should double check that the labels in the
4784 destination blocks have their address taken. */
4785 FOR_EACH_EDGE (e, ei, bb->succs)
4786 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4787 | EDGE_FALSE_VALUE))
4788 || !(e->flags & EDGE_ABNORMAL))
4789 {
4790 error ("wrong outgoing edge flags at end of bb %d",
4791 bb->index);
4792 err = 1;
4793 }
4794 }
4795 break;
4796
4797 case GIMPLE_CALL:
4798 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4799 break;
4800 /* ... fallthru ... */
4801 case GIMPLE_RETURN:
4802 if (!single_succ_p (bb)
4803 || (single_succ_edge (bb)->flags
4804 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4805 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4806 {
4807 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4808 err = 1;
4809 }
4810 if (single_succ (bb) != EXIT_BLOCK_PTR)
4811 {
4812 error ("return edge does not point to exit in bb %d",
4813 bb->index);
4814 err = 1;
4815 }
4816 break;
4817
4818 case GIMPLE_SWITCH:
4819 {
4820 tree prev;
4821 edge e;
4822 size_t i, n;
4823
4824 n = gimple_switch_num_labels (stmt);
4825
4826 /* Mark all the destination basic blocks. */
4827 for (i = 0; i < n; ++i)
4828 {
4829 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4830 basic_block label_bb = label_to_block (lab);
4831 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4832 label_bb->aux = (void *)1;
4833 }
4834
4835 /* Verify that the case labels are sorted. */
4836 prev = gimple_switch_label (stmt, 0);
4837 for (i = 1; i < n; ++i)
4838 {
4839 tree c = gimple_switch_label (stmt, i);
4840 if (!CASE_LOW (c))
4841 {
4842 error ("found default case not at the start of "
4843 "case vector");
4844 err = 1;
4845 continue;
4846 }
4847 if (CASE_LOW (prev)
4848 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4849 {
4850 error ("case labels not sorted: ");
4851 print_generic_expr (stderr, prev, 0);
4852 fprintf (stderr," is greater than ");
4853 print_generic_expr (stderr, c, 0);
4854 fprintf (stderr," but comes before it.\n");
4855 err = 1;
4856 }
4857 prev = c;
4858 }
4859 /* VRP will remove the default case if it can prove it will
4860 never be executed. So do not verify there always exists
4861 a default case here. */
4862
4863 FOR_EACH_EDGE (e, ei, bb->succs)
4864 {
4865 if (!e->dest->aux)
4866 {
4867 error ("extra outgoing edge %d->%d",
4868 bb->index, e->dest->index);
4869 err = 1;
4870 }
4871
4872 e->dest->aux = (void *)2;
4873 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4874 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4875 {
4876 error ("wrong outgoing edge flags at end of bb %d",
4877 bb->index);
4878 err = 1;
4879 }
4880 }
4881
4882 /* Check that we have all of them. */
4883 for (i = 0; i < n; ++i)
4884 {
4885 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4886 basic_block label_bb = label_to_block (lab);
4887
4888 if (label_bb->aux != (void *)2)
4889 {
4890 error ("missing edge %i->%i", bb->index, label_bb->index);
4891 err = 1;
4892 }
4893 }
4894
4895 FOR_EACH_EDGE (e, ei, bb->succs)
4896 e->dest->aux = (void *)0;
4897 }
4898 break;
4899
4900 case GIMPLE_EH_DISPATCH:
4901 err |= verify_eh_dispatch_edge (stmt);
4902 break;
4903
4904 default:
4905 break;
4906 }
4907 }
4908
4909 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4910 verify_dominators (CDI_DOMINATORS);
4911
4912 return err;
4913 }
4914
4915
4916 /* Updates phi nodes after creating a forwarder block joined
4917 by edge FALLTHRU. */
4918
4919 static void
4920 gimple_make_forwarder_block (edge fallthru)
4921 {
4922 edge e;
4923 edge_iterator ei;
4924 basic_block dummy, bb;
4925 tree var;
4926 gimple_stmt_iterator gsi;
4927
4928 dummy = fallthru->src;
4929 bb = fallthru->dest;
4930
4931 if (single_pred_p (bb))
4932 return;
4933
4934 /* If we redirected a branch we must create new PHI nodes at the
4935 start of BB. */
4936 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4937 {
4938 gimple phi, new_phi;
4939
4940 phi = gsi_stmt (gsi);
4941 var = gimple_phi_result (phi);
4942 new_phi = create_phi_node (var, bb);
4943 SSA_NAME_DEF_STMT (var) = new_phi;
4944 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4945 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
4946 UNKNOWN_LOCATION);
4947 }
4948
4949 /* Add the arguments we have stored on edges. */
4950 FOR_EACH_EDGE (e, ei, bb->preds)
4951 {
4952 if (e == fallthru)
4953 continue;
4954
4955 flush_pending_stmts (e);
4956 }
4957 }
4958
4959
4960 /* Return a non-special label at the head of basic block BB.
4961 Create one if it doesn't exist. */
4962
4963 tree
4964 gimple_block_label (basic_block bb)
4965 {
4966 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4967 bool first = true;
4968 tree label;
4969 gimple stmt;
4970
4971 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4972 {
4973 stmt = gsi_stmt (i);
4974 if (gimple_code (stmt) != GIMPLE_LABEL)
4975 break;
4976 label = gimple_label_label (stmt);
4977 if (!DECL_NONLOCAL (label))
4978 {
4979 if (!first)
4980 gsi_move_before (&i, &s);
4981 return label;
4982 }
4983 }
4984
4985 label = create_artificial_label (UNKNOWN_LOCATION);
4986 stmt = gimple_build_label (label);
4987 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4988 return label;
4989 }
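
/* A typical use, mirroring the edge-redirection code further below
   (sketch only): materialize a label for the new destination block
   and point a switch case at it:

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;
*/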
4990
4991
4992 /* Attempt to perform edge redirection by replacing a possibly complex
4993 jump instruction by a goto or by removing the jump completely.
4994 This can apply only if all edges now point to the same block. The
4995 parameters and return values are equivalent to
4996 redirect_edge_and_branch. */
4997
4998 static edge
4999 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5000 {
5001 basic_block src = e->src;
5002 gimple_stmt_iterator i;
5003 gimple stmt;
5004
5005 /* We can replace or remove a complex jump only when we have exactly
5006 two edges. */
5007 if (EDGE_COUNT (src->succs) != 2
5008 /* Verify that all targets will be TARGET. Specifically, the
5009 edge that is not E must also go to TARGET. */
5010 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5011 return NULL;
5012
5013 i = gsi_last_bb (src);
5014 if (gsi_end_p (i))
5015 return NULL;
5016
5017 stmt = gsi_stmt (i);
5018
5019 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5020 {
5021 gsi_remove (&i, true);
5022 e = ssa_redirect_edge (e, target);
5023 e->flags = EDGE_FALLTHRU;
5024 return e;
5025 }
5026
5027 return NULL;
5028 }
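
/* For illustration (block numbers hypothetical): if SRC ends in

     if (x_1 > 0) goto <bb 5>; else goto <bb 5>;

   then both successor edges already reach TARGET, so the GIMPLE_COND
   is removed and E is collapsed into a single fallthru edge to
   <bb 5>.  */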
5029
5030
5031 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5032 edge representing the redirected branch. */
5033
5034 static edge
5035 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5036 {
5037 basic_block bb = e->src;
5038 gimple_stmt_iterator gsi;
5039 edge ret;
5040 gimple stmt;
5041
5042 if (e->flags & EDGE_ABNORMAL)
5043 return NULL;
5044
5045 if (e->dest == dest)
5046 return NULL;
5047
5048 if (e->flags & EDGE_EH)
5049 return redirect_eh_edge (e, dest);
5050
5051 if (e->src != ENTRY_BLOCK_PTR)
5052 {
5053 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5054 if (ret)
5055 return ret;
5056 }
5057
5058 gsi = gsi_last_bb (bb);
5059 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5060
5061 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5062 {
5063 case GIMPLE_COND:
5064 /* For COND_EXPR, we only need to redirect the edge. */
5065 break;
5066
5067 case GIMPLE_GOTO:
5068 /* No non-abnormal edges should lead from a non-simple goto, and
5069 simple ones should be represented implicitly. */
5070 gcc_unreachable ();
5071
5072 case GIMPLE_SWITCH:
5073 {
5074 tree label = gimple_block_label (dest);
5075 tree cases = get_cases_for_edge (e, stmt);
5076
5077 /* If we have a list of cases associated with E, then use it
5078 as it's a lot faster than walking the entire case vector. */
5079 if (cases)
5080 {
5081 edge e2 = find_edge (e->src, dest);
5082 tree last, first;
5083
5084 first = cases;
5085 while (cases)
5086 {
5087 last = cases;
5088 CASE_LABEL (cases) = label;
5089 cases = CASE_CHAIN (cases);
5090 }
5091
5092 /* If there was already an edge in the CFG, then we need
5093 to move all the cases associated with E to E2. */
5094 if (e2)
5095 {
5096 tree cases2 = get_cases_for_edge (e2, stmt);
5097
5098 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5099 CASE_CHAIN (cases2) = first;
5100 }
5101 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5102 }
5103 else
5104 {
5105 size_t i, n = gimple_switch_num_labels (stmt);
5106
5107 for (i = 0; i < n; i++)
5108 {
5109 tree elt = gimple_switch_label (stmt, i);
5110 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5111 CASE_LABEL (elt) = label;
5112 }
5113 }
5114 }
5115 break;
5116
5117 case GIMPLE_ASM:
5118 {
5119 int i, n = gimple_asm_nlabels (stmt);
5120 tree label = NULL;
5121
5122 for (i = 0; i < n; ++i)
5123 {
5124 tree cons = gimple_asm_label_op (stmt, i);
5125 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5126 {
5127 if (!label)
5128 label = gimple_block_label (dest);
5129 TREE_VALUE (cons) = label;
5130 }
5131 }
5132
5133 /* If we didn't find any label matching the former edge in the
5134 asm labels, we must be redirecting the fallthrough
5135 edge. */
5136 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5137 }
5138 break;
5139
5140 case GIMPLE_RETURN:
5141 gsi_remove (&gsi, true);
5142 e->flags |= EDGE_FALLTHRU;
5143 break;
5144
5145 case GIMPLE_OMP_RETURN:
5146 case GIMPLE_OMP_CONTINUE:
5147 case GIMPLE_OMP_SECTIONS_SWITCH:
5148 case GIMPLE_OMP_FOR:
5149 /* The edges from OMP constructs can be simply redirected. */
5150 break;
5151
5152 case GIMPLE_EH_DISPATCH:
5153 if (!(e->flags & EDGE_FALLTHRU))
5154 redirect_eh_dispatch_edge (stmt, e, dest);
5155 break;
5156
5157 case GIMPLE_TRANSACTION:
5158 /* The ABORT edge has a stored label associated with it; otherwise
5159 the edges are simply redirectable. */
5160 if (e->flags == 0)
5161 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5162 break;
5163
5164 default:
5165 /* Otherwise it must be a fallthru edge, and we don't need to
5166 do anything besides redirecting it. */
5167 gcc_assert (e->flags & EDGE_FALLTHRU);
5168 break;
5169 }
5170
5171 /* Update/insert PHI nodes as necessary. */
5172
5173 /* Now update the edges in the CFG. */
5174 e = ssa_redirect_edge (e, dest);
5175
5176 return e;
5177 }
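
/* Sketch of the GIMPLE_SWITCH case above, with hypothetical labels:
   redirecting the edge taken for "case 1" rewrites the case vector
   in place, so

     switch (x_1) <case 0: <L0>, case 1: <L1>>
   becomes
     switch (x_1) <case 0: <L0>, case 1: <Lnew>>

   where <Lnew> comes from gimple_block_label (dest).  The per-edge
   case list from get_cases_for_edge is used when available to avoid
   walking the whole vector.  */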
5178
5179 /* Returns true if it is possible to remove edge E by redirecting
5180 it to the destination of the other edge from E->src. */
5181
5182 static bool
5183 gimple_can_remove_branch_p (const_edge e)
5184 {
5185 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5186 return false;
5187
5188 return true;
5189 }
5190
5191 /* Simple wrapper, as we can always redirect fallthru edges. */
5192
5193 static basic_block
5194 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5195 {
5196 e = gimple_redirect_edge_and_branch (e, dest);
5197 gcc_assert (e);
5198
5199 return NULL;
5200 }
5201
5202
5203 /* Splits basic block BB after statement STMT (but at least after the
5204 labels). If STMT is NULL, BB is split just after the labels. */
5205
5206 static basic_block
5207 gimple_split_block (basic_block bb, void *stmt)
5208 {
5209 gimple_stmt_iterator gsi;
5210 gimple_stmt_iterator gsi_tgt;
5211 gimple act;
5212 gimple_seq list;
5213 basic_block new_bb;
5214 edge e;
5215 edge_iterator ei;
5216
5217 new_bb = create_empty_bb (bb);
5218
5219 /* Redirect the outgoing edges. */
5220 new_bb->succs = bb->succs;
5221 bb->succs = NULL;
5222 FOR_EACH_EDGE (e, ei, new_bb->succs)
5223 e->src = new_bb;
5224
5225 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5226 stmt = NULL;
5227
5228 /* Move everything from GSI to the new basic block. */
5229 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5230 {
5231 act = gsi_stmt (gsi);
5232 if (gimple_code (act) == GIMPLE_LABEL)
5233 continue;
5234
5235 if (!stmt)
5236 break;
5237
5238 if (stmt == act)
5239 {
5240 gsi_next (&gsi);
5241 break;
5242 }
5243 }
5244
5245 if (gsi_end_p (gsi))
5246 return new_bb;
5247
5248 /* Split the statement list -- avoid creating new containers, as this
5249 brings ugly quadratic memory consumption in the inliner.
5250 (We are still quadratic since we need to update stmt BB pointers,
5251 sadly.) */
5252 list = gsi_split_seq_before (&gsi);
5253 set_bb_seq (new_bb, list);
5254 for (gsi_tgt = gsi_start (list);
5255 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5256 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5257
5258 return new_bb;
5259 }
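
/* For example (statements hypothetical), splitting

     <bb 2>: a_1 = 1; b_2 = 2; c_3 = 3;

   after "b_2 = 2" keeps "a_1 = 1; b_2 = 2;" in BB, while "c_3 = 3;"
   and all of BB's outgoing edges move to the returned block.  */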
5260
5261
5262 /* Moves basic block BB after block AFTER. */
5263
5264 static bool
5265 gimple_move_block_after (basic_block bb, basic_block after)
5266 {
5267 if (bb->prev_bb == after)
5268 return true;
5269
5270 unlink_block (bb);
5271 link_block (bb, after);
5272
5273 return true;
5274 }
5275
5276
5277 /* Return true if basic_block can be duplicated. */
5278
5279 static bool
5280 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5281 {
5282 return true;
5283 }
5284
5285 /* Create a duplicate of the basic block BB. NOTE: This does not
5286 preserve SSA form. */
5287
5288 static basic_block
5289 gimple_duplicate_bb (basic_block bb)
5290 {
5291 basic_block new_bb;
5292 gimple_stmt_iterator gsi, gsi_tgt;
5293 gimple_seq phis = phi_nodes (bb);
5294 gimple phi, stmt, copy;
5295
5296 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5297
5298 /* Copy the PHI nodes. We ignore PHI node arguments here because
5299 the incoming edges have not been setup yet. */
5300 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5301 {
5302 phi = gsi_stmt (gsi);
5303 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5304 create_new_def_for (gimple_phi_result (copy), copy,
5305 gimple_phi_result_ptr (copy));
5306 }
5307
5308 gsi_tgt = gsi_start_bb (new_bb);
5309 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5310 {
5311 def_operand_p def_p;
5312 ssa_op_iter op_iter;
5313 tree lhs;
5314
5315 stmt = gsi_stmt (gsi);
5316 if (gimple_code (stmt) == GIMPLE_LABEL)
5317 continue;
5318
5319 /* Don't duplicate label debug stmts. */
5320 if (gimple_debug_bind_p (stmt)
5321 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5322 == LABEL_DECL)
5323 continue;
5324
5325 /* Create a new copy of STMT and duplicate STMT's virtual
5326 operands. */
5327 copy = gimple_copy (stmt);
5328 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5329
5330 maybe_duplicate_eh_stmt (copy, stmt);
5331 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5332
5333 /* When copying around a stmt writing into a local non-user
5334 aggregate, make sure it won't share stack slot with other
5335 vars. */
5336 lhs = gimple_get_lhs (stmt);
5337 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5338 {
5339 tree base = get_base_address (lhs);
5340 if (base
5341 && (TREE_CODE (base) == VAR_DECL
5342 || TREE_CODE (base) == RESULT_DECL)
5343 && DECL_IGNORED_P (base)
5344 && !TREE_STATIC (base)
5345 && !DECL_EXTERNAL (base)
5346 && (TREE_CODE (base) != VAR_DECL
5347 || !DECL_HAS_VALUE_EXPR_P (base)))
5348 DECL_NONSHAREABLE (base) = 1;
5349 }
5350
5351 /* Create new names for all the definitions created by COPY and
5352 add replacement mappings for each new name. */
5353 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5354 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5355 }
5356
5357 return new_bb;
5358 }
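
/* Note the division of labor: gimple_duplicate_bb creates the copies
   and records replacement mappings via create_new_def_for, while the
   PHI arguments on edges leaving the copies are filled in later, by
   add_phi_args_after_copy_bb below, once the new edges exist.  */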
5359
5360 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5361
5362 static void
5363 add_phi_args_after_copy_edge (edge e_copy)
5364 {
5365 basic_block bb, bb_copy = e_copy->src, dest;
5366 edge e;
5367 edge_iterator ei;
5368 gimple phi, phi_copy;
5369 tree def;
5370 gimple_stmt_iterator psi, psi_copy;
5371
5372 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5373 return;
5374
5375 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5376
5377 if (e_copy->dest->flags & BB_DUPLICATED)
5378 dest = get_bb_original (e_copy->dest);
5379 else
5380 dest = e_copy->dest;
5381
5382 e = find_edge (bb, dest);
5383 if (!e)
5384 {
5385 /* During loop unrolling the target of the latch edge is copied.
5386 In this case we are not looking for the edge to DEST, but for
5387 the edge to the duplicated block whose original was DEST. */
5388 FOR_EACH_EDGE (e, ei, bb->succs)
5389 {
5390 if ((e->dest->flags & BB_DUPLICATED)
5391 && get_bb_original (e->dest) == dest)
5392 break;
5393 }
5394
5395 gcc_assert (e != NULL);
5396 }
5397
5398 for (psi = gsi_start_phis (e->dest),
5399 psi_copy = gsi_start_phis (e_copy->dest);
5400 !gsi_end_p (psi);
5401 gsi_next (&psi), gsi_next (&psi_copy))
5402 {
5403 phi = gsi_stmt (psi);
5404 phi_copy = gsi_stmt (psi_copy);
5405 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5406 add_phi_arg (phi_copy, def, e_copy,
5407 gimple_phi_arg_location_from_edge (phi, e));
5408 }
5409 }
5410
5411
5412 /* Basic block BB_COPY was created by code duplication. Add phi node
5413 arguments for edges going out of BB_COPY. The blocks that were
5414 duplicated have BB_DUPLICATED set. */
5415
5416 void
5417 add_phi_args_after_copy_bb (basic_block bb_copy)
5418 {
5419 edge e_copy;
5420 edge_iterator ei;
5421
5422 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5423 {
5424 add_phi_args_after_copy_edge (e_copy);
5425 }
5426 }
5427
5428 /* Blocks in REGION_COPY array of length N_REGION were created by
5429 duplication of basic blocks. Add phi node arguments for edges
5430 going from these blocks. If E_COPY is not NULL, also add
5431 phi node arguments for its destination. */
5432
5433 void
5434 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5435 edge e_copy)
5436 {
5437 unsigned i;
5438
5439 for (i = 0; i < n_region; i++)
5440 region_copy[i]->flags |= BB_DUPLICATED;
5441
5442 for (i = 0; i < n_region; i++)
5443 add_phi_args_after_copy_bb (region_copy[i]);
5444 if (e_copy)
5445 add_phi_args_after_copy_edge (e_copy);
5446
5447 for (i = 0; i < n_region; i++)
5448 region_copy[i]->flags &= ~BB_DUPLICATED;
5449 }
5450
5451 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5452 important exit edge EXIT. By important we mean that no SSA name defined
5453 inside region is live over the other exit edges of the region. All entry
5454 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5455 to the duplicate of the region. SSA form, dominance and loop information
5456 is updated. The new basic blocks are stored to REGION_COPY in the same
5457 order as they had in REGION, provided that REGION_COPY is not NULL.
5458 The function returns false if it is unable to copy the region,
5459 true otherwise. */
5460
5461 bool
5462 gimple_duplicate_sese_region (edge entry, edge exit,
5463 basic_block *region, unsigned n_region,
5464 basic_block *region_copy)
5465 {
5466 unsigned i;
5467 bool free_region_copy = false, copying_header = false;
5468 struct loop *loop = entry->dest->loop_father;
5469 edge exit_copy;
5470 VEC (basic_block, heap) *doms;
5471 edge redirected;
5472 int total_freq = 0, entry_freq = 0;
5473 gcov_type total_count = 0, entry_count = 0;
5474
5475 if (!can_copy_bbs_p (region, n_region))
5476 return false;
5477
5478 /* Some sanity checking. Note that we do not check for all possible
5479 misuses of the function; if you ask it to copy something weird,
5480 it will work, but the state of the structures probably will not be
5481 correct. */
5482 for (i = 0; i < n_region; i++)
5483 {
5484 /* We do not handle subloops, i.e. all the blocks must belong to the
5485 same loop. */
5486 if (region[i]->loop_father != loop)
5487 return false;
5488
5489 if (region[i] != entry->dest
5490 && region[i] == loop->header)
5491 return false;
5492 }
5493
5494 set_loop_copy (loop, loop);
5495
5496 /* In case the function is used for loop header copying (which is the primary
5497 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5498 if (loop->header == entry->dest)
5499 {
5500 copying_header = true;
5501 set_loop_copy (loop, loop_outer (loop));
5502
5503 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5504 return false;
5505
5506 for (i = 0; i < n_region; i++)
5507 if (region[i] != exit->src
5508 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5509 return false;
5510 }
5511
5512 if (!region_copy)
5513 {
5514 region_copy = XNEWVEC (basic_block, n_region);
5515 free_region_copy = true;
5516 }
5517
5518 gcc_assert (!need_ssa_update_p (cfun));
5519
5520 /* Record blocks outside the region that are dominated by something
5521 inside. */
5522 doms = NULL;
5523 initialize_original_copy_tables ();
5524
5525 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5526
5527 if (entry->dest->count)
5528 {
5529 total_count = entry->dest->count;
5530 entry_count = entry->count;
5531 /* Fix up corner cases, to avoid division by zero or creation of negative
5532 frequencies. */
5533 if (entry_count > total_count)
5534 entry_count = total_count;
5535 }
5536 else
5537 {
5538 total_freq = entry->dest->frequency;
5539 entry_freq = EDGE_FREQUENCY (entry);
5540 /* Fix up corner cases, to avoid division by zero or creation of negative
5541 frequencies. */
5542 if (total_freq == 0)
5543 total_freq = 1;
5544 else if (entry_freq > total_freq)
5545 entry_freq = total_freq;
5546 }
5547
5548 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5549 split_edge_bb_loc (entry));
5550 if (total_count)
5551 {
5552 scale_bbs_frequencies_gcov_type (region, n_region,
5553 total_count - entry_count,
5554 total_count);
5555 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5556 total_count);
5557 }
5558 else
5559 {
5560 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5561 total_freq);
5562 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5563 }
5564
5565 if (copying_header)
5566 {
5567 loop->header = exit->dest;
5568 loop->latch = exit->src;
5569 }
5570
5571 /* Redirect the entry and add the phi node arguments. */
5572 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5573 gcc_assert (redirected != NULL);
5574 flush_pending_stmts (entry);
5575
5576 /* Concerning updating of dominators: We must recount dominators
5577 for entry block and its copy. Anything that is outside of the
5578 region, but was dominated by something inside needs recounting as
5579 well. */
5580 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5581 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5582 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5583 VEC_free (basic_block, heap, doms);
5584
5585 /* Add the other PHI node arguments. */
5586 add_phi_args_after_copy (region_copy, n_region, NULL);
5587
5588 /* Update the SSA web. */
5589 update_ssa (TODO_update_ssa);
5590
5591 if (free_region_copy)
5592 free (region_copy);
5593
5594 free_original_copy_tables ();
5595 return true;
5596 }
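
/* For the primary use, loop header copying, the effect is roughly
   (schematic, names hypothetical):

     loop: if (i_1 < n_2) goto body; else goto out;
     body: ...
           goto loop;

   becomes

           if (i_0 < n_2) goto body; else goto out;  <-- copied header
     body: ...
     loop: if (i_1 < n_2) goto body; else goto out;
     out:  ...

   i.e. the exit test is duplicated in front of the loop; EXIT->dest
   becomes the new header and EXIT->src the new latch, as set up in
   the copying_header case above.  */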
5597
5598 /* Checks whether BB is part of the region defined by the N_REGION blocks in BBS. */
5599 static bool
5600 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5601 {
5602 unsigned int n;
5603
5604 for (n = 0; n < n_region; n++)
5605 {
5606 if (bb == bbs[n])
5607 return true;
5608 }
5609 return false;
5610 }
5611
5612 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5613 are stored to REGION_COPY in the same order as they appear
5614 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5615 the region, EXIT an exit from it. The condition guarding EXIT
5616 is moved to ENTRY. Returns true if duplication succeeds, false
5617 otherwise.
5618
5619 For example,
5620
5621 some_code;
5622 if (cond)
5623 A;
5624 else
5625 B;
5626
5627 is transformed to
5628
5629 if (cond)
5630 {
5631 some_code;
5632 A;
5633 }
5634 else
5635 {
5636 some_code;
5637 B;
5638 }
5639 */
5640
5641 bool
5642 gimple_duplicate_sese_tail (edge entry, edge exit,
5643 basic_block *region, unsigned n_region,
5644 basic_block *region_copy)
5645 {
5646 unsigned i;
5647 bool free_region_copy = false;
5648 struct loop *loop = exit->dest->loop_father;
5649 struct loop *orig_loop = entry->dest->loop_father;
5650 basic_block switch_bb, entry_bb, nentry_bb;
5651 VEC (basic_block, heap) *doms;
5652 int total_freq = 0, exit_freq = 0;
5653 gcov_type total_count = 0, exit_count = 0;
5654 edge exits[2], nexits[2], e;
5655 gimple_stmt_iterator gsi;
5656 gimple cond_stmt;
5657 edge sorig, snew;
5658 basic_block exit_bb;
5659 gimple_stmt_iterator psi;
5660 gimple phi;
5661 tree def;
5662 struct loop *target, *aloop, *cloop;
5663
5664 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5665 exits[0] = exit;
5666 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5667
5668 if (!can_copy_bbs_p (region, n_region))
5669 return false;
5670
5671 initialize_original_copy_tables ();
5672 set_loop_copy (orig_loop, loop);
5673
5674 target = loop;
5675 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5676 {
5677 if (bb_part_of_region_p (aloop->header, region, n_region))
5678 {
5679 cloop = duplicate_loop (aloop, target);
5680 duplicate_subloops (aloop, cloop);
5681 }
5682 }
5683
5684 if (!region_copy)
5685 {
5686 region_copy = XNEWVEC (basic_block, n_region);
5687 free_region_copy = true;
5688 }
5689
5690 gcc_assert (!need_ssa_update_p (cfun));
5691
5692 /* Record blocks outside the region that are dominated by something
5693 inside. */
5694 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5695
5696 if (exit->src->count)
5697 {
5698 total_count = exit->src->count;
5699 exit_count = exit->count;
5700 /* Fix up corner cases, to avoid division by zero or creation of negative
5701 frequencies. */
5702 if (exit_count > total_count)
5703 exit_count = total_count;
5704 }
5705 else
5706 {
5707 total_freq = exit->src->frequency;
5708 exit_freq = EDGE_FREQUENCY (exit);
5709 /* Fix up corner cases, to avoid division by zero or creation of negative
5710 frequencies. */
5711 if (total_freq == 0)
5712 total_freq = 1;
5713 if (exit_freq > total_freq)
5714 exit_freq = total_freq;
5715 }
5716
5717 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5718 split_edge_bb_loc (exit));
5719 if (total_count)
5720 {
5721 scale_bbs_frequencies_gcov_type (region, n_region,
5722 total_count - exit_count,
5723 total_count);
5724 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5725 total_count);
5726 }
5727 else
5728 {
5729 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5730 total_freq);
5731 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5732 }
5733
5734 /* Create the switch block, and put the exit condition into it. */
5735 entry_bb = entry->dest;
5736 nentry_bb = get_bb_copy (entry_bb);
5737 if (!last_stmt (entry->src)
5738 || !stmt_ends_bb_p (last_stmt (entry->src)))
5739 switch_bb = entry->src;
5740 else
5741 switch_bb = split_edge (entry);
5742 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5743
5744 gsi = gsi_last_bb (switch_bb);
5745 cond_stmt = last_stmt (exit->src);
5746 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5747 cond_stmt = gimple_copy (cond_stmt);
5748
5749 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5750
5751 sorig = single_succ_edge (switch_bb);
5752 sorig->flags = exits[1]->flags;
5753 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5754
5755 /* Register the new edge from SWITCH_BB in loop exit lists. */
5756 rescan_loop_exit (snew, true, false);
5757
5758 /* Add the PHI node arguments. */
5759 add_phi_args_after_copy (region_copy, n_region, snew);
5760
5761 /* Get rid of now superfluous conditions and associated edges (and phi node
5762 arguments). */
5763 exit_bb = exit->dest;
5764
5765 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5766 PENDING_STMT (e) = NULL;
5767
5768 /* The latch of ORIG_LOOP was copied, and so was the backedge
5769 to the original header. We redirect this backedge to EXIT_BB. */
5770 for (i = 0; i < n_region; i++)
5771 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5772 {
5773 gcc_assert (single_succ_edge (region_copy[i]));
5774 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5775 PENDING_STMT (e) = NULL;
5776 for (psi = gsi_start_phis (exit_bb);
5777 !gsi_end_p (psi);
5778 gsi_next (&psi))
5779 {
5780 phi = gsi_stmt (psi);
5781 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5782 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5783 }
5784 }
5785 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5786 PENDING_STMT (e) = NULL;
5787
5788 /* Anything that is outside of the region, but was dominated by something
5789 inside, needs its dominance info updated. */
5790 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5791 VEC_free (basic_block, heap, doms);
5792 /* Update the SSA web. */
5793 update_ssa (TODO_update_ssa);
5794
5795 if (free_region_copy)
5796 free (region_copy);
5797
5798 free_original_copy_tables ();
5799 return true;
5800 }
5801
5802 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5803 adding blocks when the dominator traversal reaches EXIT. This
5804 function silently assumes that ENTRY strictly dominates EXIT. */
5805
5806 void
5807 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5808 VEC(basic_block,heap) **bbs_p)
5809 {
5810 basic_block son;
5811
5812 for (son = first_dom_son (CDI_DOMINATORS, entry);
5813 son;
5814 son = next_dom_son (CDI_DOMINATORS, son))
5815 {
5816 VEC_safe_push (basic_block, heap, *bbs_p, son);
5817 if (son != exit)
5818 gather_blocks_in_sese_region (son, exit, bbs_p);
5819 }
5820 }
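
/* A minimal usage sketch (entry_bb, exit_bb and handle_bb are
   hypothetical names): collect the blocks of a SESE region and walk
   them:

     VEC (basic_block, heap) *bbs = NULL;
     basic_block bb;
     unsigned i;

     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
       handle_bb (bb);
     VEC_free (basic_block, heap, bbs);
*/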
5821
5822 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5823 The duplicates are recorded in VARS_MAP. */
5824
5825 static void
5826 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5827 tree to_context)
5828 {
5829 tree t = *tp, new_t;
5830 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5831 void **loc;
5832
5833 if (DECL_CONTEXT (t) == to_context)
5834 return;
5835
5836 loc = pointer_map_contains (vars_map, t);
5837
5838 if (!loc)
5839 {
5840 loc = pointer_map_insert (vars_map, t);
5841
5842 if (SSA_VAR_P (t))
5843 {
5844 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5845 add_local_decl (f, new_t);
5846 }
5847 else
5848 {
5849 gcc_assert (TREE_CODE (t) == CONST_DECL);
5850 new_t = copy_node (t);
5851 }
5852 DECL_CONTEXT (new_t) = to_context;
5853
5854 *loc = new_t;
5855 }
5856 else
5857 new_t = (tree) *loc;
5858
5859 *tp = new_t;
5860 }
5861
5862
5863 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5864 VARS_MAP maps old ssa names and var_decls to the new ones. */
5865
5866 static tree
5867 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5868 tree to_context)
5869 {
5870 void **loc;
5871 tree new_name, decl = SSA_NAME_VAR (name);
5872
5873 gcc_assert (is_gimple_reg (name));
5874
5875 loc = pointer_map_contains (vars_map, name);
5876
5877 if (!loc)
5878 {
5879 replace_by_duplicate_decl (&decl, vars_map, to_context);
5880
5881 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5882 if (gimple_in_ssa_p (cfun))
5883 add_referenced_var (decl);
5884
5885 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5886 if (SSA_NAME_IS_DEFAULT_DEF (name))
5887 set_default_def (decl, new_name);
5888 pop_cfun ();
5889
5890 loc = pointer_map_insert (vars_map, name);
5891 *loc = new_name;
5892 }
5893 else
5894 new_name = (tree) *loc;
5895
5896 return new_name;
5897 }
5898
5899 struct move_stmt_d
5900 {
5901 tree orig_block;
5902 tree new_block;
5903 tree from_context;
5904 tree to_context;
5905 struct pointer_map_t *vars_map;
5906 htab_t new_label_map;
5907 struct pointer_map_t *eh_map;
5908 bool remap_decls_p;
5909 };
5910
5911 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5912 contained in *TP if it was ORIG_BLOCK previously, and change the
5913 DECL_CONTEXT of every local variable referenced in *TP. */
5914
5915 static tree
5916 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5917 {
5918 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5919 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5920 tree t = *tp;
5921
5922 if (EXPR_P (t))
5923 /* We should never have TREE_BLOCK set on non-statements. */
5924 gcc_assert (!TREE_BLOCK (t));
5925
5926 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5927 {
5928 if (TREE_CODE (t) == SSA_NAME)
5929 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5930 else if (TREE_CODE (t) == LABEL_DECL)
5931 {
5932 if (p->new_label_map)
5933 {
5934 struct tree_map in, *out;
5935 in.base.from = t;
5936 out = (struct tree_map *)
5937 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5938 if (out)
5939 *tp = t = out->to;
5940 }
5941
5942 DECL_CONTEXT (t) = p->to_context;
5943 }
5944 else if (p->remap_decls_p)
5945 {
5946 /* Replace T with its duplicate. T should no longer appear in the
5947 parent function, so this looks wasteful; however, it may appear
5948 in referenced_vars, and more importantly, as virtual operands of
5949 statements, and in alias lists of other variables. It would be
5950 quite difficult to expunge it from all those places. ??? It might
5951 suffice to do this for addressable variables. */
5952 if ((TREE_CODE (t) == VAR_DECL
5953 && !is_global_var (t))
5954 || TREE_CODE (t) == CONST_DECL)
5955 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5956
5957 if (SSA_VAR_P (t)
5958 && gimple_in_ssa_p (cfun))
5959 {
5960 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5961 add_referenced_var (*tp);
5962 pop_cfun ();
5963 }
5964 }
5965 *walk_subtrees = 0;
5966 }
5967 else if (TYPE_P (t))
5968 *walk_subtrees = 0;
5969
5970 return NULL_TREE;
5971 }
5972
5973 /* Helper for move_stmt_r. Given an EH region number for the source
5974 function, map that to the duplicate EH region number in the dest. */
5975
5976 static int
5977 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5978 {
5979 eh_region old_r, new_r;
5980 void **slot;
5981
5982 old_r = get_eh_region_from_number (old_nr);
5983 slot = pointer_map_contains (p->eh_map, old_r);
5984 new_r = (eh_region) *slot;
5985
5986 return new_r->index;
5987 }
5988
5989 /* Similar, but operate on INTEGER_CSTs. */
5990
5991 static tree
5992 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5993 {
5994 int old_nr, new_nr;
5995
5996 old_nr = tree_low_cst (old_t_nr, 0);
5997 new_nr = move_stmt_eh_region_nr (old_nr, p);
5998
5999 return build_int_cst (integer_type_node, new_nr);
6000 }
6001
6002 /* Like move_stmt_op, but for gimple statements.
6003
6004 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6005 contained in the current statement in *GSI_P and change the
6006 DECL_CONTEXT of every local variable referenced in the current
6007 statement. */
6008
6009 static tree
6010 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6011 struct walk_stmt_info *wi)
6012 {
6013 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6014 gimple stmt = gsi_stmt (*gsi_p);
6015 tree block = gimple_block (stmt);
6016
6017 if (p->orig_block == NULL_TREE
6018 || block == p->orig_block
6019 || block == NULL_TREE)
6020 gimple_set_block (stmt, p->new_block);
6021 #ifdef ENABLE_CHECKING
6022 else if (block != p->new_block)
6023 {
6024 while (block && block != p->orig_block)
6025 block = BLOCK_SUPERCONTEXT (block);
6026 gcc_assert (block);
6027 }
6028 #endif
6029
6030 switch (gimple_code (stmt))
6031 {
6032 case GIMPLE_CALL:
6033 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6034 {
6035 tree r, fndecl = gimple_call_fndecl (stmt);
6036 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6037 switch (DECL_FUNCTION_CODE (fndecl))
6038 {
6039 case BUILT_IN_EH_COPY_VALUES:
6040 r = gimple_call_arg (stmt, 1);
6041 r = move_stmt_eh_region_tree_nr (r, p);
6042 gimple_call_set_arg (stmt, 1, r);
6043 /* FALLTHRU */
6044
6045 case BUILT_IN_EH_POINTER:
6046 case BUILT_IN_EH_FILTER:
6047 r = gimple_call_arg (stmt, 0);
6048 r = move_stmt_eh_region_tree_nr (r, p);
6049 gimple_call_set_arg (stmt, 0, r);
6050 break;
6051
6052 default:
6053 break;
6054 }
6055 }
6056 break;
6057
6058 case GIMPLE_RESX:
6059 {
6060 int r = gimple_resx_region (stmt);
6061 r = move_stmt_eh_region_nr (r, p);
6062 gimple_resx_set_region (stmt, r);
6063 }
6064 break;
6065
6066 case GIMPLE_EH_DISPATCH:
6067 {
6068 int r = gimple_eh_dispatch_region (stmt);
6069 r = move_stmt_eh_region_nr (r, p);
6070 gimple_eh_dispatch_set_region (stmt, r);
6071 }
6072 break;
6073
6074 case GIMPLE_OMP_RETURN:
6075 case GIMPLE_OMP_CONTINUE:
6076 break;
6077 default:
6078 if (is_gimple_omp (stmt))
6079 {
6080 /* Do not remap variables inside OMP directives. Variables
6081 referenced in clauses and directive header belong to the
6082 parent function and should not be moved into the child
6083 function. */
6084 bool save_remap_decls_p = p->remap_decls_p;
6085 p->remap_decls_p = false;
6086 *handled_ops_p = true;
6087
6088 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
6089 move_stmt_op, wi);
6090
6091 p->remap_decls_p = save_remap_decls_p;
6092 }
6093 break;
6094 }
6095
6096 return NULL_TREE;
6097 }
6098
6099 /* Move basic block BB from function CFUN to function DEST_FN. The
6100 block is moved out of the original linked list and placed after
6101 block AFTER in the new list. Also, the block is removed from the
6102 original array of blocks and placed in DEST_FN's array of blocks.
6103 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6104 updated to reflect the moved edges.
6105
6106 The local variables are remapped to new instances; VARS_MAP is used
6107 to record the mapping. */
6108
6109 static void
6110 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6111 basic_block after, bool update_edge_count_p,
6112 struct move_stmt_d *d)
6113 {
6114 struct control_flow_graph *cfg;
6115 edge_iterator ei;
6116 edge e;
6117 gimple_stmt_iterator si;
6118 unsigned old_len, new_len;
6119
6120 /* Remove BB from dominance structures. */
6121 delete_from_dominance_info (CDI_DOMINATORS, bb);
6122 if (current_loops)
6123 remove_bb_from_loops (bb);
6124
6125 /* Link BB to the new linked list. */
6126 move_block_after (bb, after);
6127
6128 /* Update the edge count in the corresponding flowgraphs. */
6129 if (update_edge_count_p)
6130 FOR_EACH_EDGE (e, ei, bb->succs)
6131 {
6132 cfun->cfg->x_n_edges--;
6133 dest_cfun->cfg->x_n_edges++;
6134 }
6135
6136 /* Remove BB from the original basic block array. */
6137 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
6138 cfun->cfg->x_n_basic_blocks--;
6139
6140 /* Grow DEST_CFUN's basic block array if needed. */
6141 cfg = dest_cfun->cfg;
6142 cfg->x_n_basic_blocks++;
6143 if (bb->index >= cfg->x_last_basic_block)
6144 cfg->x_last_basic_block = bb->index + 1;
6145
6146 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
6147 if ((unsigned) cfg->x_last_basic_block >= old_len)
6148 {
6149 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6150 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
6151 new_len);
6152 }
6153
6154 VEC_replace (basic_block, cfg->x_basic_block_info,
6155 bb->index, bb);
6156
6157 /* Remap the variables in phi nodes. */
6158 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6159 {
6160 gimple phi = gsi_stmt (si);
6161 use_operand_p use;
6162 tree op = PHI_RESULT (phi);
6163 ssa_op_iter oi;
6164
6165 if (!is_gimple_reg (op))
6166 {
6167 /* Remove the phi nodes for virtual operands (alias analysis will be
6168 run for the new function, anyway). */
6169 remove_phi_node (&si, true);
6170 continue;
6171 }
6172
6173 SET_PHI_RESULT (phi,
6174 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6175 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6176 {
6177 op = USE_FROM_PTR (use);
6178 if (TREE_CODE (op) == SSA_NAME)
6179 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6180 }
6181
6182 gsi_next (&si);
6183 }
6184
6185 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6186 {
6187 gimple stmt = gsi_stmt (si);
6188 struct walk_stmt_info wi;
6189
6190 memset (&wi, 0, sizeof (wi));
6191 wi.info = d;
6192 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6193
6194 if (gimple_code (stmt) == GIMPLE_LABEL)
6195 {
6196 tree label = gimple_label_label (stmt);
6197 int uid = LABEL_DECL_UID (label);
6198
6199 gcc_assert (uid > -1);
6200
6201 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
6202 if (old_len <= (unsigned) uid)
6203 {
6204 new_len = 3 * uid / 2 + 1;
6205 VEC_safe_grow_cleared (basic_block, gc,
6206 cfg->x_label_to_block_map, new_len);
6207 }
6208
6209 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
6210 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
6211
6212 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6213
6214 if (uid >= dest_cfun->cfg->last_label_uid)
6215 dest_cfun->cfg->last_label_uid = uid + 1;
6216 }
6217
6218 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6219 remove_stmt_from_eh_lp_fn (cfun, stmt);
6220
6221 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6222 gimple_remove_stmt_histograms (cfun, stmt);
6223
6224 /* We cannot leave any operands allocated from the operand caches of
6225 the current function. */
6226 free_stmt_operands (stmt);
6227 push_cfun (dest_cfun);
6228 update_stmt (stmt);
6229 pop_cfun ();
6230 }
6231
6232 FOR_EACH_EDGE (e, ei, bb->succs)
6233 if (e->goto_locus)
6234 {
6235 tree block = e->goto_block;
6236 if (d->orig_block == NULL_TREE
6237 || block == d->orig_block)
6238 e->goto_block = d->new_block;
6239 #ifdef ENABLE_CHECKING
6240 else if (block != d->new_block)
6241 {
6242 while (block && block != d->orig_block)
6243 block = BLOCK_SUPERCONTEXT (block);
6244 gcc_assert (block);
6245 }
6246 #endif
6247 }
6248 }
6249
6250 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6251 the outermost EH region. Use REGION as the incoming base EH region. */
6252
6253 static eh_region
6254 find_outermost_region_in_block (struct function *src_cfun,
6255 basic_block bb, eh_region region)
6256 {
6257 gimple_stmt_iterator si;
6258
6259 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6260 {
6261 gimple stmt = gsi_stmt (si);
6262 eh_region stmt_region;
6263 int lp_nr;
6264
6265 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6266 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6267 if (stmt_region)
6268 {
6269 if (region == NULL)
6270 region = stmt_region;
6271 else if (stmt_region != region)
6272 {
6273 region = eh_region_outermost (src_cfun, stmt_region, region);
6274 gcc_assert (region != NULL);
6275 }
6276 }
6277 }
6278
6279 return region;
6280 }
6281
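/* Callback for duplicate_eh_regions: create a new artificial label to
   replace label DECL, record the DECL -> new-label mapping in the hash
   table passed in DATA, and return the new label.  */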
6282 static tree
6283 new_label_mapper (tree decl, void *data)
6284 {
6285 htab_t hash = (htab_t) data;
6286 struct tree_map *m;
6287 void **slot;
6288
6289 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6290
6291 m = XNEW (struct tree_map);
6292 m->hash = DECL_UID (decl);
6293 m->base.from = decl;
6294 m->to = create_artificial_label (UNKNOWN_LOCATION);
6295 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6296 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6297 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6298
6299 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6300 gcc_assert (*slot == NULL);
6301
6302 *slot = m;
6303
6304 return m->to;
6305 }
6306
6307 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6308 subblocks. */
6309
6310 static void
6311 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6312 tree to_context)
6313 {
6314 tree *tp, t;
6315
6316 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6317 {
6318 t = *tp;
6319 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6320 continue;
6321 replace_by_duplicate_decl (&t, vars_map, to_context);
6322 if (t != *tp)
6323 {
6324 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6325 {
6326 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6327 DECL_HAS_VALUE_EXPR_P (t) = 1;
6328 }
6329 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6330 *tp = t;
6331 }
6332 }
6333
6334 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6335 replace_block_vars_by_duplicates (block, vars_map, to_context);
6336 }
6337
6338 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6339 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6340 single basic block in the original CFG and the new basic block is
6341 returned. DEST_CFUN must not have a CFG yet.
6342
6343 Note that the region need not be a pure SESE region. Blocks inside
6344 the region may contain calls to abort/exit. The only restriction
6345 is that ENTRY_BB should be the only entry point and it must
6346 dominate EXIT_BB.
6347
6348 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6349 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6350 to the new function.
6351
6352 All local variables referenced in the region are assumed to be in
6353 the corresponding BLOCK_VARS and unexpanded variable lists
6354 associated with DEST_CFUN. */
6355
6356 basic_block
6357 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6358 basic_block exit_bb, tree orig_block)
6359 {
6360 VEC(basic_block,heap) *bbs, *dom_bbs;
6361 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6362 basic_block after, bb, *entry_pred, *exit_succ, abb;
6363 struct function *saved_cfun = cfun;
6364 int *entry_flag, *exit_flag;
6365 unsigned *entry_prob, *exit_prob;
6366 unsigned i, num_entry_edges, num_exit_edges;
6367 edge e;
6368 edge_iterator ei;
6369 htab_t new_label_map;
6370 struct pointer_map_t *vars_map, *eh_map;
6371 struct loop *loop = entry_bb->loop_father;
6372 struct move_stmt_d d;
6373
6374 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6375 region. */
6376 gcc_assert (entry_bb != exit_bb
6377 && (!exit_bb
6378 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6379
6380 /* Collect all the blocks in the region. Manually add ENTRY_BB
6381 because it won't be added by dfs_enumerate_from. */
6382 bbs = NULL;
6383 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6384 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6385
6386 /* The blocks that used to be dominated by something in BBS will now be
6387 dominated by the new block. */
6388 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6389 VEC_address (basic_block, bbs),
6390 VEC_length (basic_block, bbs));
6391
6392 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6393 the predecessor edges to ENTRY_BB and the successor edges to
6394 EXIT_BB so that we can re-attach them to the new basic block that
6395 will replace the region. */
6396 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6397 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6398 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6399 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6400 i = 0;
6401 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6402 {
6403 entry_prob[i] = e->probability;
6404 entry_flag[i] = e->flags;
6405 entry_pred[i++] = e->src;
6406 remove_edge (e);
6407 }
6408
6409 if (exit_bb)
6410 {
6411 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6412 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6413 sizeof (basic_block));
6414 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6415 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6416 i = 0;
6417 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6418 {
6419 exit_prob[i] = e->probability;
6420 exit_flag[i] = e->flags;
6421 exit_succ[i++] = e->dest;
6422 remove_edge (e);
6423 }
6424 }
6425 else
6426 {
6427 num_exit_edges = 0;
6428 exit_succ = NULL;
6429 exit_flag = NULL;
6430 exit_prob = NULL;
6431 }
6432
6433 /* Switch context to the child function to initialize DEST_FN's CFG. */
6434 gcc_assert (dest_cfun->cfg == NULL);
6435 push_cfun (dest_cfun);
6436
6437 init_empty_tree_cfg ();
6438
6439 /* Initialize EH information for the new function. */
6440 eh_map = NULL;
6441 new_label_map = NULL;
6442 if (saved_cfun->eh)
6443 {
6444 eh_region region = NULL;
6445
6446 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6447 region = find_outermost_region_in_block (saved_cfun, bb, region);
6448
6449 init_eh_for_function ();
6450 if (region != NULL)
6451 {
6452 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6453 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6454 new_label_mapper, new_label_map);
6455 }
6456 }
6457
6458 pop_cfun ();
6459
6460 /* Move blocks from BBS into DEST_CFUN. */
6461 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6462 after = dest_cfun->cfg->x_entry_block_ptr;
6463 vars_map = pointer_map_create ();
6464
6465 memset (&d, 0, sizeof (d));
6466 d.orig_block = orig_block;
6467 d.new_block = DECL_INITIAL (dest_cfun->decl);
6468 d.from_context = cfun->decl;
6469 d.to_context = dest_cfun->decl;
6470 d.vars_map = vars_map;
6471 d.new_label_map = new_label_map;
6472 d.eh_map = eh_map;
6473 d.remap_decls_p = true;
6474
6475 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6476 {
6477 /* No need to update edge counts on the last block. It has
6478 already been updated earlier when we detached the region from
6479 the original CFG. */
6480 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6481 after = bb;
6482 }
6483
6484 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6485 if (orig_block)
6486 {
6487 tree block;
6488 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6489 == NULL_TREE);
6490 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6491 = BLOCK_SUBBLOCKS (orig_block);
6492 for (block = BLOCK_SUBBLOCKS (orig_block);
6493 block; block = BLOCK_CHAIN (block))
6494 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6495 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6496 }
6497
6498 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6499 vars_map, dest_cfun->decl);
6500
6501 if (new_label_map)
6502 htab_delete (new_label_map);
6503 if (eh_map)
6504 pointer_map_destroy (eh_map);
6505 pointer_map_destroy (vars_map);
6506
6507 /* Rewire the entry and exit blocks. The entry block of the region
6508 becomes the successor of DEST_FN's ENTRY_BLOCK_PTR and the exit
6509 block of the region becomes the predecessor of DEST_FN's
6510 EXIT_BLOCK_PTR. We need to switch CFUN between DEST_CFUN and
6511 SAVED_CFUN so that the various CFG manipulation functions get to
6512 the right CFG.
6513
6514 FIXME, this is silly. The CFG ought to become a parameter to
6515 these helpers. */
6516 push_cfun (dest_cfun);
6517 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6518 if (exit_bb)
6519 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6520 pop_cfun ();
6521
6522 /* Back in the original function, the SESE region has disappeared,
6523 so create a new basic block in its place. */
6524 bb = create_empty_bb (entry_pred[0]);
6525 if (current_loops)
6526 add_bb_to_loop (bb, loop);
6527 for (i = 0; i < num_entry_edges; i++)
6528 {
6529 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6530 e->probability = entry_prob[i];
6531 }
6532
6533 for (i = 0; i < num_exit_edges; i++)
6534 {
6535 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6536 e->probability = exit_prob[i];
6537 }
6538
6539 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6540 FOR_EACH_VEC_ELT (basic_block, dom_bbs, i, abb)
6541 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6542 VEC_free (basic_block, heap, dom_bbs);
6543
6544 if (exit_bb)
6545 {
6546 free (exit_prob);
6547 free (exit_flag);
6548 free (exit_succ);
6549 }
6550 free (entry_prob);
6551 free (entry_flag);
6552 free (entry_pred);
6553 VEC_free (basic_block, heap, bbs);
6554
6555 return bb;
6556 }
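
/* A minimal caller sketch for move_sese_region_to_fn.  The names
   child_fn, entry_bb, exit_bb and orig_block stand for data the caller
   already has; DECL_STRUCT_FUNCTION (child_fn) must exist and have no
   CFG yet:

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb,
                                 orig_block);

   On return, the region lives in CHILD_CFUN and NEW_BB is the single
   basic block that replaces it in the original CFG.  */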
6557
6558
6559 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6560 tree-pass.h). */
6561
6562 void
6563 dump_function_to_file (tree fn, FILE *file, int flags)
6564 {
6565 tree arg, var;
6566 struct function *dsf;
6567 bool ignore_topmost_bind = false, any_var = false;
6568 basic_block bb;
6569 tree chain;
6570 bool tmclone = TREE_CODE (fn) == FUNCTION_DECL && decl_is_tm_clone (fn);
6571
6572 fprintf (file, "%s %s(", lang_hooks.decl_printable_name (fn, 2),
6573 tmclone ? "[tm-clone] " : "");
6574
6575 arg = DECL_ARGUMENTS (fn);
6576 while (arg)
6577 {
6578 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6579 fprintf (file, " ");
6580 print_generic_expr (file, arg, dump_flags);
6581 if (flags & TDF_VERBOSE)
6582 print_node (file, "", arg, 4);
6583 if (DECL_CHAIN (arg))
6584 fprintf (file, ", ");
6585 arg = DECL_CHAIN (arg);
6586 }
6587 fprintf (file, ")\n");
6588
6589 if (flags & TDF_VERBOSE)
6590 print_node (file, "", fn, 2);
6591
6592 dsf = DECL_STRUCT_FUNCTION (fn);
6593 if (dsf && (flags & TDF_EH))
6594 dump_eh_tree (file, dsf);
6595
6596 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6597 {
6598 dump_node (fn, TDF_SLIM | flags, file);
6599 return;
6600 }
6601
6602 /* Switch CFUN to point to FN. */
6603 push_cfun (DECL_STRUCT_FUNCTION (fn));
6604
6605 /* When GIMPLE is lowered, the variables are no longer available in
6606 BIND_EXPRs, so display them separately. */
6607 if (cfun && cfun->decl == fn && !VEC_empty (tree, cfun->local_decls))
6608 {
6609 unsigned ix;
6610 ignore_topmost_bind = true;
6611
6612 fprintf (file, "{\n");
6613 FOR_EACH_LOCAL_DECL (cfun, ix, var)
6614 {
6615 print_generic_decl (file, var, flags);
6616 if (flags & TDF_VERBOSE)
6617 print_node (file, "", var, 4);
6618 fprintf (file, "\n");
6619
6620 any_var = true;
6621 }
6622 }
6623
6624 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6625 {
6626 /* If the CFG has been built, emit a CFG-based dump. */
6627 check_bb_profile (ENTRY_BLOCK_PTR, file);
6628 if (!ignore_topmost_bind)
6629 fprintf (file, "{\n");
6630
6631 if (any_var && n_basic_blocks)
6632 fprintf (file, "\n");
6633
6634 FOR_EACH_BB (bb)
6635 gimple_dump_bb (bb, file, 2, flags);
6636
6637 fprintf (file, "}\n");
6638 check_bb_profile (EXIT_BLOCK_PTR, file);
6639 }
6640 else if (DECL_SAVED_TREE (fn) == NULL)
6641 {
6642 /* The function is now in GIMPLE form but the CFG has not been
6643 built yet. Emit the single sequence of GIMPLE statements
6644 that make up its body. */
6645 gimple_seq body = gimple_body (fn);
6646
6647 if (gimple_seq_first_stmt (body)
6648 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6649 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6650 print_gimple_seq (file, body, 0, flags);
6651 else
6652 {
6653 if (!ignore_topmost_bind)
6654 fprintf (file, "{\n");
6655
6656 if (any_var)
6657 fprintf (file, "\n");
6658
6659 print_gimple_seq (file, body, 2, flags);
6660 fprintf (file, "}\n");
6661 }
6662 }
6663 else
6664 {
6665 int indent;
6666
6667 /* Make a tree based dump. */
6668 chain = DECL_SAVED_TREE (fn);
6669
6670 if (chain && TREE_CODE (chain) == BIND_EXPR)
6671 {
6672 if (ignore_topmost_bind)
6673 {
6674 chain = BIND_EXPR_BODY (chain);
6675 indent = 2;
6676 }
6677 else
6678 indent = 0;
6679 }
6680 else
6681 {
6682 if (!ignore_topmost_bind)
6683 fprintf (file, "{\n");
6684 indent = 2;
6685 }
6686
6687 if (any_var)
6688 fprintf (file, "\n");
6689
6690 print_generic_stmt_indented (file, chain, flags, indent);
6691 if (ignore_topmost_bind)
6692 fprintf (file, "}\n");
6693 }
6694
6695 if (flags & TDF_ENUMERATE_LOCALS)
6696 dump_enumerated_decls (file, flags);
6697 fprintf (file, "\n\n");
6698
6699 /* Restore CFUN. */
6700 pop_cfun ();
6701 }
6702
6703
6704 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree-pass.h). */
6705
6706 DEBUG_FUNCTION void
6707 debug_function (tree fn, int flags)
6708 {
6709 dump_function_to_file (fn, stderr, flags);
6710 }
6711
6712
6713 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6714
6715 static void
6716 print_pred_bbs (FILE *file, basic_block bb)
6717 {
6718 edge e;
6719 edge_iterator ei;
6720
6721 FOR_EACH_EDGE (e, ei, bb->preds)
6722 fprintf (file, "bb_%d ", e->src->index);
6723 }
6724
6725
6726 /* Print on FILE the indexes for the successors of basic_block BB. */
6727
6728 static void
6729 print_succ_bbs (FILE *file, basic_block bb)
6730 {
6731 edge e;
6732 edge_iterator ei;
6733
6734 FOR_EACH_EDGE (e, ei, bb->succs)
6735 fprintf (file, "bb_%d ", e->dest->index);
6736 }
6737
6738 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
6739
6740 void
6741 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6742 {
6743 char *s_indent = (char *) alloca ((size_t) indent + 1);
6744 memset ((void *) s_indent, ' ', (size_t) indent);
6745 s_indent[indent] = '\0';
6746
6747 /* Print basic_block's header. */
6748 if (verbosity >= 2)
6749 {
6750 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6751 print_pred_bbs (file, bb);
6752 fprintf (file, "}, succs = {");
6753 print_succ_bbs (file, bb);
6754 fprintf (file, "})\n");
6755 }
6756
6757 /* Print basic_block's body. */
6758 if (verbosity >= 3)
6759 {
6760 fprintf (file, "%s {\n", s_indent);
6761 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6762 fprintf (file, "%s }\n", s_indent);
6763 }
6764 }
6765
6766 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6767
6768 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
6769 the VERBOSITY level, this outputs the contents of the loop or just
6770 its structure. */
6771
6772 static void
6773 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6774 {
6775 char *s_indent;
6776 basic_block bb;
6777
6778 if (loop == NULL)
6779 return;
6780
6781 s_indent = (char *) alloca ((size_t) indent + 1);
6782 memset ((void *) s_indent, ' ', (size_t) indent);
6783 s_indent[indent] = '\0';
6784
6785 /* Print loop's header. */
6786 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6787 loop->num, loop->header->index, loop->latch->index);
6788 fprintf (file, ", niter = ");
6789 print_generic_expr (file, loop->nb_iterations, 0);
6790
6791 if (loop->any_upper_bound)
6792 {
6793 fprintf (file, ", upper_bound = ");
6794 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6795 }
6796
6797 if (loop->any_estimate)
6798 {
6799 fprintf (file, ", estimate = ");
6800 dump_double_int (file, loop->nb_iterations_estimate, true);
6801 }
6802 fprintf (file, ")\n");
6803
6804 /* Print loop's body. */
6805 if (verbosity >= 1)
6806 {
6807 fprintf (file, "%s{\n", s_indent);
6808 FOR_EACH_BB (bb)
6809 if (bb->loop_father == loop)
6810 print_loops_bb (file, bb, indent, verbosity);
6811
6812 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6813 fprintf (file, "%s}\n", s_indent);
6814 }
6815 }
6816
6817 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6818 spaces. Depending on the VERBOSITY level, this outputs the
6819 contents of the loops, or just their structure. */
6820
6821 static void
6822 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6823 {
6824 if (loop == NULL)
6825 return;
6826
6827 print_loop (file, loop, indent, verbosity);
6828 print_loop_and_siblings (file, loop->next, indent, verbosity);
6829 }
6830
6831 /* Starting from the entry point of the program, pretty print the
6832 loop structure on FILE. */
6833
6834 void
6835 print_loops (FILE *file, int verbosity)
6836 {
6837 basic_block bb;
6838
6839 bb = ENTRY_BLOCK_PTR;
6840 if (bb && bb->loop_father)
6841 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6842 }
6843
6844
6845 /* Debug the loop structure at tree level, at some VERBOSITY level. */
6846
6847 DEBUG_FUNCTION void
6848 debug_loops (int verbosity)
6849 {
6850 print_loops (stderr, verbosity);
6851 }
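
/* For instance, from a debugger session, "call debug_loops (3)" prints
   the whole loop tree including, per print_loops_bb above, the headers
   and bodies of the basic blocks in each loop.  */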
6852
6853 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6854
6855 DEBUG_FUNCTION void
6856 debug_loop (struct loop *loop, int verbosity)
6857 {
6858 print_loop (stderr, loop, 0, verbosity);
6859 }
6860
6861 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6862 level. */
6863
6864 DEBUG_FUNCTION void
6865 debug_loop_num (unsigned num, int verbosity)
6866 {
6867 debug_loop (get_loop (num), verbosity);
6868 }
6869
6870 /* Return true if BB ends with a call, possibly followed by some
6871 instructions that must stay with the call. Return false
6872 otherwise. */
6873
6874 static bool
6875 gimple_block_ends_with_call_p (basic_block bb)
6876 {
6877 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6878 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6879 }
6880
6881
6882 /* Return true if BB ends with a conditional branch. Return false
6883 otherwise. */
6884
6885 static bool
6886 gimple_block_ends_with_condjump_p (const_basic_block bb)
6887 {
6888 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6889 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6890 }
6891
6892
6893 /* Return true if we need to add fake edge to exit at statement T.
6894 Helper function for gimple_flow_call_edges_add. */
6895
6896 static bool
6897 need_fake_edge_p (gimple t)
6898 {
6899 tree fndecl = NULL_TREE;
6900 int call_flags = 0;
6901
6902 /* NORETURN and LONGJMP calls already have an edge to exit.
6903 CONST and PURE calls do not need one.
6904 We don't currently check for CONST and PURE here, although
6905 it would be a good idea, because those attributes are
6906 figured out from the RTL in mark_constant_function, and
6907 the counter incrementation code from -fprofile-arcs
6908 leads to different results from -fbranch-probabilities. */
6909 if (is_gimple_call (t))
6910 {
6911 fndecl = gimple_call_fndecl (t);
6912 call_flags = gimple_call_flags (t);
6913 }
6914
6915 if (is_gimple_call (t)
6916 && fndecl
6917 && DECL_BUILT_IN (fndecl)
6918 && (call_flags & ECF_NOTHROW)
6919 && !(call_flags & ECF_RETURNS_TWICE)
6920 /* fork() doesn't really return twice, but wrapping it in
6921 __gcov_fork(), which calls __gcov_flush() and clears the
6922 counters before forking, has the same effect as
6923 returning twice. Force a fake edge. */
6924 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6925 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6926 return false;
6927
6928 if (is_gimple_call (t))
6929 {
6930 edge_iterator ei;
6931 edge e;
6932 basic_block bb;
6933
6934 if (!(call_flags & ECF_NORETURN))
6935 return true;
6936
6937 bb = gimple_bb (t);
6938 FOR_EACH_EDGE (e, ei, bb->succs)
6939 if ((e->flags & EDGE_FAKE) == 0)
6940 return true;
6941 }
6942
6943 if (gimple_code (t) == GIMPLE_ASM
6944 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6945 return true;
6946
6947 return false;
6948 }
6949
6950
6951 /* Add fake edges to the function exit for any non-constant and
6952 non-noreturn calls (or noreturn calls with EH/abnormal edges) and
6953 for volatile inline assembly, in the bitmap of blocks specified by
6954 BLOCKS or in the whole CFG if BLOCKS is zero. Return the number of blocks
6955 that were split.
6956
6957 The goal is to expose cases in which entering a basic block does
6958 not imply that all subsequent instructions must be executed. */
6959
6960 static int
6961 gimple_flow_call_edges_add (sbitmap blocks)
6962 {
6963 int i;
6964 int blocks_split = 0;
6965 int last_bb = last_basic_block;
6966 bool check_last_block = false;
6967
6968 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6969 return 0;
6970
6971 if (! blocks)
6972 check_last_block = true;
6973 else
6974 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6975
6976 /* In the last basic block, before epilogue generation, there will be
6977 a fallthru edge to EXIT. Special care is required if the last insn
6978 of the last basic block is a call because make_edge folds duplicate
6979 edges, which would result in the fallthru edge also being marked
6980 fake, which would result in the fallthru edge being removed by
6981 remove_fake_edges, which would result in an invalid CFG.
6982
6983 Moreover, we can't elide the outgoing fake edge, since the block
6984 profiler needs to take this into account in order to solve the minimal
6985 spanning tree in the case that the call doesn't return.
6986
6987 Handle this by adding a dummy instruction in a new last basic block. */
6988 if (check_last_block)
6989 {
6990 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6991 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6992 gimple t = NULL;
6993
6994 if (!gsi_end_p (gsi))
6995 t = gsi_stmt (gsi);
6996
6997 if (t && need_fake_edge_p (t))
6998 {
6999 edge e;
7000
7001 e = find_edge (bb, EXIT_BLOCK_PTR);
7002 if (e)
7003 {
7004 gsi_insert_on_edge (e, gimple_build_nop ());
7005 gsi_commit_edge_inserts ();
7006 }
7007 }
7008 }
7009
7010 /* Now add fake edges to the function exit for any non-constant
7011 calls since there is no way that we can determine if they will
7012 return or not... */
7013 for (i = 0; i < last_bb; i++)
7014 {
7015 basic_block bb = BASIC_BLOCK (i);
7016 gimple_stmt_iterator gsi;
7017 gimple stmt, last_stmt;
7018
7019 if (!bb)
7020 continue;
7021
7022 if (blocks && !TEST_BIT (blocks, i))
7023 continue;
7024
7025 gsi = gsi_last_nondebug_bb (bb);
7026 if (!gsi_end_p (gsi))
7027 {
7028 last_stmt = gsi_stmt (gsi);
7029 do
7030 {
7031 stmt = gsi_stmt (gsi);
7032 if (need_fake_edge_p (stmt))
7033 {
7034 edge e;
7035
7036 /* The handling above of the final block before the
7037 epilogue should be enough to verify that there is
7038 no edge to the exit block in CFG already.
7039 Calling make_edge in such case would cause us to
7040 mark that edge as fake and remove it later. */
7041 #ifdef ENABLE_CHECKING
7042 if (stmt == last_stmt)
7043 {
7044 e = find_edge (bb, EXIT_BLOCK_PTR);
7045 gcc_assert (e == NULL);
7046 }
7047 #endif
7048
7049 /* Note that the following may create a new basic block
7050 and renumber the existing basic blocks. */
7051 if (stmt != last_stmt)
7052 {
7053 e = split_block (bb, stmt);
7054 if (e)
7055 blocks_split++;
7056 }
7057 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7058 }
7059 gsi_prev (&gsi);
7060 }
7061 while (!gsi_end_p (gsi));
7062 }
7063 }
7064
7065 if (blocks_split)
7066 verify_flow_info ();
7067
7068 return blocks_split;
7069 }
7070
7071 /* Removes edge E and all the blocks dominated by it, and updates dominance
7072 information. The IL in E->src needs to be updated separately.
7073 If dominance info is not available, only the edge E is removed. */
7074
7075 void
7076 remove_edge_and_dominated_blocks (edge e)
7077 {
7078 VEC (basic_block, heap) *bbs_to_remove = NULL;
7079 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
7080 bitmap df, df_idom;
7081 edge f;
7082 edge_iterator ei;
7083 bool none_removed = false;
7084 unsigned i;
7085 basic_block bb, dbb;
7086 bitmap_iterator bi;
7087
7088 if (!dom_info_available_p (CDI_DOMINATORS))
7089 {
7090 remove_edge (e);
7091 return;
7092 }
7093
7094 /* No updating is needed for edges to exit. */
7095 if (e->dest == EXIT_BLOCK_PTR)
7096 {
7097 if (cfgcleanup_altered_bbs)
7098 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7099 remove_edge (e);
7100 return;
7101 }
7102
7103 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7104 that is not dominated by E->dest, then this set is empty. Otherwise,
7105 all the basic blocks dominated by E->dest are removed.
7106
7107 Also, to DF_IDOM we store the immediate dominators of the blocks in
7108 the dominance frontier of E (i.e., of the successors of the
7109 removed blocks, if there are any, and of E->dest otherwise). */
7110 FOR_EACH_EDGE (f, ei, e->dest->preds)
7111 {
7112 if (f == e)
7113 continue;
7114
7115 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7116 {
7117 none_removed = true;
7118 break;
7119 }
7120 }
7121
7122 df = BITMAP_ALLOC (NULL);
7123 df_idom = BITMAP_ALLOC (NULL);
7124
7125 if (none_removed)
7126 bitmap_set_bit (df_idom,
7127 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7128 else
7129 {
7130 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7131 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7132 {
7133 FOR_EACH_EDGE (f, ei, bb->succs)
7134 {
7135 if (f->dest != EXIT_BLOCK_PTR)
7136 bitmap_set_bit (df, f->dest->index);
7137 }
7138 }
7139 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7140 bitmap_clear_bit (df, bb->index);
7141
7142 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7143 {
7144 bb = BASIC_BLOCK (i);
7145 bitmap_set_bit (df_idom,
7146 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7147 }
7148 }
7149
7150 if (cfgcleanup_altered_bbs)
7151 {
7152 /* Record the set of the altered basic blocks. */
7153 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7154 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7155 }
7156
7157 /* Remove E and the cancelled blocks. */
7158 if (none_removed)
7159 remove_edge (e);
7160 else
7161 {
7162 /* Walk backwards so as to get a chance to substitute all
7163 released DEFs into debug stmts. See
7164 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7165 details. */
7166 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
7167 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
7168 }
7169
7170 /* Update the dominance information. The immediate dominator may change only
7171 for blocks whose immediate dominator belongs to DF_IDOM:
7172
7173 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7174 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7175 Z dominates X after the removal. Before removal, there exists a path P
7176 from Y to X that avoids Z. Let F be the last edge on P that is
7177 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7178 dominates W, and because of P, Z does not dominate W), and W belongs to
7179 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7180 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7181 {
7182 bb = BASIC_BLOCK (i);
7183 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7184 dbb;
7185 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7186 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
7187 }
7188
7189 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7190
7191 BITMAP_FREE (df);
7192 BITMAP_FREE (df_idom);
7193 VEC_free (basic_block, heap, bbs_to_remove);
7194 VEC_free (basic_block, heap, bbs_to_fix_dom);
7195 }
7196
7197 /* Purge dead EH edges from basic block BB. */
7198
7199 bool
7200 gimple_purge_dead_eh_edges (basic_block bb)
7201 {
7202 bool changed = false;
7203 edge e;
7204 edge_iterator ei;
7205 gimple stmt = last_stmt (bb);
7206
7207 if (stmt && stmt_can_throw_internal (stmt))
7208 return false;
7209
7210 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7211 {
7212 if (e->flags & EDGE_EH)
7213 {
7214 remove_edge_and_dominated_blocks (e);
7215 changed = true;
7216 }
7217 else
7218 ei_next (&ei);
7219 }
7220
7221 return changed;
7222 }
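
/* A typical calling pattern, assuming a pass has just simplified the
   last statement of a block so that it can no longer throw;
   maybe_clean_eh_stmt is the tree-eh.c helper that unlinks a statement
   that cannot throw anymore from its EH landing pad:

     if (maybe_clean_eh_stmt (stmt)
         && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
       cfg_changed = true;  */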
7223
7224 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7225
7226 bool
7227 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7228 {
7229 bool changed = false;
7230 unsigned i;
7231 bitmap_iterator bi;
7232
7233 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7234 {
7235 basic_block bb = BASIC_BLOCK (i);
7236
7237 /* Earlier gimple_purge_dead_eh_edges could have removed
7238 this basic block already. */
7239 gcc_assert (bb || changed);
7240 if (bb != NULL)
7241 changed |= gimple_purge_dead_eh_edges (bb);
7242 }
7243
7244 return changed;
7245 }
7246
7247 /* Purge dead abnormal call edges from basic block BB. */
7248
7249 bool
7250 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7251 {
7252 bool changed = false;
7253 edge e;
7254 edge_iterator ei;
7255 gimple stmt = last_stmt (bb);
7256
7257 if (!cfun->has_nonlocal_label)
7258 return false;
7259
7260 if (stmt && stmt_can_make_abnormal_goto (stmt))
7261 return false;
7262
7263 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7264 {
7265 if (e->flags & EDGE_ABNORMAL)
7266 {
7267 remove_edge_and_dominated_blocks (e);
7268 changed = true;
7269 }
7270 else
7271 ei_next (&ei);
7272 }
7273
7274 return changed;
7275 }
7276
7277 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7278
7279 bool
7280 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7281 {
7282 bool changed = false;
7283 unsigned i;
7284 bitmap_iterator bi;
7285
7286 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7287 {
7288 basic_block bb = BASIC_BLOCK (i);
7289
7290 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7291 this basic block already. */
7292 gcc_assert (bb || changed);
7293 if (bb != NULL)
7294 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7295 }
7296
7297 return changed;
7298 }
7299
7300 /* This function is called whenever a new edge is created or
7301 redirected. */
7302
7303 static void
7304 gimple_execute_on_growing_pred (edge e)
7305 {
7306 basic_block bb = e->dest;
7307
7308 if (!gimple_seq_empty_p (phi_nodes (bb)))
7309 reserve_phi_args_for_new_edge (bb);
7310 }
7311
7312 /* This function is called immediately before edge E is removed from
7313 the edge vector E->dest->preds. */
7314
7315 static void
7316 gimple_execute_on_shrinking_pred (edge e)
7317 {
7318 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7319 remove_phi_args (e);
7320 }
7321
7322 /*---------------------------------------------------------------------------
7323 Helper functions for Loop versioning
7324 ---------------------------------------------------------------------------*/
7325
7326 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7327 of 'first'. Both of them are dominated by 'new_head' basic block. When
7328 'new_head' was created by splitting 'second's incoming edge, it received
7329 phi arguments on that edge from split_edge(). Later, an additional edge
7330 'e' was created to connect 'new_head' and 'first'. This routine now adds,
7331 on this additional edge 'e', the phi args that the new_head-to-second edge
7332 received as part of the edge splitting. */
7333
7334 static void
7335 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7336 basic_block new_head, edge e)
7337 {
7338 gimple phi1, phi2;
7339 gimple_stmt_iterator psi1, psi2;
7340 tree def;
7341 edge e2 = find_edge (new_head, second);
7342
7343 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7344 edge, we should always have an edge from NEW_HEAD to SECOND. */
7345 gcc_assert (e2 != NULL);
7346
7347 /* Browse all 'second' basic block phi nodes and add phi args to
7348 edge 'e' for 'first' head. PHI args are always in correct order. */
7349
7350 for (psi2 = gsi_start_phis (second),
7351 psi1 = gsi_start_phis (first);
7352 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7353 gsi_next (&psi2), gsi_next (&psi1))
7354 {
7355 phi1 = gsi_stmt (psi1);
7356 phi2 = gsi_stmt (psi2);
7357 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7358 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7359 }
7360 }
7361
7362
7363 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7364 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7365 the destination of the ELSE part. */
7366
7367 static void
7368 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7369 basic_block second_head ATTRIBUTE_UNUSED,
7370 basic_block cond_bb, void *cond_e)
7371 {
7372 gimple_stmt_iterator gsi;
7373 gimple new_cond_expr;
7374 tree cond_expr = (tree) cond_e;
7375 edge e0;
7376
7377 /* Build the new conditional expression. */
7378 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7379 NULL_TREE, NULL_TREE);
7380
7381 /* Add new cond in cond_bb. */
7382 gsi = gsi_last_bb (cond_bb);
7383 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7384
7385 /* Adjust edges appropriately to connect new head with first head
7386 as well as second head. */
7387 e0 = single_succ_edge (cond_bb);
7388 e0->flags &= ~EDGE_FALLTHRU;
7389 e0->flags |= EDGE_FALSE_VALUE;
7390 }
7391
7392 struct cfg_hooks gimple_cfg_hooks = {
7393 "gimple",
7394 gimple_verify_flow_info,
7395 gimple_dump_bb, /* dump_bb */
7396 create_bb, /* create_basic_block */
7397 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7398 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7399 gimple_can_remove_branch_p, /* can_remove_branch_p */
7400 remove_bb, /* delete_basic_block */
7401 gimple_split_block, /* split_block */
7402 gimple_move_block_after, /* move_block_after */
7403 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7404 gimple_merge_blocks, /* merge_blocks */
7405 gimple_predict_edge, /* predict_edge */
7406 gimple_predicted_by_p, /* predicted_by_p */
7407 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7408 gimple_duplicate_bb, /* duplicate_block */
7409 gimple_split_edge, /* split_edge */
7410 gimple_make_forwarder_block, /* make_forwarder_block */
7411 NULL, /* tidy_fallthru_edge */
7412 NULL, /* force_nonfallthru */
7413 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7414 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7415 gimple_flow_call_edges_add, /* flow_call_edges_add */
7416 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7417 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7418 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7419 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7420 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
7421 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7422 flush_pending_stmts /* flush_pending_stmts */
7423 };
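
/* These hooks are installed by gimple_register_cfg_hooks (cfghooks.c);
   afterwards the generic entry points in cfghooks.c (split_edge,
   redirect_edge_and_branch, ...) dispatch to the gimple implementations
   above.  */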
7424
7425 /* Split all critical edges, i.e. edges whose source basic block has
7426 multiple successors and whose destination has multiple predecessors. */
7427
7428 static unsigned int
7429 split_critical_edges (void)
7430 {
7431 basic_block bb;
7432 edge e;
7433 edge_iterator ei;
7434
7435 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7436 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7437 mappings around the calls to split_edge. */
7438 start_recording_case_labels ();
7439 FOR_ALL_BB (bb)
7440 {
7441 FOR_EACH_EDGE (e, ei, bb->succs)
7442 {
7443 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7444 split_edge (e);
7445 /* PRE inserts statements on edges and expects that,
7446 since split_critical_edges was done beforehand, committing edge
7447 insertions will not split more edges. In addition to critical
7448 edges we must split edges whose source block has multiple
7449 successors and ends in a control flow statement, such as RESX.
7450 Go ahead and split them too. This matches the logic in
7451 gimple_find_edge_insert_loc. */
7452 else if ((!single_pred_p (e->dest)
7453 || !gimple_seq_empty_p (phi_nodes (e->dest))
7454 || e->dest == EXIT_BLOCK_PTR)
7455 && e->src != ENTRY_BLOCK_PTR
7456 && !(e->flags & EDGE_ABNORMAL))
7457 {
7458 gimple_stmt_iterator gsi;
7459
7460 gsi = gsi_last_bb (e->src);
7461 if (!gsi_end_p (gsi)
7462 && stmt_ends_bb_p (gsi_stmt (gsi))
7463 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7464 && !gimple_call_builtin_p (gsi_stmt (gsi),
7465 BUILT_IN_RETURN)))
7466 split_edge (e);
7467 }
7468 }
7469 }
7470 end_recording_case_labels ();
7471 return 0;
7472 }
7473
7474 struct gimple_opt_pass pass_split_crit_edges =
7475 {
7476 {
7477 GIMPLE_PASS,
7478 "crited", /* name */
7479 NULL, /* gate */
7480 split_critical_edges, /* execute */
7481 NULL, /* sub */
7482 NULL, /* next */
7483 0, /* static_pass_number */
7484 TV_TREE_SPLIT_EDGES, /* tv_id */
7485 PROP_cfg, /* properties required */
7486 PROP_no_crit_edges, /* properties_provided */
7487 0, /* properties_destroyed */
7488 0, /* todo_flags_start */
7489 TODO_verify_flow /* todo_flags_finish */
7490 }
7491 };
7492
7493
7494 /* Build a ternary operation and gimplify it. Emit code before GSI.
7495 Return the gimple_val holding the result. */
7496
7497 tree
7498 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7499 tree type, tree a, tree b, tree c)
7500 {
7501 tree ret;
7502 location_t loc = gimple_location (gsi_stmt (*gsi));
7503
7504 ret = fold_build3_loc (loc, code, type, a, b, c);
7505 STRIP_NOPS (ret);
7506
7507 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7508 GSI_SAME_STMT);
7509 }
7510
7511 /* Build a binary operation and gimplify it. Emit code before GSI.
7512 Return the gimple_val holding the result. */
7513
7514 tree
7515 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7516 tree type, tree a, tree b)
7517 {
7518 tree ret;
7519
7520 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7521 STRIP_NOPS (ret);
7522
7523 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7524 GSI_SAME_STMT);
7525 }
7526
7527 /* Build a unary operation and gimplify it. Emit code before GSI.
7528 Return the gimple_val holding the result. */
7529
7530 tree
7531 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7532 tree a)
7533 {
7534 tree ret;
7535
7536 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7537 STRIP_NOPS (ret);
7538
7539 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7540 GSI_SAME_STMT);
7541 }
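
/* A short sketch of how the gimplify_build* helpers compose, assuming
   GSI points at a statement before which code may be inserted and A, B
   and C are gimple values of type TYPE:

     tree t1 = gimplify_build2 (&gsi, MULT_EXPR, type, a, b);
     tree t2 = gimplify_build2 (&gsi, PLUS_EXPR, type, t1, c);

   Each call folds its operands, gimplifies the result before GSI and
   returns a gimple_val, so T2 holds the value of a * b + c.  */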
7542
7543
7544 \f
7545 /* Emit return warnings. */
7546
7547 static unsigned int
7548 execute_warn_function_return (void)
7549 {
7550 source_location location;
7551 gimple last;
7552 edge e;
7553 edge_iterator ei;
7554
7555 /* If we have a path to EXIT, then we do return. */
7556 if (TREE_THIS_VOLATILE (cfun->decl)
7557 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7558 {
7559 location = UNKNOWN_LOCATION;
7560 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7561 {
7562 last = last_stmt (e->src);
7563 if ((gimple_code (last) == GIMPLE_RETURN
7564 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7565 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7566 break;
7567 }
7568 if (location == UNKNOWN_LOCATION)
7569 location = cfun->function_end_locus;
7570 warning_at (location, 0, "%<noreturn%> function does return");
7571 }
7572
7573 /* If we see "return;" in some basic block, then we do reach the end
7574 without returning a value. */
7575 else if (warn_return_type
7576 && !TREE_NO_WARNING (cfun->decl)
7577 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7578 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7579 {
7580 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7581 {
7582 gimple last = last_stmt (e->src);
7583 if (gimple_code (last) == GIMPLE_RETURN
7584 && gimple_return_retval (last) == NULL
7585 && !gimple_no_warning_p (last))
7586 {
7587 location = gimple_location (last);
7588 if (location == UNKNOWN_LOCATION)
7589 location = cfun->function_end_locus;
7590 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7591 TREE_NO_WARNING (cfun->decl) = 1;
7592 break;
7593 }
7594 }
7595 }
7596 return 0;
7597 }
7598
7599
7600 /* Given a basic block B which ends with a conditional and has
7601 precisely two successors, determine which of the edges is taken if
7602 the conditional is true and which is taken if the conditional is
7603 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7604
7605 void
7606 extract_true_false_edges_from_block (basic_block b,
7607 edge *true_edge,
7608 edge *false_edge)
7609 {
7610 edge e = EDGE_SUCC (b, 0);
7611
7612 if (e->flags & EDGE_TRUE_VALUE)
7613 {
7614 *true_edge = e;
7615 *false_edge = EDGE_SUCC (b, 1);
7616 }
7617 else
7618 {
7619 *false_edge = e;
7620 *true_edge = EDGE_SUCC (b, 1);
7621 }
7622 }
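
/* A minimal usage sketch, assuming BB ends in a GIMPLE_COND and thus
   has exactly two successor edges; the latch test is only an
   illustrative consumer of the result:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
     if (true_edge->dest == bb->loop_father->latch)
       ...  */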
7623
7624 struct gimple_opt_pass pass_warn_function_return =
7625 {
7626 {
7627 GIMPLE_PASS,
7628 "*warn_function_return", /* name */
7629 NULL, /* gate */
7630 execute_warn_function_return, /* execute */
7631 NULL, /* sub */
7632 NULL, /* next */
7633 0, /* static_pass_number */
7634 TV_NONE, /* tv_id */
7635 PROP_cfg, /* properties_required */
7636 0, /* properties_provided */
7637 0, /* properties_destroyed */
7638 0, /* todo_flags_start */
7639 0 /* todo_flags_finish */
7640 }
7641 };
7642
7643 /* Emit noreturn warnings. */
7644
7645 static unsigned int
7646 execute_warn_function_noreturn (void)
7647 {
7648 if (!TREE_THIS_VOLATILE (current_function_decl)
7649 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7650 warn_function_noreturn (current_function_decl);
7651 return 0;
7652 }
7653
7654 static bool
7655 gate_warn_function_noreturn (void)
7656 {
7657 return warn_suggest_attribute_noreturn;
7658 }
7659
7660 struct gimple_opt_pass pass_warn_function_noreturn =
7661 {
7662 {
7663 GIMPLE_PASS,
7664 "*warn_function_noreturn", /* name */
7665 gate_warn_function_noreturn, /* gate */
7666 execute_warn_function_noreturn, /* execute */
7667 NULL, /* sub */
7668 NULL, /* next */
7669 0, /* static_pass_number */
7670 TV_NONE, /* tv_id */
7671 PROP_cfg, /* properties_required */
7672 0, /* properties_provided */
7673 0, /* properties_destroyed */
7674 0, /* todo_flags_start */
7675 0 /* todo_flags_finish */
7676 }
7677 };
7678
7679
7680 /* Walk a gimplified function and warn about calls whose return value is
7681 ignored although the callee has attribute((warn_unused_result)) set.
7682 This is done before inlining, so we don't have to worry about that. */
7683
7684 static void
7685 do_warn_unused_result (gimple_seq seq)
7686 {
7687 tree fdecl, ftype;
7688 gimple_stmt_iterator i;
7689
7690 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7691 {
7692 gimple g = gsi_stmt (i);
7693
7694 switch (gimple_code (g))
7695 {
7696 case GIMPLE_BIND:
7697 do_warn_unused_result (gimple_bind_body (g));
7698 break;
7699 case GIMPLE_TRY:
7700 do_warn_unused_result (gimple_try_eval (g));
7701 do_warn_unused_result (gimple_try_cleanup (g));
7702 break;
7703 case GIMPLE_CATCH:
7704 do_warn_unused_result (gimple_catch_handler (g));
7705 break;
7706 case GIMPLE_EH_FILTER:
7707 do_warn_unused_result (gimple_eh_filter_failure (g));
7708 break;
7709
7710 case GIMPLE_CALL:
7711 if (gimple_call_lhs (g))
7712 break;
7713 if (gimple_call_internal_p (g))
7714 break;
7715
7716 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7717 LHS. All calls whose value is ignored should be
7718 represented like this. Look for the attribute. */
7719 fdecl = gimple_call_fndecl (g);
7720 ftype = gimple_call_fntype (g);
7721
7722 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7723 {
7724 location_t loc = gimple_location (g);
7725
7726 if (fdecl)
7727 warning_at (loc, OPT_Wunused_result,
7728 "ignoring return value of %qD, "
7729 "declared with attribute warn_unused_result",
7730 fdecl);
7731 else
7732 warning_at (loc, OPT_Wunused_result,
7733 "ignoring return value of function "
7734 "declared with attribute warn_unused_result");
7735 }
7736 break;
7737
7738 default:
7739 /* Not a container, not a call, or a call whose value is used. */
7740 break;
7741 }
7742 }
7743 }
7744
7745 static unsigned int
7746 run_warn_unused_result (void)
7747 {
7748 do_warn_unused_result (gimple_body (current_function_decl));
7749 return 0;
7750 }
7751
7752 static bool
7753 gate_warn_unused_result (void)
7754 {
7755 return flag_warn_unused_result;
7756 }
7757
7758 struct gimple_opt_pass pass_warn_unused_result =
7759 {
7760 {
7761 GIMPLE_PASS,
7762 "*warn_unused_result", /* name */
7763 gate_warn_unused_result, /* gate */
7764 run_warn_unused_result, /* execute */
7765 NULL, /* sub */
7766 NULL, /* next */
7767 0, /* static_pass_number */
7768 TV_NONE, /* tv_id */
7769 PROP_gimple_any, /* properties_required */
7770 0, /* properties_provided */
7771 0, /* properties_destroyed */
7772 0, /* todo_flags_start */
7773 0, /* todo_flags_finish */
7774 }
7775 };