re PR tree-optimization/51481 (ice: dead STMT in EH table)
gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
static void group_case_labels_stmt (gimple);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",                                /* name */
  NULL,                                 /* gate */
  execute_build_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CFG,                          /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg  /* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
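
/* As an illustrative example (not from the original source): a C
   statement like "goto *fn_table[i];" is a GIMPLE_GOTO whose
   destination is a pointer value rather than a LABEL_DECL, so this
   predicate returns true; a plain "goto done;" has a LABEL_DECL
   destination and returns false.  */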

/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}
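
/* A sketch of the transformation above: two computed gotos

       goto *p;                gotovar = p;  goto L;
       goto *q;     become     gotovar = q;  goto L;
                               L: goto *gotovar;

   so only the single factored block carries abnormal edges to every
   potential destination, instead of every computed-goto block.  */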

/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            seq = gsi_split_seq_before (&i);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = ggc_alloc_cleared_gimple_bb_info ();
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
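      /* Grow by roughly 25%: (last_basic_block + 3) / 4 is
         ceil (last_basic_block / 4), so even a small array grows by
         at least one slot.  */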
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), 0);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                              (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
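
/* Note that the first call for a given LOCUS returns 1, the next 2,
   and so on; 0 is never returned, so callers (see assign_discriminator
   below) can use a discriminator of 0 to mean "none assigned yet".  */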

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}
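
/* As an illustrative example (not from the original source): with
   "if (p) x = 1; else x = 2;" written on one line, the then- and
   else-blocks begin at the same locus as the condition; each one gets
   its own discriminator so a sample-based profiler can attribute
   execution counts to the correct block.  */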

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}
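
/* Note this is deliberately conservative: without knowing where a
   computed or non-local goto really lands, we add an abnormal edge to
   every block whose leading label is marked FORCED_LABEL (its address
   was taken) or DECL_NONLOCAL, respectively.  */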

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  location_t stmt_loc = gimple_location (stmt);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
      assign_discriminator (stmt_loc, label_bb);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  tree default_case = NULL_TREE;
  tree default_label = NULL_TREE;
  bool has_default;

  /* The default label is always the first case in a switch
     statement after gimplification if it was not optimized
     away.  */
  if (!CASE_LOW (gimple_switch_default_label (stmt))
      && !CASE_HIGH (gimple_switch_default_label (stmt)))
    {
      default_case = gimple_switch_default_label (stmt);
      default_label = CASE_LABEL (default_case);
      has_default = true;
    }
  else
    has_default = false;

  /* Look for possible opportunities to merge cases.  */
  if (has_default)
    i = 1;
  else
    i = 0;
  while (i < old_size)
    {
      tree base_case, base_label, base_high;
      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_label = CASE_LABEL (base_case);

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_label == default_label)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
        ? CASE_HIGH (base_case)
        : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          tree merge_label = CASE_LABEL (merge_case);
          double_int bhp1 = double_int_add (tree_to_double_int (base_high),
                                            double_int_one);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_label == base_label
              && double_int_equal_p (tree_to_double_int (CASE_LOW (merge_case)),
                                     bhp1))
            {
              base_high = CASE_HIGH (merge_case) ?
                CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH | EDGE_PRESERVE))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Return true if the var whose chain of uses starts at HEAD has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}

/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
                  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
        if (single_use)
          {
            single_use = NULL;
            break;
          }
        single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return !!single_use;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          fold_stmt (&gsi);
          stmt = gsi_stmt (gsi);
          if (cfgcleanup_altered_bbs && !is_gimple_debug (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  This should go in update_stmt.  */
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              tree op = gimple_op (stmt, i);
              /* Operands may be empty here.  For example, the labels
                 of a GIMPLE_COND are nulled out following the creation
                 of the corresponding CFG edges.  */
              if (op && TREE_CODE (op) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (op);
            }

          maybe_clean_or_replace_eh_stmt (orig_stmt, stmt);
          update_stmt (stmt);
        }
    }

  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
                              || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a call statement.  Set the cfun->calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (bb, dump_file, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple = NULL;
}
1915
1916
1917 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1918 predicate VAL, return the edge that will be taken out of the block.
1919 If VAL does not match a unique edge, NULL is returned. */
1920
1921 edge
1922 find_taken_edge (basic_block bb, tree val)
1923 {
1924 gimple stmt;
1925
1926 stmt = last_stmt (bb);
1927
1928 gcc_assert (stmt);
1929 gcc_assert (is_ctrl_stmt (stmt));
1930
1931 if (val == NULL)
1932 return NULL;
1933
1934 if (!is_gimple_min_invariant (val))
1935 return NULL;
1936
1937 if (gimple_code (stmt) == GIMPLE_COND)
1938 return find_taken_edge_cond_expr (bb, val);
1939
1940 if (gimple_code (stmt) == GIMPLE_SWITCH)
1941 return find_taken_edge_switch_expr (bb, val);
1942
1943 if (computed_goto_p (stmt))
1944 {
1945 /* Only optimize if the argument is a label, if the argument is
1946 not a label then we can not construct a proper CFG.
1947
1948 It may be the case that we only need to allow the LABEL_REF to
1949 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1950 appear inside a LABEL_EXPR just to be safe. */
1951 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1952 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1953 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1954 return NULL;
1955 }
1956
1957 gcc_unreachable ();
1958 }
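/* A minimal usage sketch (hypothetical caller; BB is assumed to end in a
   control statement and VAL to be a GIMPLE minimal invariant):

     edge e = find_taken_edge (bb, val);

   A non-NULL result means the branch is statically decided and every
   other outgoing edge of BB is dead; a NULL result means the taken
   edge could not be determined uniquely.  */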
1959
1960 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1961 statement, determine which of the outgoing edges will be taken out of the
1962 block. Return NULL if any edge may be taken. */
1963
1964 static edge
1965 find_taken_edge_computed_goto (basic_block bb, tree val)
1966 {
1967 basic_block dest;
1968 edge e = NULL;
1969
1970 dest = label_to_block (val);
1971 if (dest)
1972 {
1973 e = find_edge (bb, dest);
1974 gcc_assert (e != NULL);
1975 }
1976
1977 return e;
1978 }
1979
1980 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1981 statement, determine which of the two edges will be taken out of the
1982 block. Return NULL if either edge may be taken. */
1983
1984 static edge
1985 find_taken_edge_cond_expr (basic_block bb, tree val)
1986 {
1987 edge true_edge, false_edge;
1988
1989 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1990
1991 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1992 return (integer_zerop (val) ? false_edge : true_edge);
1993 }
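/* For example (a sketch; the SSA name and block numbers are
   hypothetical), for a block ending in

     if (x_1 > 4) goto <bb 3>; else goto <bb 4>;

   a VAL of boolean_true_node yields the edge to <bb 3>, while
   integer_zero_node yields the edge to <bb 4>.  */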
1994
1995 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1996 statement, determine which edge will be taken out of the block. Return
1997 NULL if any edge may be taken. */
1998
1999 static edge
2000 find_taken_edge_switch_expr (basic_block bb, tree val)
2001 {
2002 basic_block dest_bb;
2003 edge e;
2004 gimple switch_stmt;
2005 tree taken_case;
2006
2007 switch_stmt = last_stmt (bb);
2008 taken_case = find_case_label_for_value (switch_stmt, val);
2009 dest_bb = label_to_block (CASE_LABEL (taken_case));
2010
2011 e = find_edge (bb, dest_bb);
2012 gcc_assert (e);
2013 return e;
2014 }
2015
2016
2017 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2018 We can make optimal use here of the fact that the case labels are
2019 sorted: we can do a binary search for a case matching VAL. */
2020
2021 static tree
2022 find_case_label_for_value (gimple switch_stmt, tree val)
2023 {
2024 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2025 tree default_case = gimple_switch_default_label (switch_stmt);
2026
2027 for (low = 0, high = n; high - low > 1; )
2028 {
2029 size_t i = (high + low) / 2;
2030 tree t = gimple_switch_label (switch_stmt, i);
2031 int cmp;
2032
2033 /* Cache the result of comparing CASE_LOW and val. */
2034 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2035
2036 if (cmp > 0)
2037 high = i;
2038 else
2039 low = i;
2040
2041 if (CASE_HIGH (t) == NULL)
2042 {
2043 /* A single-valued case label. */
2044 if (cmp == 0)
2045 return t;
2046 }
2047 else
2048 {
2049 /* A case range. We can only handle integer ranges. */
2050 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2051 return t;
2052 }
2053 }
2054
2055 return default_case;
2056 }
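/* Illustration (hypothetical case vector): for a switch whose sorted
   labels are

     case 1:   case 5 ... 9:   case 12:

   a lookup of VAL == 7 narrows [LOW, HIGH) onto the range label, where
   CASE_LOW <= 7 and CASE_HIGH >= 7 both hold, so that label is
   returned; VAL == 3 matches no label and falls back to DEFAULT_CASE.  */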
2057
2058
2059 /* Dump a basic block on stderr. */
2060
2061 void
2062 gimple_debug_bb (basic_block bb)
2063 {
2064 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2065 }
2066
2067
2068 /* Dump basic block with index N on stderr. */
2069
2070 basic_block
2071 gimple_debug_bb_n (int n)
2072 {
2073 gimple_debug_bb (BASIC_BLOCK (n));
2074 return BASIC_BLOCK (n);
2075 }
2076
2077
2078 /* Dump the CFG on stderr.
2079
2080 FLAGS are the same as those used by the tree dumping functions
2081 (see TDF_* in tree-pass.h). */
2082
2083 void
2084 gimple_debug_cfg (int flags)
2085 {
2086 gimple_dump_cfg (stderr, flags);
2087 }
2088
2089
2090 /* Dump the program showing basic block boundaries on the given FILE.
2091
2092 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2093 tree-pass.h). */
2094
2095 void
2096 gimple_dump_cfg (FILE *file, int flags)
2097 {
2098 if (flags & TDF_DETAILS)
2099 {
2100 dump_function_header (file, current_function_decl, flags);
2101 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2102 n_basic_blocks, n_edges, last_basic_block);
2103
2104 brief_dump_cfg (file);
2105 fprintf (file, "\n");
2106 }
2107
2108 if (flags & TDF_STATS)
2109 dump_cfg_stats (file);
2110
2111 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2112 }
2113
2114
2115 /* Dump CFG statistics on FILE. */
2116
2117 void
2118 dump_cfg_stats (FILE *file)
2119 {
2120 static long max_num_merged_labels = 0;
2121 unsigned long size, total = 0;
2122 long num_edges;
2123 basic_block bb;
2124 const char * const fmt_str = "%-30s%-13s%12s\n";
2125 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2126 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2127 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2128 const char *funcname
2129 = lang_hooks.decl_printable_name (current_function_decl, 2);
2130
2131
2132 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2133
2134 fprintf (file, "---------------------------------------------------------\n");
2135 fprintf (file, fmt_str, "", " Number of ", "Memory");
2136 fprintf (file, fmt_str, "", " instances ", "used ");
2137 fprintf (file, "---------------------------------------------------------\n");
2138
2139 size = n_basic_blocks * sizeof (struct basic_block_def);
2140 total += size;
2141 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2142 SCALE (size), LABEL (size));
2143
2144 num_edges = 0;
2145 FOR_EACH_BB (bb)
2146 num_edges += EDGE_COUNT (bb->succs);
2147 size = num_edges * sizeof (struct edge_def);
2148 total += size;
2149 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2150
2151 fprintf (file, "---------------------------------------------------------\n");
2152 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2153 LABEL (total));
2154 fprintf (file, "---------------------------------------------------------\n");
2155 fprintf (file, "\n");
2156
2157 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2158 max_num_merged_labels = cfg_stats.num_merged_labels;
2159
2160 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2161 cfg_stats.num_merged_labels, max_num_merged_labels);
2162
2163 fprintf (file, "\n");
2164 }
2165
2166
2167 /* Dump CFG statistics on stderr. Keep extern so that it's always
2168 linked in the final executable. */
2169
2170 DEBUG_FUNCTION void
2171 debug_cfg_stats (void)
2172 {
2173 dump_cfg_stats (stderr);
2174 }
2175
2176
2177 /* Dump the flowgraph to a .vcg FILE. */
2178
2179 static void
2180 gimple_cfg2vcg (FILE *file)
2181 {
2182 edge e;
2183 edge_iterator ei;
2184 basic_block bb;
2185 const char *funcname
2186 = lang_hooks.decl_printable_name (current_function_decl, 2);
2187
2188 /* Write the file header. */
2189 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2190 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2191 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2192
2193 /* Write blocks and edges. */
2194 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2195 {
2196 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2197 e->dest->index);
2198
2199 if (e->flags & EDGE_FAKE)
2200 fprintf (file, " linestyle: dotted priority: 10");
2201 else
2202 fprintf (file, " linestyle: solid priority: 100");
2203
2204 fprintf (file, " }\n");
2205 }
2206 fputc ('\n', file);
2207
2208 FOR_EACH_BB (bb)
2209 {
2210 enum gimple_code head_code, end_code;
2211 const char *head_name, *end_name;
2212 int head_line = 0;
2213 int end_line = 0;
2214 gimple first = first_stmt (bb);
2215 gimple last = last_stmt (bb);
2216
2217 if (first)
2218 {
2219 head_code = gimple_code (first);
2220 head_name = gimple_code_name[head_code];
2221 head_line = get_lineno (first);
2222 }
2223 else
2224 head_name = "no-statement";
2225
2226 if (last)
2227 {
2228 end_code = gimple_code (last);
2229 end_name = gimple_code_name[end_code];
2230 end_line = get_lineno (last);
2231 }
2232 else
2233 end_name = "no-statement";
2234
2235 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2236 bb->index, bb->index, head_name, head_line, end_name,
2237 end_line);
2238
2239 FOR_EACH_EDGE (e, ei, bb->succs)
2240 {
2241 if (e->dest == EXIT_BLOCK_PTR)
2242 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2243 else
2244 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2245
2246 if (e->flags & EDGE_FAKE)
2247 fprintf (file, " priority: 10 linestyle: dotted");
2248 else
2249 fprintf (file, " priority: 100 linestyle: solid");
2250
2251 fprintf (file, " }\n");
2252 }
2253
2254 if (bb->next_bb != EXIT_BLOCK_PTR)
2255 fputc ('\n', file);
2256 }
2257
2258 fputs ("}\n\n", file);
2259 }
2260
2261
2262
2263 /*---------------------------------------------------------------------------
2264 Miscellaneous helpers
2265 ---------------------------------------------------------------------------*/
2266
2267 /* Return true if T represents a stmt that always transfers control. */
2268
2269 bool
2270 is_ctrl_stmt (gimple t)
2271 {
2272 switch (gimple_code (t))
2273 {
2274 case GIMPLE_COND:
2275 case GIMPLE_SWITCH:
2276 case GIMPLE_GOTO:
2277 case GIMPLE_RETURN:
2278 case GIMPLE_RESX:
2279 return true;
2280 default:
2281 return false;
2282 }
2283 }
2284
2285
2286 /* Return true if T is a statement that may alter the flow of control
2287 (e.g., a call to a non-returning function). */
2288
2289 bool
2290 is_ctrl_altering_stmt (gimple t)
2291 {
2292 gcc_assert (t);
2293
2294 switch (gimple_code (t))
2295 {
2296 case GIMPLE_CALL:
2297 {
2298 int flags = gimple_call_flags (t);
2299
2300 /* A non-pure/const call alters control flow if the current
2301 function has nonlocal labels. */
2302 if (!(flags & (ECF_CONST | ECF_PURE | ECF_LEAF))
2303 && cfun->has_nonlocal_label)
2304 return true;
2305
2306 /* A call also alters control flow if it does not return. */
2307 if (flags & ECF_NORETURN)
2308 return true;
2309
2310 /* TM ending statements have backedges out of the transaction.
2311 Return true so we split the basic block containing them.
2312 Note that the TM_BUILTIN test is merely an optimization. */
2313 if ((flags & ECF_TM_BUILTIN)
2314 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2315 return true;
2316
2317 /* BUILT_IN_RETURN call is same as return statement. */
2318 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2319 return true;
2320 }
2321 break;
2322
2323 case GIMPLE_EH_DISPATCH:
2324 /* EH_DISPATCH branches to the individual catch handlers at
2325 this level of a try or allowed-exceptions region. It can
2326 fallthru to the next statement as well. */
2327 return true;
2328
2329 case GIMPLE_ASM:
2330 if (gimple_asm_nlabels (t) > 0)
2331 return true;
2332 break;
2333
2334 CASE_GIMPLE_OMP:
2335 /* OpenMP directives alter control flow. */
2336 return true;
2337
2338 case GIMPLE_TRANSACTION:
2339 /* A transaction start alters control flow. */
2340 return true;
2341
2342 default:
2343 break;
2344 }
2345
2346 /* If a statement can throw, it alters control flow. */
2347 return stmt_can_throw_internal (t);
2348 }
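/* For instance (source-level sketch), a call such as

     abort ();

   is declared noreturn, so ECF_NORETURN is set and the call is
   control-altering: the basic block must end at the call even though
   no explicit branch follows it.  */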
2349
2350
2351 /* Return true if T is a simple local goto. */
2352
2353 bool
2354 simple_goto_p (gimple t)
2355 {
2356 return (gimple_code (t) == GIMPLE_GOTO
2357 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2358 }
2359
2360
2361 /* Return true if T can make an abnormal transfer of control flow.
2362 Transfers of control flow associated with EH are excluded. */
2363
2364 bool
2365 stmt_can_make_abnormal_goto (gimple t)
2366 {
2367 if (computed_goto_p (t))
2368 return true;
2369 if (is_gimple_call (t))
2370 return (gimple_has_side_effects (t) && cfun->has_nonlocal_label
2371 && !(gimple_call_flags (t) & ECF_LEAF));
2372 return false;
2373 }
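/* E.g. (sketch) a computed goto

     goto *p_1;

   can make an abnormal transfer, as can a call with side effects that
   is not ECF_LEAF when the current function has nonlocal labels.  */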
2374
2375
2376 /* Return true if STMT should start a new basic block. PREV_STMT is
2377 the statement preceding STMT. It is used when STMT is a label or a
2378 case label. Labels should only start a new basic block if the
2379 previous statement wasn't a label. Otherwise, sequences of labels
2380 would generate unnecessary basic blocks that contain only a single
2381 label. */
2382
2383 static inline bool
2384 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2385 {
2386 if (stmt == NULL)
2387 return false;
2388
2389 /* Labels start a new basic block only if the preceding statement
2390 wasn't a label of the same type. This prevents the creation of
2391 consecutive blocks that have nothing but a single label. */
2392 if (gimple_code (stmt) == GIMPLE_LABEL)
2393 {
2394 /* Nonlocal and computed GOTO targets always start a new block. */
2395 if (DECL_NONLOCAL (gimple_label_label (stmt))
2396 || FORCED_LABEL (gimple_label_label (stmt)))
2397 return true;
2398
2399 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2400 {
2401 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2402 return true;
2403
2404 cfg_stats.num_merged_labels++;
2405 return false;
2406 }
2407 else
2408 return true;
2409 }
2410
2411 return false;
2412 }
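/* As an example (source-level sketch, assuming ordinary labels), in

     a: b: c:
       x = 1;

   only the label "a" starts a new basic block; "b" and "c" are merged
   into the same block and accounted for in
   cfg_stats.num_merged_labels.  */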
2413
2414
2415 /* Return true if T should end a basic block. */
2416
2417 bool
2418 stmt_ends_bb_p (gimple t)
2419 {
2420 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2421 }
2422
2423 /* Remove block annotations and other data structures. */
2424
2425 void
2426 delete_tree_cfg_annotations (void)
2427 {
2428 label_to_block_map = NULL;
2429 }
2430
2431
2432 /* Return the first statement in basic block BB. */
2433
2434 gimple
2435 first_stmt (basic_block bb)
2436 {
2437 gimple_stmt_iterator i = gsi_start_bb (bb);
2438 gimple stmt = NULL;
2439
2440 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2441 {
2442 gsi_next (&i);
2443 stmt = NULL;
2444 }
2445 return stmt;
2446 }
2447
2448 /* Return the first non-label statement in basic block BB. */
2449
2450 static gimple
2451 first_non_label_stmt (basic_block bb)
2452 {
2453 gimple_stmt_iterator i = gsi_start_bb (bb);
2454 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2455 gsi_next (&i);
2456 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2457 }
2458
2459 /* Return the last statement in basic block BB. */
2460
2461 gimple
2462 last_stmt (basic_block bb)
2463 {
2464 gimple_stmt_iterator i = gsi_last_bb (bb);
2465 gimple stmt = NULL;
2466
2467 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2468 {
2469 gsi_prev (&i);
2470 stmt = NULL;
2471 }
2472 return stmt;
2473 }
2474
2475 /* Return the last statement of an otherwise empty block. Return NULL
2476 if the block is totally empty, or if it contains more than one
2477 statement. */
2478
2479 gimple
2480 last_and_only_stmt (basic_block bb)
2481 {
2482 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2483 gimple last, prev;
2484
2485 if (gsi_end_p (i))
2486 return NULL;
2487
2488 last = gsi_stmt (i);
2489 gsi_prev_nondebug (&i);
2490 if (gsi_end_p (i))
2491 return last;
2492
2493 /* Empty statements should no longer appear in the instruction stream.
2494 Everything that might have appeared before should be deleted by
2495 remove_useless_stmts, and the optimizers should just gsi_remove
2496 instead of smashing with build_empty_stmt.
2497
2498 Thus the only thing that should appear here in a block containing
2499 one executable statement is a label. */
2500 prev = gsi_stmt (i);
2501 if (gimple_code (prev) == GIMPLE_LABEL)
2502 return last;
2503 else
2504 return NULL;
2505 }
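/* E.g. (sketch), for a block consisting of

     L1:
       return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN, since a leading label
   does not count; with any second executable statement present the
   function returns NULL.  */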
2506
2507 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2508
2509 static void
2510 reinstall_phi_args (edge new_edge, edge old_edge)
2511 {
2512 edge_var_map_vector v;
2513 edge_var_map *vm;
2514 int i;
2515 gimple_stmt_iterator phis;
2516
2517 v = redirect_edge_var_map_vector (old_edge);
2518 if (!v)
2519 return;
2520
2521 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2522 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2523 i++, gsi_next (&phis))
2524 {
2525 gimple phi = gsi_stmt (phis);
2526 tree result = redirect_edge_var_map_result (vm);
2527 tree arg = redirect_edge_var_map_def (vm);
2528
2529 gcc_assert (result == gimple_phi_result (phi));
2530
2531 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2532 }
2533
2534 redirect_edge_var_map_clear (old_edge);
2535 }
2536
2537 /* Returns the basic block after which the new basic block created
2538 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2539 near its "logical" location. This is of most help to humans looking
2540 at debugging dumps. */
2541
2542 static basic_block
2543 split_edge_bb_loc (edge edge_in)
2544 {
2545 basic_block dest = edge_in->dest;
2546 basic_block dest_prev = dest->prev_bb;
2547
2548 if (dest_prev)
2549 {
2550 edge e = find_edge (dest_prev, dest);
2551 if (e && !(e->flags & EDGE_COMPLEX))
2552 return edge_in->src;
2553 }
2554 return dest_prev;
2555 }
2556
2557 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2558 Abort on abnormal edges. */
2559
2560 static basic_block
2561 gimple_split_edge (edge edge_in)
2562 {
2563 basic_block new_bb, after_bb, dest;
2564 edge new_edge, e;
2565
2566 /* Abnormal edges cannot be split. */
2567 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2568
2569 dest = edge_in->dest;
2570
2571 after_bb = split_edge_bb_loc (edge_in);
2572
2573 new_bb = create_empty_bb (after_bb);
2574 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2575 new_bb->count = edge_in->count;
2576 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2577 new_edge->probability = REG_BR_PROB_BASE;
2578 new_edge->count = edge_in->count;
2579
2580 e = redirect_edge_and_branch (edge_in, new_bb);
2581 gcc_assert (e == edge_in);
2582 reinstall_phi_args (new_edge, e);
2583
2584 return new_bb;
2585 }
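/* Typical use (a sketch; E is assumed not to be EDGE_ABNORMAL):

     basic_block new_bb = gimple_split_edge (e);

   afterwards E targets the new empty block, a single fallthru edge
   leads from NEW_BB to the old destination, and any PHI arguments
   queued on E have been reinstalled on that fallthru edge.  */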
2586
2587
2588 /* Verify properties of the address expression T with base object BASE. */
2589
2590 static tree
2591 verify_address (tree t, tree base)
2592 {
2593 bool old_constant;
2594 bool old_side_effects;
2595 bool new_constant;
2596 bool new_side_effects;
2597
2598 old_constant = TREE_CONSTANT (t);
2599 old_side_effects = TREE_SIDE_EFFECTS (t);
2600
2601 recompute_tree_invariant_for_addr_expr (t);
2602 new_side_effects = TREE_SIDE_EFFECTS (t);
2603 new_constant = TREE_CONSTANT (t);
2604
2605 if (old_constant != new_constant)
2606 {
2607 error ("constant not recomputed when ADDR_EXPR changed");
2608 return t;
2609 }
2610 if (old_side_effects != new_side_effects)
2611 {
2612 error ("side effects not recomputed when ADDR_EXPR changed");
2613 return t;
2614 }
2615
2616 if (!(TREE_CODE (base) == VAR_DECL
2617 || TREE_CODE (base) == PARM_DECL
2618 || TREE_CODE (base) == RESULT_DECL))
2619 return NULL_TREE;
2620
2621 if (DECL_GIMPLE_REG_P (base))
2622 {
2623 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2624 return base;
2625 }
2626
2627 return NULL_TREE;
2628 }
2629
2630 /* Callback for walk_tree; check that all elements whose address is taken
2631 are properly noticed as such. The DATA is an int* that is 1 if TP was
2632 seen inside a PHI node. */
2633
2634 static tree
2635 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2636 {
2637 tree t = *tp, x;
2638
2639 if (TYPE_P (t))
2640 *walk_subtrees = 0;
2641
2642 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2643 #define CHECK_OP(N, MSG) \
2644 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2645 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2646
2647 switch (TREE_CODE (t))
2648 {
2649 case SSA_NAME:
2650 if (SSA_NAME_IN_FREE_LIST (t))
2651 {
2652 error ("SSA name in freelist but still referenced");
2653 return *tp;
2654 }
2655 break;
2656
2657 case INDIRECT_REF:
2658 error ("INDIRECT_REF in gimple IL");
2659 return t;
2660
2661 case MEM_REF:
2662 x = TREE_OPERAND (t, 0);
2663 if (!POINTER_TYPE_P (TREE_TYPE (x))
2664 || !is_gimple_mem_ref_addr (x))
2665 {
2666 error ("invalid first operand of MEM_REF");
2667 return x;
2668 }
2669 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2670 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2671 {
2672 error ("invalid offset operand of MEM_REF");
2673 return TREE_OPERAND (t, 1);
2674 }
2675 if (TREE_CODE (x) == ADDR_EXPR
2676 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2677 return x;
2678 *walk_subtrees = 0;
2679 break;
2680
2681 case ASSERT_EXPR:
2682 x = fold (ASSERT_EXPR_COND (t));
2683 if (x == boolean_false_node)
2684 {
2685 error ("ASSERT_EXPR with an always-false condition");
2686 return *tp;
2687 }
2688 break;
2689
2690 case MODIFY_EXPR:
2691 error ("MODIFY_EXPR not expected while having tuples");
2692 return *tp;
2693
2694 case ADDR_EXPR:
2695 {
2696 tree tem;
2697
2698 gcc_assert (is_gimple_address (t));
2699
2700 /* Skip any references (they will be checked when we recurse down the
2701 tree) and ensure that any variable used as a prefix is marked
2702 addressable. */
2703 for (x = TREE_OPERAND (t, 0);
2704 handled_component_p (x);
2705 x = TREE_OPERAND (x, 0))
2706 ;
2707
2708 if ((tem = verify_address (t, x)))
2709 return tem;
2710
2711 if (!(TREE_CODE (x) == VAR_DECL
2712 || TREE_CODE (x) == PARM_DECL
2713 || TREE_CODE (x) == RESULT_DECL))
2714 return NULL;
2715
2716 if (!TREE_ADDRESSABLE (x))
2717 {
2718 error ("address taken, but ADDRESSABLE bit not set");
2719 return x;
2720 }
2721
2722 break;
2723 }
2724
2725 case COND_EXPR:
2726 x = COND_EXPR_COND (t);
2727 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2728 {
2729 error ("non-integral used in condition");
2730 return x;
2731 }
2732 if (!is_gimple_condexpr (x))
2733 {
2734 error ("invalid conditional operand");
2735 return x;
2736 }
2737 break;
2738
2739 case NON_LVALUE_EXPR:
2740 case TRUTH_NOT_EXPR:
2741 gcc_unreachable ();
2742
2743 CASE_CONVERT:
2744 case FIX_TRUNC_EXPR:
2745 case FLOAT_EXPR:
2746 case NEGATE_EXPR:
2747 case ABS_EXPR:
2748 case BIT_NOT_EXPR:
2749 CHECK_OP (0, "invalid operand to unary operator");
2750 break;
2751
2752 case REALPART_EXPR:
2753 case IMAGPART_EXPR:
2754 case COMPONENT_REF:
2755 case ARRAY_REF:
2756 case ARRAY_RANGE_REF:
2757 case BIT_FIELD_REF:
2758 case VIEW_CONVERT_EXPR:
2759 /* We have a nest of references. Verify that each of the operands
2760 that determines the location referenced is either a constant or a
2761 variable, verify that the base is valid, and then flag the subtrees
2762 as already checked. */
2763 while (handled_component_p (t))
2764 {
2765 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2766 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2767 else if (TREE_CODE (t) == ARRAY_REF
2768 || TREE_CODE (t) == ARRAY_RANGE_REF)
2769 {
2770 CHECK_OP (1, "invalid array index");
2771 if (TREE_OPERAND (t, 2))
2772 CHECK_OP (2, "invalid array lower bound");
2773 if (TREE_OPERAND (t, 3))
2774 CHECK_OP (3, "invalid array stride");
2775 }
2776 else if (TREE_CODE (t) == BIT_FIELD_REF)
2777 {
2778 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2779 || !host_integerp (TREE_OPERAND (t, 2), 1))
2780 {
2781 error ("invalid position or size operand to BIT_FIELD_REF");
2782 return t;
2783 }
2784 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2785 && (TYPE_PRECISION (TREE_TYPE (t))
2786 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2787 {
2788 error ("integral result type precision does not match "
2789 "field size of BIT_FIELD_REF");
2790 return t;
2791 }
2792 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2793 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2794 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2795 {
2796 error ("mode precision of non-integral result does not "
2797 "match field size of BIT_FIELD_REF");
2798 return t;
2799 }
2800 }
2801
2802 t = TREE_OPERAND (t, 0);
2803 }
2804
2805 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2806 {
2807 error ("invalid reference prefix");
2808 return t;
2809 }
2810 *walk_subtrees = 0;
2811 break;
2812 case PLUS_EXPR:
2813 case MINUS_EXPR:
2814 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2815 should be done using POINTER_PLUS_EXPR. */
2816 if (POINTER_TYPE_P (TREE_TYPE (t)))
2817 {
2818 error ("invalid operand to plus/minus, type is a pointer");
2819 return t;
2820 }
2821 CHECK_OP (0, "invalid operand to binary operator");
2822 CHECK_OP (1, "invalid operand to binary operator");
2823 break;
2824
2825 case POINTER_PLUS_EXPR:
2826 /* Check to make sure the first operand is a pointer or reference type. */
2827 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2828 {
2829 error ("invalid operand to pointer plus, first operand is not a pointer");
2830 return t;
2831 }
2832 /* Check to make sure the second operand is a ptrofftype. */
2833 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2834 {
2835 error ("invalid operand to pointer plus, second operand is not an "
2836 "integer type of appropriate width");
2837 return t;
2838 }
2839 /* FALLTHROUGH */
2840 case LT_EXPR:
2841 case LE_EXPR:
2842 case GT_EXPR:
2843 case GE_EXPR:
2844 case EQ_EXPR:
2845 case NE_EXPR:
2846 case UNORDERED_EXPR:
2847 case ORDERED_EXPR:
2848 case UNLT_EXPR:
2849 case UNLE_EXPR:
2850 case UNGT_EXPR:
2851 case UNGE_EXPR:
2852 case UNEQ_EXPR:
2853 case LTGT_EXPR:
2854 case MULT_EXPR:
2855 case TRUNC_DIV_EXPR:
2856 case CEIL_DIV_EXPR:
2857 case FLOOR_DIV_EXPR:
2858 case ROUND_DIV_EXPR:
2859 case TRUNC_MOD_EXPR:
2860 case CEIL_MOD_EXPR:
2861 case FLOOR_MOD_EXPR:
2862 case ROUND_MOD_EXPR:
2863 case RDIV_EXPR:
2864 case EXACT_DIV_EXPR:
2865 case MIN_EXPR:
2866 case MAX_EXPR:
2867 case LSHIFT_EXPR:
2868 case RSHIFT_EXPR:
2869 case LROTATE_EXPR:
2870 case RROTATE_EXPR:
2871 case BIT_IOR_EXPR:
2872 case BIT_XOR_EXPR:
2873 case BIT_AND_EXPR:
2874 CHECK_OP (0, "invalid operand to binary operator");
2875 CHECK_OP (1, "invalid operand to binary operator");
2876 break;
2877
2878 case CONSTRUCTOR:
2879 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2880 *walk_subtrees = 0;
2881 break;
2882
2883 case CASE_LABEL_EXPR:
2884 if (CASE_CHAIN (t))
2885 {
2886 error ("invalid CASE_CHAIN");
2887 return t;
2888 }
2889 break;
2890
2891 default:
2892 break;
2893 }
2894 return NULL;
2895
2896 #undef CHECK_OP
2897 }
2898
2899
2900 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2901 Returns true if there is an error, otherwise false. */
2902
2903 static bool
2904 verify_types_in_gimple_min_lval (tree expr)
2905 {
2906 tree op;
2907
2908 if (is_gimple_id (expr))
2909 return false;
2910
2911 if (TREE_CODE (expr) != TARGET_MEM_REF
2912 && TREE_CODE (expr) != MEM_REF)
2913 {
2914 error ("invalid expression for min lvalue");
2915 return true;
2916 }
2917
2918 /* TARGET_MEM_REFs are strange beasts. */
2919 if (TREE_CODE (expr) == TARGET_MEM_REF)
2920 return false;
2921
2922 op = TREE_OPERAND (expr, 0);
2923 if (!is_gimple_val (op))
2924 {
2925 error ("invalid operand in indirect reference");
2926 debug_generic_stmt (op);
2927 return true;
2928 }
2929 /* Memory references now generally can involve a value conversion. */
2930
2931 return false;
2932 }
2933
2934 /* Verify if EXPR is a valid GIMPLE reference expression. If
2935 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2936 if there is an error, otherwise false. */
2937
2938 static bool
2939 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2940 {
2941 while (handled_component_p (expr))
2942 {
2943 tree op = TREE_OPERAND (expr, 0);
2944
2945 if (TREE_CODE (expr) == ARRAY_REF
2946 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2947 {
2948 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2949 || (TREE_OPERAND (expr, 2)
2950 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2951 || (TREE_OPERAND (expr, 3)
2952 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2953 {
2954 error ("invalid operands to array reference");
2955 debug_generic_stmt (expr);
2956 return true;
2957 }
2958 }
2959
2960 /* Verify if the reference array element types are compatible. */
2961 if (TREE_CODE (expr) == ARRAY_REF
2962 && !useless_type_conversion_p (TREE_TYPE (expr),
2963 TREE_TYPE (TREE_TYPE (op))))
2964 {
2965 error ("type mismatch in array reference");
2966 debug_generic_stmt (TREE_TYPE (expr));
2967 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2968 return true;
2969 }
2970 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2971 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2972 TREE_TYPE (TREE_TYPE (op))))
2973 {
2974 error ("type mismatch in array range reference");
2975 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2976 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2977 return true;
2978 }
2979
2980 if ((TREE_CODE (expr) == REALPART_EXPR
2981 || TREE_CODE (expr) == IMAGPART_EXPR)
2982 && !useless_type_conversion_p (TREE_TYPE (expr),
2983 TREE_TYPE (TREE_TYPE (op))))
2984 {
2985 error ("type mismatch in real/imagpart reference");
2986 debug_generic_stmt (TREE_TYPE (expr));
2987 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2988 return true;
2989 }
2990
2991 if (TREE_CODE (expr) == COMPONENT_REF
2992 && !useless_type_conversion_p (TREE_TYPE (expr),
2993 TREE_TYPE (TREE_OPERAND (expr, 1))))
2994 {
2995 error ("type mismatch in component reference");
2996 debug_generic_stmt (TREE_TYPE (expr));
2997 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2998 return true;
2999 }
3000
3001 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3002 {
3003 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3004 that their operand is not an SSA name or an invariant when
3005 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3006 bug). Otherwise there is nothing to verify, gross mismatches at
3007 most invoke undefined behavior. */
3008 if (require_lvalue
3009 && (TREE_CODE (op) == SSA_NAME
3010 || is_gimple_min_invariant (op)))
3011 {
3012 error ("conversion of an SSA_NAME on the left hand side");
3013 debug_generic_stmt (expr);
3014 return true;
3015 }
3016 else if (TREE_CODE (op) == SSA_NAME
3017 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3018 {
3019 error ("conversion of register to a different size");
3020 debug_generic_stmt (expr);
3021 return true;
3022 }
3023 else if (!handled_component_p (op))
3024 return false;
3025 }
3026
3027 expr = op;
3028 }
3029
3030 if (TREE_CODE (expr) == MEM_REF)
3031 {
3032 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3033 {
3034 error ("invalid address operand in MEM_REF");
3035 debug_generic_stmt (expr);
3036 return true;
3037 }
3038 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3039 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3040 {
3041 error ("invalid offset operand in MEM_REF");
3042 debug_generic_stmt (expr);
3043 return true;
3044 }
3045 }
3046 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3047 {
3048 if (!TMR_BASE (expr)
3049 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3050 {
3051 error ("invalid address operand in TARGET_MEM_REF");
3052 return true;
3053 }
3054 if (!TMR_OFFSET (expr)
3055 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3056 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3057 {
3058 error ("invalid offset operand in TARGET_MEM_REF");
3059 debug_generic_stmt (expr);
3060 return true;
3061 }
3062 }
3063
3064 return ((require_lvalue || !is_gimple_min_invariant (expr))
3065 && verify_types_in_gimple_min_lval (expr));
3066 }
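/* E.g. (dump-style sketch, hypothetical SSA name), the reference

     MEM[(int *)p_1 + 4B]

   passes these checks: the address operand satisfies
   is_gimple_mem_ref_addr and the offset is a pointer-typed
   INTEGER_CST; a non-constant offset operand would be diagnosed.  */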
3067
3068 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3069 list of pointer-to types that is trivially convertible to DEST. */
3070
3071 static bool
3072 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3073 {
3074 tree src;
3075
3076 if (!TYPE_POINTER_TO (src_obj))
3077 return true;
3078
3079 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3080 if (useless_type_conversion_p (dest, src))
3081 return true;
3082
3083 return false;
3084 }
3085
3086 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3087 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3088
3089 static bool
3090 valid_fixed_convert_types_p (tree type1, tree type2)
3091 {
3092 return (FIXED_POINT_TYPE_P (type1)
3093 && (INTEGRAL_TYPE_P (type2)
3094 || SCALAR_FLOAT_TYPE_P (type2)
3095 || FIXED_POINT_TYPE_P (type2)));
3096 }
3097
3098 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3099 is a problem, otherwise false. */
3100
3101 static bool
3102 verify_gimple_call (gimple stmt)
3103 {
3104 tree fn = gimple_call_fn (stmt);
3105 tree fntype, fndecl;
3106 unsigned i;
3107
3108 if (gimple_call_internal_p (stmt))
3109 {
3110 if (fn)
3111 {
3112 error ("gimple call has two targets");
3113 debug_generic_stmt (fn);
3114 return true;
3115 }
3116 }
3117 else
3118 {
3119 if (!fn)
3120 {
3121 error ("gimple call has no target");
3122 return true;
3123 }
3124 }
3125
3126 if (fn && !is_gimple_call_addr (fn))
3127 {
3128 error ("invalid function in gimple call");
3129 debug_generic_stmt (fn);
3130 return true;
3131 }
3132
3133 if (fn
3134 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3135 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3136 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3137 {
3138 error ("non-function in gimple call");
3139 return true;
3140 }
3141
3142 fndecl = gimple_call_fndecl (stmt);
3143 if (fndecl
3144 && TREE_CODE (fndecl) == FUNCTION_DECL
3145 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3146 && !DECL_PURE_P (fndecl)
3147 && !TREE_READONLY (fndecl))
3148 {
3149 error ("invalid pure const state for function");
3150 return true;
3151 }
3152
3153 if (gimple_call_lhs (stmt)
3154 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3155 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3156 {
3157 error ("invalid LHS in gimple call");
3158 return true;
3159 }
3160
3161 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3162 {
3163 error ("LHS in noreturn call");
3164 return true;
3165 }
3166
3167 fntype = gimple_call_fntype (stmt);
3168 if (fntype
3169 && gimple_call_lhs (stmt)
3170 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3171 TREE_TYPE (fntype))
3172 /* ??? At least C++ misses conversions at assignments from
3173 void * call results.
3174 ??? Java is completely off. Especially with functions
3175 returning java.lang.Object.
3176 For now simply allow arbitrary pointer type conversions. */
3177 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3178 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3179 {
3180 error ("invalid conversion in gimple call");
3181 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3182 debug_generic_stmt (TREE_TYPE (fntype));
3183 return true;
3184 }
3185
3186 if (gimple_call_chain (stmt)
3187 && !is_gimple_val (gimple_call_chain (stmt)))
3188 {
3189 error ("invalid static chain in gimple call");
3190 debug_generic_stmt (gimple_call_chain (stmt));
3191 return true;
3192 }
3193
3194 /* If there is a static chain argument, this should not be an indirect
3195 call, and the decl should have DECL_STATIC_CHAIN set. */
3196 if (gimple_call_chain (stmt))
3197 {
3198 if (!gimple_call_fndecl (stmt))
3199 {
3200 error ("static chain in indirect gimple call");
3201 return true;
3202 }
3203 fn = TREE_OPERAND (fn, 0);
3204
3205 if (!DECL_STATIC_CHAIN (fn))
3206 {
3207 error ("static chain with function that doesn%'t use one");
3208 return true;
3209 }
3210 }
3211
3212 /* ??? The C frontend passes unpromoted arguments in case it
3213 didn't see a function declaration before the call. So for now
3214 leave the call arguments mostly unverified. Once we gimplify
3215 unit-at-a-time we have a chance to fix this. */
3216
3217 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3218 {
3219 tree arg = gimple_call_arg (stmt, i);
3220 if ((is_gimple_reg_type (TREE_TYPE (arg))
3221 && !is_gimple_val (arg))
3222 || (!is_gimple_reg_type (TREE_TYPE (arg))
3223 && !is_gimple_lvalue (arg)))
3224 {
3225 error ("invalid argument to gimple call");
3226 debug_generic_expr (arg);
3227 return true;
3228 }
3229 }
3230
3231 return false;
3232 }
3233
3234 /* Verifies the gimple comparison with the result type TYPE and
3235 the operands OP0 and OP1. */
3236
3237 static bool
3238 verify_gimple_comparison (tree type, tree op0, tree op1)
3239 {
3240 tree op0_type = TREE_TYPE (op0);
3241 tree op1_type = TREE_TYPE (op1);
3242
3243 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3244 {
3245 error ("invalid operands in gimple comparison");
3246 return true;
3247 }
3248
3249 /* For comparisons we do not have the operation's type as the
3250 effective type the comparison is carried out in. Instead
3251 we require that either the first operand is trivially
3252 convertible into the second, or the other way around.
3253 Because we special-case pointers to void we allow
3254 comparisons of pointers with the same mode as well. */
3255 if (!useless_type_conversion_p (op0_type, op1_type)
3256 && !useless_type_conversion_p (op1_type, op0_type)
3257 && (!POINTER_TYPE_P (op0_type)
3258 || !POINTER_TYPE_P (op1_type)
3259 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3260 {
3261 error ("mismatching comparison operand types");
3262 debug_generic_expr (op0_type);
3263 debug_generic_expr (op1_type);
3264 return true;
3265 }
3266
3267 /* The resulting type of a comparison may be an effective boolean type. */
3268 if (INTEGRAL_TYPE_P (type)
3269 && (TREE_CODE (type) == BOOLEAN_TYPE
3270 || TYPE_PRECISION (type) == 1))
3271 ;
3272 /* Or an integer vector type with the same size and element count
3273 as the comparison operand types. */
3274 else if (TREE_CODE (type) == VECTOR_TYPE
3275 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3276 {
3277 if (TREE_CODE (op0_type) != VECTOR_TYPE
3278 || TREE_CODE (op1_type) != VECTOR_TYPE)
3279 {
3280 error ("non-vector operands in vector comparison");
3281 debug_generic_expr (op0_type);
3282 debug_generic_expr (op1_type);
3283 return true;
3284 }
3285
3286 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3287 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3288 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type)))))
3289 {
3290 error ("invalid vector comparison resulting type");
3291 debug_generic_expr (type);
3292 return true;
3293 }
3294 }
3295 else
3296 {
3297 error ("bogus comparison result type");
3298 debug_generic_expr (type);
3299 return true;
3300 }
3301
3302 return false;
3303 }
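/* For example (sketch, hypothetical SSA names), the comparison

     t_2 = p_1 != q_3;

   where P and Q are pointers of the same mode is accepted even without
   a trivial conversion between their types, whereas comparing an int
   with a float directly would be reported as mismatching operand
   types.  */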
3304
3305 /* Verify a gimple assignment statement STMT with an unary rhs.
3306 Returns true if anything is wrong. */
3307
3308 static bool
3309 verify_gimple_assign_unary (gimple stmt)
3310 {
3311 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3312 tree lhs = gimple_assign_lhs (stmt);
3313 tree lhs_type = TREE_TYPE (lhs);
3314 tree rhs1 = gimple_assign_rhs1 (stmt);
3315 tree rhs1_type = TREE_TYPE (rhs1);
3316
3317 if (!is_gimple_reg (lhs))
3318 {
3319 error ("non-register as LHS of unary operation");
3320 return true;
3321 }
3322
3323 if (!is_gimple_val (rhs1))
3324 {
3325 error ("invalid operand in unary operation");
3326 return true;
3327 }
3328
3329 /* First handle conversions. */
3330 switch (rhs_code)
3331 {
3332 CASE_CONVERT:
3333 {
3334 /* Allow conversions between integral types and pointers only if
3335 there is no sign or zero extension involved.
3336 For targets where the precision of ptrofftype doesn't match that
3337 of pointers we need to allow arbitrary conversions from and
3338 to ptrofftype. */
3339 if ((POINTER_TYPE_P (lhs_type)
3340 && INTEGRAL_TYPE_P (rhs1_type)
3341 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3342 || ptrofftype_p (rhs1_type)))
3343 || (POINTER_TYPE_P (rhs1_type)
3344 && INTEGRAL_TYPE_P (lhs_type)
3345 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3346 || ptrofftype_p (sizetype))))
3347 return false;
3348
3349 /* Allow conversion from integer to offset type and vice versa. */
3350 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3351 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3352 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3353 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3354 return false;
3355
3356 /* Otherwise assert we are converting between types of the
3357 same kind. */
3358 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3359 {
3360 error ("invalid types in nop conversion");
3361 debug_generic_expr (lhs_type);
3362 debug_generic_expr (rhs1_type);
3363 return true;
3364 }
3365
3366 return false;
3367 }
3368
3369 case ADDR_SPACE_CONVERT_EXPR:
3370 {
3371 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3372 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3373 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3374 {
3375 error ("invalid types in address space conversion");
3376 debug_generic_expr (lhs_type);
3377 debug_generic_expr (rhs1_type);
3378 return true;
3379 }
3380
3381 return false;
3382 }
3383
3384 case FIXED_CONVERT_EXPR:
3385 {
3386 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3387 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3388 {
3389 error ("invalid types in fixed-point conversion");
3390 debug_generic_expr (lhs_type);
3391 debug_generic_expr (rhs1_type);
3392 return true;
3393 }
3394
3395 return false;
3396 }
3397
3398 case FLOAT_EXPR:
3399 {
3400 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3401 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3402 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3403 {
3404 error ("invalid types in conversion to floating point");
3405 debug_generic_expr (lhs_type);
3406 debug_generic_expr (rhs1_type);
3407 return true;
3408 }
3409
3410 return false;
3411 }
3412
3413 case FIX_TRUNC_EXPR:
3414 {
3415 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3416 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3417 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3418 {
3419 error ("invalid types in conversion to integer");
3420 debug_generic_expr (lhs_type);
3421 debug_generic_expr (rhs1_type);
3422 return true;
3423 }
3424
3425 return false;
3426 }
3427
3428 case VEC_UNPACK_HI_EXPR:
3429 case VEC_UNPACK_LO_EXPR:
3430 case REDUC_MAX_EXPR:
3431 case REDUC_MIN_EXPR:
3432 case REDUC_PLUS_EXPR:
3433 case VEC_UNPACK_FLOAT_HI_EXPR:
3434 case VEC_UNPACK_FLOAT_LO_EXPR:
3435 /* FIXME. */
3436 return false;
3437
3438 case NEGATE_EXPR:
3439 case ABS_EXPR:
3440 case BIT_NOT_EXPR:
3441 case PAREN_EXPR:
3442 case NON_LVALUE_EXPR:
3443 case CONJ_EXPR:
3444 break;
3445
3446 default:
3447 gcc_unreachable ();
3448 }
3449
3450 /* For the remaining codes assert there is no conversion involved. */
3451 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3452 {
3453 error ("non-trivial conversion in unary operation");
3454 debug_generic_expr (lhs_type);
3455 debug_generic_expr (rhs1_type);
3456 return true;
3457 }
3458
3459 return false;
3460 }
3461
3462 /* Verify a gimple assignment statement STMT with a binary rhs.
3463 Returns true if anything is wrong. */
3464
3465 static bool
3466 verify_gimple_assign_binary (gimple stmt)
3467 {
3468 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3469 tree lhs = gimple_assign_lhs (stmt);
3470 tree lhs_type = TREE_TYPE (lhs);
3471 tree rhs1 = gimple_assign_rhs1 (stmt);
3472 tree rhs1_type = TREE_TYPE (rhs1);
3473 tree rhs2 = gimple_assign_rhs2 (stmt);
3474 tree rhs2_type = TREE_TYPE (rhs2);
3475
3476 if (!is_gimple_reg (lhs))
3477 {
3478 error ("non-register as LHS of binary operation");
3479 return true;
3480 }
3481
3482 if (!is_gimple_val (rhs1)
3483 || !is_gimple_val (rhs2))
3484 {
3485 error ("invalid operands in binary operation");
3486 return true;
3487 }
3488
3489 /* First handle operations that involve different types. */
3490 switch (rhs_code)
3491 {
3492 case COMPLEX_EXPR:
3493 {
3494 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3495 || !(INTEGRAL_TYPE_P (rhs1_type)
3496 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3497 || !(INTEGRAL_TYPE_P (rhs2_type)
3498 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3499 {
3500 error ("type mismatch in complex expression");
3501 debug_generic_expr (lhs_type);
3502 debug_generic_expr (rhs1_type);
3503 debug_generic_expr (rhs2_type);
3504 return true;
3505 }
3506
3507 return false;
3508 }
3509
3510 case LSHIFT_EXPR:
3511 case RSHIFT_EXPR:
3512 case LROTATE_EXPR:
3513 case RROTATE_EXPR:
3514 {
3515 /* Shifts and rotates are ok on integral types, fixed point
3516 types and integer vector types. */
3517 if ((!INTEGRAL_TYPE_P (rhs1_type)
3518 && !FIXED_POINT_TYPE_P (rhs1_type)
3519 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3520 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3521 || (!INTEGRAL_TYPE_P (rhs2_type)
3522 /* Vector shifts of vectors are also ok. */
3523 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3524 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3525 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3526 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3527 || !useless_type_conversion_p (lhs_type, rhs1_type))
3528 {
3529 error ("type mismatch in shift expression");
3530 debug_generic_expr (lhs_type);
3531 debug_generic_expr (rhs1_type);
3532 debug_generic_expr (rhs2_type);
3533 return true;
3534 }
3535
3536 return false;
3537 }
3538
3539 case VEC_LSHIFT_EXPR:
3540 case VEC_RSHIFT_EXPR:
3541 {
3542 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3543 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3544 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3545 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3546 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3547 || (!INTEGRAL_TYPE_P (rhs2_type)
3548 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3549 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3550 || !useless_type_conversion_p (lhs_type, rhs1_type))
3551 {
3552 error ("type mismatch in vector shift expression");
3553 debug_generic_expr (lhs_type);
3554 debug_generic_expr (rhs1_type);
3555 debug_generic_expr (rhs2_type);
3556 return true;
3557 }
3558 /* For shifting a vector of non-integral components we
3559 only allow shifting by a constant multiple of the element size. */
3560 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3561 && (TREE_CODE (rhs2) != INTEGER_CST
3562 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3563 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3564 {
3565 error ("non-element sized vector shift of floating point vector");
3566 return true;
3567 }
3568
3569 return false;
3570 }
3571
3572 case WIDEN_LSHIFT_EXPR:
3573 {
3574 if (!INTEGRAL_TYPE_P (lhs_type)
3575 || !INTEGRAL_TYPE_P (rhs1_type)
3576 || TREE_CODE (rhs2) != INTEGER_CST
3577 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3578 {
3579 error ("type mismatch in widening vector shift expression");
3580 debug_generic_expr (lhs_type);
3581 debug_generic_expr (rhs1_type);
3582 debug_generic_expr (rhs2_type);
3583 return true;
3584 }
3585
3586 return false;
3587 }
3588
3589 case VEC_WIDEN_LSHIFT_HI_EXPR:
3590 case VEC_WIDEN_LSHIFT_LO_EXPR:
3591 {
3592 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3593 || TREE_CODE (lhs_type) != VECTOR_TYPE
3594 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3595 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3596 || TREE_CODE (rhs2) != INTEGER_CST
3597 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3598 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3599 {
3600 error ("type mismatch in widening vector shift expression");
3601 debug_generic_expr (lhs_type);
3602 debug_generic_expr (rhs1_type);
3603 debug_generic_expr (rhs2_type);
3604 return true;
3605 }
3606
3607 return false;
3608 }
3609
3610 case PLUS_EXPR:
3611 case MINUS_EXPR:
3612 {
3613 /* We use regular PLUS_EXPR and MINUS_EXPR for vectors.
3614 ??? This just makes the checker happy and may not be what is
3615 intended. */
3616 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3617 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3618 {
3619 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3620 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3621 {
3622 error ("invalid non-vector operands to vector valued plus");
3623 return true;
3624 }
3625 lhs_type = TREE_TYPE (lhs_type);
3626 rhs1_type = TREE_TYPE (rhs1_type);
3627 rhs2_type = TREE_TYPE (rhs2_type);
3628 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3629 the pointer into the second operand. */
3630 if (POINTER_TYPE_P (rhs2_type))
3631 {
3632 tree tem = rhs1_type;
3633 rhs1_type = rhs2_type;
3634 rhs2_type = tem;
3635 }
3636 goto do_pointer_plus_expr_check;
3637 }
3638 if (POINTER_TYPE_P (lhs_type)
3639 || POINTER_TYPE_P (rhs1_type)
3640 || POINTER_TYPE_P (rhs2_type))
3641 {
3642 error ("invalid (pointer) operands to plus/minus");
3643 return true;
3644 }
3645
3646 /* Continue with generic binary expression handling. */
3647 break;
3648 }
3649
3650 case POINTER_PLUS_EXPR:
3651 {
3652 do_pointer_plus_expr_check:
3653 if (!POINTER_TYPE_P (rhs1_type)
3654 || !useless_type_conversion_p (lhs_type, rhs1_type)
3655 || !ptrofftype_p (rhs2_type))
3656 {
3657 error ("type mismatch in pointer plus expression");
3658 debug_generic_stmt (lhs_type);
3659 debug_generic_stmt (rhs1_type);
3660 debug_generic_stmt (rhs2_type);
3661 return true;
3662 }
3663
3664 return false;
3665 }
3666
3667 case TRUTH_ANDIF_EXPR:
3668 case TRUTH_ORIF_EXPR:
3669 case TRUTH_AND_EXPR:
3670 case TRUTH_OR_EXPR:
3671 case TRUTH_XOR_EXPR:
3672
3673 gcc_unreachable ();
3674
3675 case LT_EXPR:
3676 case LE_EXPR:
3677 case GT_EXPR:
3678 case GE_EXPR:
3679 case EQ_EXPR:
3680 case NE_EXPR:
3681 case UNORDERED_EXPR:
3682 case ORDERED_EXPR:
3683 case UNLT_EXPR:
3684 case UNLE_EXPR:
3685 case UNGT_EXPR:
3686 case UNGE_EXPR:
3687 case UNEQ_EXPR:
3688 case LTGT_EXPR:
3689 /* Comparisons are also binary, but the result type is not
3690 connected to the operand types. */
3691 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3692
3693 case WIDEN_MULT_EXPR:
3694 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3695 return true;
3696 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3697 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3698
3699 case WIDEN_SUM_EXPR:
3700 case VEC_WIDEN_MULT_HI_EXPR:
3701 case VEC_WIDEN_MULT_LO_EXPR:
3702 case VEC_PACK_TRUNC_EXPR:
3703 case VEC_PACK_SAT_EXPR:
3704 case VEC_PACK_FIX_TRUNC_EXPR:
3705 case VEC_EXTRACT_EVEN_EXPR:
3706 case VEC_EXTRACT_ODD_EXPR:
3707 case VEC_INTERLEAVE_HIGH_EXPR:
3708 case VEC_INTERLEAVE_LOW_EXPR:
3709 /* FIXME. */
3710 return false;
3711
3712 case MULT_EXPR:
3713 case TRUNC_DIV_EXPR:
3714 case CEIL_DIV_EXPR:
3715 case FLOOR_DIV_EXPR:
3716 case ROUND_DIV_EXPR:
3717 case TRUNC_MOD_EXPR:
3718 case CEIL_MOD_EXPR:
3719 case FLOOR_MOD_EXPR:
3720 case ROUND_MOD_EXPR:
3721 case RDIV_EXPR:
3722 case EXACT_DIV_EXPR:
3723 case MIN_EXPR:
3724 case MAX_EXPR:
3725 case BIT_IOR_EXPR:
3726 case BIT_XOR_EXPR:
3727 case BIT_AND_EXPR:
3728 /* Continue with generic binary expression handling. */
3729 break;
3730
3731 default:
3732 gcc_unreachable ();
3733 }
3734
3735 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3736 || !useless_type_conversion_p (lhs_type, rhs2_type))
3737 {
3738 error ("type mismatch in binary expression");
3739 debug_generic_stmt (lhs_type);
3740 debug_generic_stmt (rhs1_type);
3741 debug_generic_stmt (rhs2_type);
3742 return true;
3743 }
3744
3745 return false;
3746 }
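/* For instance (sketch), pointer arithmetic such as

     q_2 = p_1 + 4;

   must reach here as POINTER_PLUS_EXPR with a ptrofftype offset; the
   same statement expressed as PLUS_EXPR on a pointer type is rejected
   above.  */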
3747
3748 /* Verify a gimple assignment statement STMT with a ternary rhs.
3749 Returns true if anything is wrong. */
3750
3751 static bool
3752 verify_gimple_assign_ternary (gimple stmt)
3753 {
3754 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3755 tree lhs = gimple_assign_lhs (stmt);
3756 tree lhs_type = TREE_TYPE (lhs);
3757 tree rhs1 = gimple_assign_rhs1 (stmt);
3758 tree rhs1_type = TREE_TYPE (rhs1);
3759 tree rhs2 = gimple_assign_rhs2 (stmt);
3760 tree rhs2_type = TREE_TYPE (rhs2);
3761 tree rhs3 = gimple_assign_rhs3 (stmt);
3762 tree rhs3_type = TREE_TYPE (rhs3);
3763
3764 if (!is_gimple_reg (lhs))
3765 {
3766 error ("non-register as LHS of ternary operation");
3767 return true;
3768 }
3769
3770 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3771 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3772 || !is_gimple_val (rhs2)
3773 || !is_gimple_val (rhs3))
3774 {
3775 error ("invalid operands in ternary operation");
3776 return true;
3777 }
3778
3779 /* First handle operations that involve different types. */
3780 switch (rhs_code)
3781 {
3782 case WIDEN_MULT_PLUS_EXPR:
3783 case WIDEN_MULT_MINUS_EXPR:
3784 if ((!INTEGRAL_TYPE_P (rhs1_type)
3785 && !FIXED_POINT_TYPE_P (rhs1_type))
3786 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3787 || !useless_type_conversion_p (lhs_type, rhs3_type)
3788 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3789 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3790 {
3791 error ("type mismatch in widening multiply-accumulate expression");
3792 debug_generic_expr (lhs_type);
3793 debug_generic_expr (rhs1_type);
3794 debug_generic_expr (rhs2_type);
3795 debug_generic_expr (rhs3_type);
3796 return true;
3797 }
3798 break;
3799
3800 case FMA_EXPR:
3801 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3802 || !useless_type_conversion_p (lhs_type, rhs2_type)
3803 || !useless_type_conversion_p (lhs_type, rhs3_type))
3804 {
3805 error ("type mismatch in fused multiply-add expression");
3806 debug_generic_expr (lhs_type);
3807 debug_generic_expr (rhs1_type);
3808 debug_generic_expr (rhs2_type);
3809 debug_generic_expr (rhs3_type);
3810 return true;
3811 }
3812 break;
3813
3814 case COND_EXPR:
3815 case VEC_COND_EXPR:
3816 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3817 || !useless_type_conversion_p (lhs_type, rhs3_type))
3818 {
3819 error ("type mismatch in conditional expression");
3820 debug_generic_expr (lhs_type);
3821 debug_generic_expr (rhs2_type);
3822 debug_generic_expr (rhs3_type);
3823 return true;
3824 }
3825 break;
3826
3827 case VEC_PERM_EXPR:
3828 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3829 || !useless_type_conversion_p (lhs_type, rhs2_type))
3830 {
3831 error ("type mismatch in vector permute expression");
3832 debug_generic_expr (lhs_type);
3833 debug_generic_expr (rhs1_type);
3834 debug_generic_expr (rhs2_type);
3835 debug_generic_expr (rhs3_type);
3836 return true;
3837 }
3838
3839 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3840 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3841 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3842 {
3843 error ("vector types expected in vector permute expression");
3844 debug_generic_expr (lhs_type);
3845 debug_generic_expr (rhs1_type);
3846 debug_generic_expr (rhs2_type);
3847 debug_generic_expr (rhs3_type);
3848 return true;
3849 }
3850
3851 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3852 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3853 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3854 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3855 != TYPE_VECTOR_SUBPARTS (lhs_type))
3856 {
3857 error ("vectors with different element number found "
3858 "in vector permute expression");
3859 debug_generic_expr (lhs_type);
3860 debug_generic_expr (rhs1_type);
3861 debug_generic_expr (rhs2_type);
3862 debug_generic_expr (rhs3_type);
3863 return true;
3864 }
3865
3866 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3867 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3868 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3869 {
3870 error ("invalid mask type in vector permute expression");
3871 debug_generic_expr (lhs_type);
3872 debug_generic_expr (rhs1_type);
3873 debug_generic_expr (rhs2_type);
3874 debug_generic_expr (rhs3_type);
3875 return true;
3876 }
3877
3878 return false;
3879
3880 case DOT_PROD_EXPR:
3881 case REALIGN_LOAD_EXPR:
3882 /* FIXME. */
3883 return false;
3884
3885 default:
3886 gcc_unreachable ();
3887 }
3888 return false;
3889 }
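/* E.g. (sketch, hypothetical SSA names), a ternary assignment

     x_4 = c_1 ? y_2 : z_3;

   uses COND_EXPR: RHS1 must be a valid condition and both arms must be
   trivially convertible to the type of the LHS.  */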
3890
3891 /* Verify a gimple assignment statement STMT with a single rhs.
3892 Returns true if anything is wrong. */
3893
3894 static bool
3895 verify_gimple_assign_single (gimple stmt)
3896 {
3897 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3898 tree lhs = gimple_assign_lhs (stmt);
3899 tree lhs_type = TREE_TYPE (lhs);
3900 tree rhs1 = gimple_assign_rhs1 (stmt);
3901 tree rhs1_type = TREE_TYPE (rhs1);
3902 bool res = false;
3903
3904 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3905 {
3906 error ("non-trivial conversion at assignment");
3907 debug_generic_expr (lhs_type);
3908 debug_generic_expr (rhs1_type);
3909 return true;
3910 }
3911
3912 if (handled_component_p (lhs))
3913 res |= verify_types_in_gimple_reference (lhs, true);
3914
3915 /* Special codes we cannot handle via their class. */
3916 switch (rhs_code)
3917 {
3918 case ADDR_EXPR:
3919 {
3920 tree op = TREE_OPERAND (rhs1, 0);
3921 if (!is_gimple_addressable (op))
3922 {
3923 error ("invalid operand in unary expression");
3924 return true;
3925 }
3926
3927 /* Technically there is no longer a need for matching types, but
3928 gimple hygiene asks for this check. In LTO we can end up
3929 combining incompatible units and thus end up with addresses
3930 of globals that change their type to a common one. */
3931 if (!in_lto_p
3932 && !types_compatible_p (TREE_TYPE (op),
3933 TREE_TYPE (TREE_TYPE (rhs1)))
3934 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3935 TREE_TYPE (op)))
3936 {
3937 error ("type mismatch in address expression");
3938 debug_generic_stmt (TREE_TYPE (rhs1));
3939 debug_generic_stmt (TREE_TYPE (op));
3940 return true;
3941 }
3942
3943 return verify_types_in_gimple_reference (op, true);
3944 }
3945
3946 /* tcc_reference */
3947 case INDIRECT_REF:
3948 error ("INDIRECT_REF in gimple IL");
3949 return true;
3950
3951 case COMPONENT_REF:
3952 case BIT_FIELD_REF:
3953 case ARRAY_REF:
3954 case ARRAY_RANGE_REF:
3955 case VIEW_CONVERT_EXPR:
3956 case REALPART_EXPR:
3957 case IMAGPART_EXPR:
3958 case TARGET_MEM_REF:
3959 case MEM_REF:
3960 if (!is_gimple_reg (lhs)
3961 && is_gimple_reg_type (TREE_TYPE (lhs)))
3962 {
3963 error ("invalid rhs for gimple memory store");
3964 debug_generic_stmt (lhs);
3965 debug_generic_stmt (rhs1);
3966 return true;
3967 }
3968 return res || verify_types_in_gimple_reference (rhs1, false);
3969
3970 /* tcc_constant */
3971 case SSA_NAME:
3972 case INTEGER_CST:
3973 case REAL_CST:
3974 case FIXED_CST:
3975 case COMPLEX_CST:
3976 case VECTOR_CST:
3977 case STRING_CST:
3978 return res;
3979
3980 /* tcc_declaration */
3981 case CONST_DECL:
3982 return res;
3983 case VAR_DECL:
3984 case PARM_DECL:
3985 if (!is_gimple_reg (lhs)
3986 && !is_gimple_reg (rhs1)
3987 && is_gimple_reg_type (TREE_TYPE (lhs)))
3988 {
3989 error ("invalid rhs for gimple memory store");
3990 debug_generic_stmt (lhs);
3991 debug_generic_stmt (rhs1);
3992 return true;
3993 }
3994 return res;
3995
3996 case CONSTRUCTOR:
3997 case OBJ_TYPE_REF:
3998 case ASSERT_EXPR:
3999 case WITH_SIZE_EXPR:
4000 /* FIXME. */
4001 return res;
4002
4003 default:;
4004 }
4005
4006 return res;
4007 }
4008
4009 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4010 is a problem, otherwise false. */
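/* For illustration, assuming the usual GIMPLE forms: "x = y" has class
   GIMPLE_SINGLE_RHS, "x = -y" GIMPLE_UNARY_RHS, "x = y + z"
   GIMPLE_BINARY_RHS, and "x = FMA_EXPR <a, b, c>" GIMPLE_TERNARY_RHS.  */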
4011
4012 static bool
4013 verify_gimple_assign (gimple stmt)
4014 {
4015 switch (gimple_assign_rhs_class (stmt))
4016 {
4017 case GIMPLE_SINGLE_RHS:
4018 return verify_gimple_assign_single (stmt);
4019
4020 case GIMPLE_UNARY_RHS:
4021 return verify_gimple_assign_unary (stmt);
4022
4023 case GIMPLE_BINARY_RHS:
4024 return verify_gimple_assign_binary (stmt);
4025
4026 case GIMPLE_TERNARY_RHS:
4027 return verify_gimple_assign_ternary (stmt);
4028
4029 default:
4030 gcc_unreachable ();
4031 }
4032 }
4033
4034 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4035 is a problem, otherwise false. */
4036
4037 static bool
4038 verify_gimple_return (gimple stmt)
4039 {
4040 tree op = gimple_return_retval (stmt);
4041 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4042
4043 /* We cannot test for present return values as we do not fix up missing
4044 return values from the original source. */
4045 if (op == NULL)
4046 return false;
4047
4048 if (!is_gimple_val (op)
4049 && TREE_CODE (op) != RESULT_DECL)
4050 {
4051 error ("invalid operand in return statement");
4052 debug_generic_stmt (op);
4053 return true;
4054 }
4055
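  /* For a DECL_BY_REFERENCE result, the declared type is a pointer to
     the actual result type, so strip one level and let the check below
     compare against the pointed-to type (OP is replaced by its type).  */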
4056 if ((TREE_CODE (op) == RESULT_DECL
4057 && DECL_BY_REFERENCE (op))
4058 || (TREE_CODE (op) == SSA_NAME
4059 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4060 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4061 op = TREE_TYPE (op);
4062
4063 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4064 {
4065 error ("invalid conversion in return statement");
4066 debug_generic_stmt (restype);
4067 debug_generic_stmt (TREE_TYPE (op));
4068 return true;
4069 }
4070
4071 return false;
4072 }
4073
4074
4075 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4076 is a problem, otherwise false. */
4077
4078 static bool
4079 verify_gimple_goto (gimple stmt)
4080 {
4081 tree dest = gimple_goto_dest (stmt);
4082
4083 /* ??? We have two canonical forms of direct goto destinations, a
4084 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
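  /* E.g., a plain "goto lab;" has a bare LABEL_DECL destination, while a
     computed "goto *p;" has a pointer-valued destination (sketch).  */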
4085 if (TREE_CODE (dest) != LABEL_DECL
4086 && (!is_gimple_val (dest)
4087 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4088 {
4089 error ("goto destination is neither a label nor a pointer");
4090 return true;
4091 }
4092
4093 return false;
4094 }
4095
4096 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4097 is a problem, otherwise false. */
4098
4099 static bool
4100 verify_gimple_switch (gimple stmt)
4101 {
4102 if (!is_gimple_val (gimple_switch_index (stmt)))
4103 {
4104 error ("invalid operand to switch statement");
4105 debug_generic_stmt (gimple_switch_index (stmt));
4106 return true;
4107 }
4108
4109 return false;
4110 }
4111
4112 /* Verify a gimple debug statement STMT.
4113 Returns true if anything is wrong. */
4114
4115 static bool
4116 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4117 {
4118 /* There isn't much that could be wrong in a gimple debug stmt. A
4119 gimple debug bind stmt, for example, maps a tree (usually a
4120 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4121 member of an aggregate type) to another tree that can be an
4122 arbitrary expression. These stmts expand into debug insns, and
4123 are converted to debug notes by var-tracking.c. */
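  /* E.g., a bind stmt printed as "# DEBUG x => a_1 + 1" records how to
     recover the user variable x without constraining optimization
     (illustrative form).  */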
4124 return false;
4125 }
4126
4127 /* Verify a gimple label statement STMT.
4128 Returns true if anything is wrong. */
4129
4130 static bool
4131 verify_gimple_label (gimple stmt)
4132 {
4133 tree decl = gimple_label_label (stmt);
4134 int uid;
4135 bool err = false;
4136
4137 if (TREE_CODE (decl) != LABEL_DECL)
4138 return true;
4139
4140 uid = LABEL_DECL_UID (decl);
4141 if (cfun->cfg
4142 && (uid == -1
4143 || VEC_index (basic_block,
4144 label_to_block_map, uid) != gimple_bb (stmt)))
4145 {
4146 error ("incorrect entry in label_to_block_map");
4147 err |= true;
4148 }
4149
4150 uid = EH_LANDING_PAD_NR (decl);
4151 if (uid)
4152 {
4153 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4154 if (decl != lp->post_landing_pad)
4155 {
4156 error ("incorrect setting of landing pad number");
4157 err |= true;
4158 }
4159 }
4160
4161 return err;
4162 }
4163
4164 /* Verify the GIMPLE statement STMT. Returns true if there is an
4165 error, otherwise false. */
4166
4167 static bool
4168 verify_gimple_stmt (gimple stmt)
4169 {
4170 switch (gimple_code (stmt))
4171 {
4172 case GIMPLE_ASSIGN:
4173 return verify_gimple_assign (stmt);
4174
4175 case GIMPLE_LABEL:
4176 return verify_gimple_label (stmt);
4177
4178 case GIMPLE_CALL:
4179 return verify_gimple_call (stmt);
4180
4181 case GIMPLE_COND:
4182 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4183 {
4184 error ("invalid comparison code in gimple cond");
4185 return true;
4186 }
4187 if (!(!gimple_cond_true_label (stmt)
4188 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4189 || !(!gimple_cond_false_label (stmt)
4190 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4191 {
4192 error ("invalid labels in gimple cond");
4193 return true;
4194 }
4195
4196 return verify_gimple_comparison (boolean_type_node,
4197 gimple_cond_lhs (stmt),
4198 gimple_cond_rhs (stmt));
4199
4200 case GIMPLE_GOTO:
4201 return verify_gimple_goto (stmt);
4202
4203 case GIMPLE_SWITCH:
4204 return verify_gimple_switch (stmt);
4205
4206 case GIMPLE_RETURN:
4207 return verify_gimple_return (stmt);
4208
4209 case GIMPLE_ASM:
4210 return false;
4211
4212 case GIMPLE_TRANSACTION:
4213 return verify_gimple_transaction (stmt);
4214
4215 /* Tuples that do not have tree operands. */
4216 case GIMPLE_NOP:
4217 case GIMPLE_PREDICT:
4218 case GIMPLE_RESX:
4219 case GIMPLE_EH_DISPATCH:
4220 case GIMPLE_EH_MUST_NOT_THROW:
4221 return false;
4222
4223 CASE_GIMPLE_OMP:
4224 /* OpenMP directives are validated by the FE and never operated
4225 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4226 non-gimple expressions when the main index variable has had
4227 its address taken. This does not affect the loop itself
4228 because the header of a GIMPLE_OMP_FOR is merely used to determine
4229 how to set up the parallel iteration. */
4230 return false;
4231
4232 case GIMPLE_DEBUG:
4233 return verify_gimple_debug (stmt);
4234
4235 default:
4236 gcc_unreachable ();
4237 }
4238 }
4239
4240 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4241 and false otherwise. */
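/* For example, at a join point one may see "x_3 = PHI <x_1(2), x_2(3)>":
   the result must be an SSA name and, for a non-virtual PHI, every
   argument must be a gimple value (illustrative form).  */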
4242
4243 static bool
4244 verify_gimple_phi (gimple phi)
4245 {
4246 bool err = false;
4247 unsigned i;
4248 tree phi_result = gimple_phi_result (phi);
4249 bool virtual_p;
4250
4251 if (!phi_result)
4252 {
4253 error ("invalid PHI result");
4254 return true;
4255 }
4256
4257 virtual_p = !is_gimple_reg (phi_result);
4258 if (TREE_CODE (phi_result) != SSA_NAME
4259 || (virtual_p
4260 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4261 {
4262 error ("invalid PHI result");
4263 err = true;
4264 }
4265
4266 for (i = 0; i < gimple_phi_num_args (phi); i++)
4267 {
4268 tree t = gimple_phi_arg_def (phi, i);
4269
4270 if (!t)
4271 {
4272 error ("missing PHI def");
4273 err |= true;
4274 continue;
4275 }
4276 /* Addressable variables do have SSA_NAMEs but they
4277 are not considered gimple values. */
4278 else if ((TREE_CODE (t) == SSA_NAME
4279 && virtual_p != !is_gimple_reg (t))
4280 || (virtual_p
4281 && (TREE_CODE (t) != SSA_NAME
4282 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4283 || (!virtual_p
4284 && !is_gimple_val (t)))
4285 {
4286 error ("invalid PHI argument");
4287 debug_generic_expr (t);
4288 err |= true;
4289 }
4290 #ifdef ENABLE_TYPES_CHECKING
4291 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4292 {
4293 error ("incompatible types in PHI argument %u", i);
4294 debug_generic_stmt (TREE_TYPE (phi_result));
4295 debug_generic_stmt (TREE_TYPE (t));
4296 err |= true;
4297 }
4298 #endif
4299 }
4300
4301 return err;
4302 }
4303
4304 /* Verify the GIMPLE statements inside the sequence STMTS. */
4305
4306 static bool
4307 verify_gimple_in_seq_2 (gimple_seq stmts)
4308 {
4309 gimple_stmt_iterator ittr;
4310 bool err = false;
4311
4312 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4313 {
4314 gimple stmt = gsi_stmt (ittr);
4315
4316 switch (gimple_code (stmt))
4317 {
4318 case GIMPLE_BIND:
4319 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4320 break;
4321
4322 case GIMPLE_TRY:
4323 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4324 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4325 break;
4326
4327 case GIMPLE_EH_FILTER:
4328 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4329 break;
4330
4331 case GIMPLE_EH_ELSE:
4332 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4333 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4334 break;
4335
4336 case GIMPLE_CATCH:
4337 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4338 break;
4339
4340 case GIMPLE_TRANSACTION:
4341 err |= verify_gimple_transaction (stmt);
4342 break;
4343
4344 default:
4345 {
4346 bool err2 = verify_gimple_stmt (stmt);
4347 if (err2)
4348 debug_gimple_stmt (stmt);
4349 err |= err2;
4350 }
4351 }
4352 }
4353
4354 return err;
4355 }
4356
4357 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4358 is a problem, otherwise false. */
4359
4360 static bool
4361 verify_gimple_transaction (gimple stmt)
4362 {
4363 tree lab = gimple_transaction_label (stmt);
4364 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4365 return true;
4366 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4367 }
4368
4369
4370 /* Verify the GIMPLE statements inside the statement list STMTS. */
4371
4372 DEBUG_FUNCTION void
4373 verify_gimple_in_seq (gimple_seq stmts)
4374 {
4375 timevar_push (TV_TREE_STMT_VERIFY);
4376 if (verify_gimple_in_seq_2 (stmts))
4377 internal_error ("verify_gimple failed");
4378 timevar_pop (TV_TREE_STMT_VERIFY);
4379 }
4380
4381 /* Return true when the tree node T can be shared. */
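/* E.g., constants, types, decls and SSA_NAMEs may safely appear in many
   statements, whereas a reference tree such as an ARRAY_REF with a
   non-invariant index must be unique to one statement (illustration).  */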
4382
4383 bool
4384 tree_node_can_be_shared (tree t)
4385 {
4386 if (IS_TYPE_OR_DECL_P (t)
4387 || is_gimple_min_invariant (t)
4388 || TREE_CODE (t) == SSA_NAME
4389 || t == error_mark_node
4390 || TREE_CODE (t) == IDENTIFIER_NODE)
4391 return true;
4392
4393 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4394 return true;
4395
4396 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4397 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4398 || TREE_CODE (t) == COMPONENT_REF
4399 || TREE_CODE (t) == REALPART_EXPR
4400 || TREE_CODE (t) == IMAGPART_EXPR)
4401 t = TREE_OPERAND (t, 0);
4402
4403 if (DECL_P (t))
4404 return true;
4405
4406 return false;
4407 }
4408
4409 /* Called via walk_gimple_stmt. Verify tree sharing. */
4410
4411 static tree
4412 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4413 {
4414 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4415 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4416
4417 if (tree_node_can_be_shared (*tp))
4418 {
4419 *walk_subtrees = false;
4420 return NULL;
4421 }
4422
4423 if (pointer_set_insert (visited, *tp))
4424 return *tp;
4425
4426 return NULL;
4427 }
4428
4429 static bool eh_error_found;
4430 static int
4431 verify_eh_throw_stmt_node (void **slot, void *data)
4432 {
4433 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4434 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4435
4436 if (!pointer_set_contains (visited, node->stmt))
4437 {
4438 error ("dead STMT in EH table");
4439 debug_gimple_stmt (node->stmt);
4440 eh_error_found = true;
4441 }
4442 return 1;
4443 }
4444
4445 /* Verify the GIMPLE statements in the CFG of FN. */
4446
4447 DEBUG_FUNCTION void
4448 verify_gimple_in_cfg (struct function *fn)
4449 {
4450 basic_block bb;
4451 bool err = false;
4452 struct pointer_set_t *visited, *visited_stmts;
4453
4454 timevar_push (TV_TREE_STMT_VERIFY);
4455 visited = pointer_set_create ();
4456 visited_stmts = pointer_set_create ();
4457
4458 FOR_EACH_BB_FN (bb, fn)
4459 {
4460 gimple_stmt_iterator gsi;
4461
4462 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4463 {
4464 gimple phi = gsi_stmt (gsi);
4465 bool err2 = false;
4466 unsigned i;
4467
4468 pointer_set_insert (visited_stmts, phi);
4469
4470 if (gimple_bb (phi) != bb)
4471 {
4472 error ("gimple_bb (phi) is set to a wrong basic block");
4473 err2 = true;
4474 }
4475
4476 err2 |= verify_gimple_phi (phi);
4477
4478 for (i = 0; i < gimple_phi_num_args (phi); i++)
4479 {
4480 tree arg = gimple_phi_arg_def (phi, i);
4481 tree addr = walk_tree (&arg, verify_node_sharing, visited, NULL);
4482 if (addr)
4483 {
4484 error ("incorrect sharing of tree nodes");
4485 debug_generic_expr (addr);
4486 err2 |= true;
4487 }
4488 }
4489
4490 if (err2)
4491 debug_gimple_stmt (phi);
4492 err |= err2;
4493 }
4494
4495 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4496 {
4497 gimple stmt = gsi_stmt (gsi);
4498 bool err2 = false;
4499 struct walk_stmt_info wi;
4500 tree addr;
4501 int lp_nr;
4502
4503 pointer_set_insert (visited_stmts, stmt);
4504
4505 if (gimple_bb (stmt) != bb)
4506 {
4507 error ("gimple_bb (stmt) is set to a wrong basic block");
4508 err2 = true;
4509 }
4510
4511 err2 |= verify_gimple_stmt (stmt);
4512
4513 memset (&wi, 0, sizeof (wi));
4514 wi.info = (void *) visited;
4515 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4516 if (addr)
4517 {
4518 error ("incorrect sharing of tree nodes");
4519 debug_generic_expr (addr);
4520 err2 |= true;
4521 }
4522
4523 /* ??? Instead of not checking these stmts at all, the walker
4524 should know its context via wi. */
4525 if (!is_gimple_debug (stmt)
4526 && !is_gimple_omp (stmt))
4527 {
4528 memset (&wi, 0, sizeof (wi));
4529 addr = walk_gimple_op (stmt, verify_expr, &wi);
4530 if (addr)
4531 {
4532 debug_generic_expr (addr);
4533 inform (gimple_location (stmt), "in statement");
4534 err2 |= true;
4535 }
4536 }
4537
4538 /* If the statement is marked as part of an EH region, then it is
4539 expected that the statement could throw. Verify that when
4540 optimizations simplify a statement such that we can prove it
4541 cannot throw, we also update the other data structures to match. */
4543 lp_nr = lookup_stmt_eh_lp (stmt);
4544 if (lp_nr != 0)
4545 {
4546 if (!stmt_could_throw_p (stmt))
4547 {
4548 error ("statement marked for throw, but doesn%'t");
4549 err2 |= true;
4550 }
4551 else if (lp_nr > 0
4552 && !gsi_one_before_end_p (gsi)
4553 && stmt_can_throw_internal (stmt))
4554 {
4555 error ("statement marked for throw in middle of block");
4556 err2 |= true;
4557 }
4558 }
4559
4560 if (err2)
4561 debug_gimple_stmt (stmt);
4562 err |= err2;
4563 }
4564 }
4565
4566 eh_error_found = false;
4567 if (get_eh_throw_stmt_table (cfun))
4568 htab_traverse (get_eh_throw_stmt_table (cfun),
4569 verify_eh_throw_stmt_node,
4570 visited_stmts);
4571
4572 if (err || eh_error_found)
4573 internal_error ("verify_gimple failed");
4574
4575 pointer_set_destroy (visited);
4576 pointer_set_destroy (visited_stmts);
4577 verify_histograms ();
4578 timevar_pop (TV_TREE_STMT_VERIFY);
4579 }
4580
4581
4582 /* Verifies that the flow information is OK. */
4583
4584 static int
4585 gimple_verify_flow_info (void)
4586 {
4587 int err = 0;
4588 basic_block bb;
4589 gimple_stmt_iterator gsi;
4590 gimple stmt;
4591 edge e;
4592 edge_iterator ei;
4593
4594 if (ENTRY_BLOCK_PTR->il.gimple)
4595 {
4596 error ("ENTRY_BLOCK has IL associated with it");
4597 err = 1;
4598 }
4599
4600 if (EXIT_BLOCK_PTR->il.gimple)
4601 {
4602 error ("EXIT_BLOCK has IL associated with it");
4603 err = 1;
4604 }
4605
4606 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4607 if (e->flags & EDGE_FALLTHRU)
4608 {
4609 error ("fallthru to exit from bb %d", e->src->index);
4610 err = 1;
4611 }
4612
4613 FOR_EACH_BB (bb)
4614 {
4615 bool found_ctrl_stmt = false;
4616
4617 stmt = NULL;
4618
4619 /* Skip labels at the start of the basic block. */
4620 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4621 {
4622 tree label;
4623 gimple prev_stmt = stmt;
4624
4625 stmt = gsi_stmt (gsi);
4626
4627 if (gimple_code (stmt) != GIMPLE_LABEL)
4628 break;
4629
4630 label = gimple_label_label (stmt);
4631 if (prev_stmt && DECL_NONLOCAL (label))
4632 {
4633 error ("nonlocal label ");
4634 print_generic_expr (stderr, label, 0);
4635 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4636 bb->index);
4637 err = 1;
4638 }
4639
4640 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4641 {
4642 error ("EH landing pad label ");
4643 print_generic_expr (stderr, label, 0);
4644 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4645 bb->index);
4646 err = 1;
4647 }
4648
4649 if (label_to_block (label) != bb)
4650 {
4651 error ("label ");
4652 print_generic_expr (stderr, label, 0);
4653 fprintf (stderr, " to block does not match in bb %d",
4654 bb->index);
4655 err = 1;
4656 }
4657
4658 if (decl_function_context (label) != current_function_decl)
4659 {
4660 error ("label ");
4661 print_generic_expr (stderr, label, 0);
4662 fprintf (stderr, " has incorrect context in bb %d",
4663 bb->index);
4664 err = 1;
4665 }
4666 }
4667
4668 /* Verify that the body of basic block BB is free of control flow. */
4669 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4670 {
4671 gimple stmt = gsi_stmt (gsi);
4672
4673 if (found_ctrl_stmt)
4674 {
4675 error ("control flow in the middle of basic block %d",
4676 bb->index);
4677 err = 1;
4678 }
4679
4680 if (stmt_ends_bb_p (stmt))
4681 found_ctrl_stmt = true;
4682
4683 if (gimple_code (stmt) == GIMPLE_LABEL)
4684 {
4685 error ("label ");
4686 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4687 fprintf (stderr, " in the middle of basic block %d", bb->index);
4688 err = 1;
4689 }
4690 }
4691
4692 gsi = gsi_last_bb (bb);
4693 if (gsi_end_p (gsi))
4694 continue;
4695
4696 stmt = gsi_stmt (gsi);
4697
4698 if (gimple_code (stmt) == GIMPLE_LABEL)
4699 continue;
4700
4701 err |= verify_eh_edges (stmt);
4702
4703 if (is_ctrl_stmt (stmt))
4704 {
4705 FOR_EACH_EDGE (e, ei, bb->succs)
4706 if (e->flags & EDGE_FALLTHRU)
4707 {
4708 error ("fallthru edge after a control statement in bb %d",
4709 bb->index);
4710 err = 1;
4711 }
4712 }
4713
4714 if (gimple_code (stmt) != GIMPLE_COND)
4715 {
4716 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4717 after anything other than a GIMPLE_COND. */
4718 FOR_EACH_EDGE (e, ei, bb->succs)
4719 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4720 {
4721 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4722 bb->index);
4723 err = 1;
4724 }
4725 }
4726
4727 switch (gimple_code (stmt))
4728 {
4729 case GIMPLE_COND:
4730 {
4731 edge true_edge;
4732 edge false_edge;
4733
4734 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4735
4736 if (!true_edge
4737 || !false_edge
4738 || !(true_edge->flags & EDGE_TRUE_VALUE)
4739 || !(false_edge->flags & EDGE_FALSE_VALUE)
4740 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4741 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4742 || EDGE_COUNT (bb->succs) >= 3)
4743 {
4744 error ("wrong outgoing edge flags at end of bb %d",
4745 bb->index);
4746 err = 1;
4747 }
4748 }
4749 break;
4750
4751 case GIMPLE_GOTO:
4752 if (simple_goto_p (stmt))
4753 {
4754 error ("explicit goto at end of bb %d", bb->index);
4755 err = 1;
4756 }
4757 else
4758 {
4759 /* FIXME. We should double check that the labels in the
4760 destination blocks have their address taken. */
4761 FOR_EACH_EDGE (e, ei, bb->succs)
4762 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4763 | EDGE_FALSE_VALUE))
4764 || !(e->flags & EDGE_ABNORMAL))
4765 {
4766 error ("wrong outgoing edge flags at end of bb %d",
4767 bb->index);
4768 err = 1;
4769 }
4770 }
4771 break;
4772
4773 case GIMPLE_CALL:
4774 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4775 break;
4776 /* ... fallthru ... */
4777 case GIMPLE_RETURN:
4778 if (!single_succ_p (bb)
4779 || (single_succ_edge (bb)->flags
4780 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4781 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4782 {
4783 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4784 err = 1;
4785 }
4786 if (single_succ (bb) != EXIT_BLOCK_PTR)
4787 {
4788 error ("return edge does not point to exit in bb %d",
4789 bb->index);
4790 err = 1;
4791 }
4792 break;
4793
4794 case GIMPLE_SWITCH:
4795 {
4796 tree prev;
4797 edge e;
4798 size_t i, n;
4799
4800 n = gimple_switch_num_labels (stmt);
4801
4802 /* Mark all the destination basic blocks. */
4803 for (i = 0; i < n; ++i)
4804 {
4805 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4806 basic_block label_bb = label_to_block (lab);
4807 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4808 label_bb->aux = (void *)1;
4809 }
4810
4811 /* Verify that the case labels are sorted. */
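	    /* E.g., a valid vector is { default:, case 1:, case 4:, case 9: }:
	       the default label (if present) comes first, followed by
	       strictly ascending CASE_LOW values (illustration).  */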
4812 prev = gimple_switch_label (stmt, 0);
4813 for (i = 1; i < n; ++i)
4814 {
4815 tree c = gimple_switch_label (stmt, i);
4816 if (!CASE_LOW (c))
4817 {
4818 error ("found default case not at the start of "
4819 "case vector");
4820 err = 1;
4821 continue;
4822 }
4823 if (CASE_LOW (prev)
4824 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4825 {
4826 error ("case labels not sorted: ");
4827 print_generic_expr (stderr, prev, 0);
4828 fprintf (stderr, " is greater than ");
4829 print_generic_expr (stderr, c, 0);
4830 fprintf (stderr, " but comes before it.\n");
4831 err = 1;
4832 }
4833 prev = c;
4834 }
4835 /* VRP will remove the default case if it can prove it will
4836 never be executed. So do not verify there always exists
4837 a default case here. */
4838
4839 FOR_EACH_EDGE (e, ei, bb->succs)
4840 {
4841 if (!e->dest->aux)
4842 {
4843 error ("extra outgoing edge %d->%d",
4844 bb->index, e->dest->index);
4845 err = 1;
4846 }
4847
4848 e->dest->aux = (void *)2;
4849 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4850 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4851 {
4852 error ("wrong outgoing edge flags at end of bb %d",
4853 bb->index);
4854 err = 1;
4855 }
4856 }
4857
4858 /* Check that we have all of them. */
4859 for (i = 0; i < n; ++i)
4860 {
4861 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4862 basic_block label_bb = label_to_block (lab);
4863
4864 if (label_bb->aux != (void *)2)
4865 {
4866 error ("missing edge %i->%i", bb->index, label_bb->index);
4867 err = 1;
4868 }
4869 }
4870
4871 FOR_EACH_EDGE (e, ei, bb->succs)
4872 e->dest->aux = (void *)0;
4873 }
4874 break;
4875
4876 case GIMPLE_EH_DISPATCH:
4877 err |= verify_eh_dispatch_edge (stmt);
4878 break;
4879
4880 default:
4881 break;
4882 }
4883 }
4884
4885 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4886 verify_dominators (CDI_DOMINATORS);
4887
4888 return err;
4889 }
4890
4891
4892 /* Updates phi nodes after creating a forwarder block joined
4893 by edge FALLTHRU. */
4894
4895 static void
4896 gimple_make_forwarder_block (edge fallthru)
4897 {
4898 edge e;
4899 edge_iterator ei;
4900 basic_block dummy, bb;
4901 tree var;
4902 gimple_stmt_iterator gsi;
4903
4904 dummy = fallthru->src;
4905 bb = fallthru->dest;
4906
4907 if (single_pred_p (bb))
4908 return;
4909
4910 /* If we redirected a branch we must create new PHI nodes at the
4911 start of BB. */
4912 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4913 {
4914 gimple phi, new_phi;
4915
4916 phi = gsi_stmt (gsi);
4917 var = gimple_phi_result (phi);
4918 new_phi = create_phi_node (var, bb);
4919 SSA_NAME_DEF_STMT (var) = new_phi;
4920 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4921 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
4922 UNKNOWN_LOCATION);
4923 }
4924
4925 /* Add the arguments we have stored on edges. */
4926 FOR_EACH_EDGE (e, ei, bb->preds)
4927 {
4928 if (e == fallthru)
4929 continue;
4930
4931 flush_pending_stmts (e);
4932 }
4933 }
4934
4935
4936 /* Return a non-special label in the head of basic block BB.
4937 Create one if it doesn't exist. */
4938
4939 tree
4940 gimple_block_label (basic_block bb)
4941 {
4942 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4943 bool first = true;
4944 tree label;
4945 gimple stmt;
4946
4947 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4948 {
4949 stmt = gsi_stmt (i);
4950 if (gimple_code (stmt) != GIMPLE_LABEL)
4951 break;
4952 label = gimple_label_label (stmt);
4953 if (!DECL_NONLOCAL (label))
4954 {
4955 if (!first)
4956 gsi_move_before (&i, &s);
4957 return label;
4958 }
4959 }
4960
4961 label = create_artificial_label (UNKNOWN_LOCATION);
4962 stmt = gimple_build_label (label);
4963 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4964 return label;
4965 }
4966
4967
4968 /* Attempt to perform edge redirection by replacing a possibly complex
4969 jump instruction by a goto or by removing the jump completely.
4970 This can apply only if all edges now point to the same block. The
4971 parameters and return values are equivalent to
4972 redirect_edge_and_branch. */
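/* For instance, if both outgoing edges of a block ending in
   "if (a_1 > 0) goto <L1>; else goto <L2>;" now reach TARGET, the
   condition is deleted and a single fallthru edge remains (sketch).  */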
4973
4974 static edge
4975 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4976 {
4977 basic_block src = e->src;
4978 gimple_stmt_iterator i;
4979 gimple stmt;
4980
4981 /* We can replace or remove a complex jump only when we have exactly
4982 two edges. */
4983 if (EDGE_COUNT (src->succs) != 2
4984 /* Verify that all targets will be TARGET. Specifically, the
4985 edge that is not E must also go to TARGET. */
4986 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4987 return NULL;
4988
4989 i = gsi_last_bb (src);
4990 if (gsi_end_p (i))
4991 return NULL;
4992
4993 stmt = gsi_stmt (i);
4994
4995 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4996 {
4997 gsi_remove (&i, true);
4998 e = ssa_redirect_edge (e, target);
4999 e->flags = EDGE_FALLTHRU;
5000 return e;
5001 }
5002
5003 return NULL;
5004 }
5005
5006
5007 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5008 edge representing the redirected branch. */
5009
5010 static edge
5011 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5012 {
5013 basic_block bb = e->src;
5014 gimple_stmt_iterator gsi;
5015 edge ret;
5016 gimple stmt;
5017
5018 if (e->flags & EDGE_ABNORMAL)
5019 return NULL;
5020
5021 if (e->dest == dest)
5022 return NULL;
5023
5024 if (e->flags & EDGE_EH)
5025 return redirect_eh_edge (e, dest);
5026
5027 if (e->src != ENTRY_BLOCK_PTR)
5028 {
5029 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5030 if (ret)
5031 return ret;
5032 }
5033
5034 gsi = gsi_last_bb (bb);
5035 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5036
5037 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5038 {
5039 case GIMPLE_COND:
5040 /* For COND_EXPR, we only need to redirect the edge. */
5041 break;
5042
5043 case GIMPLE_GOTO:
5044 /* No non-abnormal edges should lead from a non-simple goto, and
5045 simple ones should be represented implicitly. */
5046 gcc_unreachable ();
5047
5048 case GIMPLE_SWITCH:
5049 {
5050 tree label = gimple_block_label (dest);
5051 tree cases = get_cases_for_edge (e, stmt);
5052
5053 /* If we have a list of cases associated with E, then use it
5054 as it's a lot faster than walking the entire case vector. */
5055 if (cases)
5056 {
5057 edge e2 = find_edge (e->src, dest);
5058 tree last, first;
5059
5060 first = cases;
5061 while (cases)
5062 {
5063 last = cases;
5064 CASE_LABEL (cases) = label;
5065 cases = CASE_CHAIN (cases);
5066 }
5067
5068 /* If there was already an edge in the CFG, then we need
5069 to move all the cases associated with E to E2. */
5070 if (e2)
5071 {
5072 tree cases2 = get_cases_for_edge (e2, stmt);
5073
5074 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5075 CASE_CHAIN (cases2) = first;
5076 }
5077 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5078 }
5079 else
5080 {
5081 size_t i, n = gimple_switch_num_labels (stmt);
5082
5083 for (i = 0; i < n; i++)
5084 {
5085 tree elt = gimple_switch_label (stmt, i);
5086 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5087 CASE_LABEL (elt) = label;
5088 }
5089 }
5090 }
5091 break;
5092
5093 case GIMPLE_ASM:
5094 {
5095 int i, n = gimple_asm_nlabels (stmt);
5096 tree label = NULL;
5097
5098 for (i = 0; i < n; ++i)
5099 {
5100 tree cons = gimple_asm_label_op (stmt, i);
5101 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5102 {
5103 if (!label)
5104 label = gimple_block_label (dest);
5105 TREE_VALUE (cons) = label;
5106 }
5107 }
5108
5109 /* If we didn't find any label matching the former edge in the
5110 asm labels, we must be redirecting the fallthrough
5111 edge. */
5112 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5113 }
5114 break;
5115
5116 case GIMPLE_RETURN:
5117 gsi_remove (&gsi, true);
5118 e->flags |= EDGE_FALLTHRU;
5119 break;
5120
5121 case GIMPLE_OMP_RETURN:
5122 case GIMPLE_OMP_CONTINUE:
5123 case GIMPLE_OMP_SECTIONS_SWITCH:
5124 case GIMPLE_OMP_FOR:
5125 /* The edges from OMP constructs can be simply redirected. */
5126 break;
5127
5128 case GIMPLE_EH_DISPATCH:
5129 if (!(e->flags & EDGE_FALLTHRU))
5130 redirect_eh_dispatch_edge (stmt, e, dest);
5131 break;
5132
5133 case GIMPLE_TRANSACTION:
5134 /* The ABORT edge has a stored label associated with it; otherwise
5135 the edges are simply redirectable. */
5136 if (e->flags == 0)
5137 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5138 break;
5139
5140 default:
5141 /* Otherwise it must be a fallthru edge, and we don't need to
5142 do anything besides redirecting it. */
5143 gcc_assert (e->flags & EDGE_FALLTHRU);
5144 break;
5145 }
5146
5147 /* Update/insert PHI nodes as necessary. */
5148
5149 /* Now update the edges in the CFG. */
5150 e = ssa_redirect_edge (e, dest);
5151
5152 return e;
5153 }
5154
5155 /* Returns true if it is possible to remove edge E by redirecting
5156 it to the destination of the other edge from E->src. */
5157
5158 static bool
5159 gimple_can_remove_branch_p (const_edge e)
5160 {
5161 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5162 return false;
5163
5164 return true;
5165 }
5166
5167 /* Simple wrapper, as we can always redirect fallthru edges. */
5168
5169 static basic_block
5170 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5171 {
5172 e = gimple_redirect_edge_and_branch (e, dest);
5173 gcc_assert (e);
5174
5175 return NULL;
5176 }
5177
5178
5179 /* Splits basic block BB after statement STMT (but at least after the
5180 labels). If STMT is NULL, BB is split just after the labels. */
5181
5182 static basic_block
5183 gimple_split_block (basic_block bb, void *stmt)
5184 {
5185 gimple_stmt_iterator gsi;
5186 gimple_stmt_iterator gsi_tgt;
5187 gimple act;
5188 gimple_seq list;
5189 basic_block new_bb;
5190 edge e;
5191 edge_iterator ei;
5192
5193 new_bb = create_empty_bb (bb);
5194
5195 /* Redirect the outgoing edges. */
5196 new_bb->succs = bb->succs;
5197 bb->succs = NULL;
5198 FOR_EACH_EDGE (e, ei, new_bb->succs)
5199 e->src = new_bb;
5200
5201 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5202 stmt = NULL;
5203
5204 /* Move everything from GSI to the new basic block. */
5205 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5206 {
5207 act = gsi_stmt (gsi);
5208 if (gimple_code (act) == GIMPLE_LABEL)
5209 continue;
5210
5211 if (!stmt)
5212 break;
5213
5214 if (stmt == act)
5215 {
5216 gsi_next (&gsi);
5217 break;
5218 }
5219 }
5220
5221 if (gsi_end_p (gsi))
5222 return new_bb;
5223
5224 /* Split the statement list - avoid creating new containers as this
5225 brings ugly quadratic memory consumption in the inliner.
5226 (We are still quadratic since we need to update stmt BB pointers,
5227 sadly.) */
5228 list = gsi_split_seq_before (&gsi);
5229 set_bb_seq (new_bb, list);
5230 for (gsi_tgt = gsi_start (list);
5231 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5232 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5233
5234 return new_bb;
5235 }
5236
5237
5238 /* Moves basic block BB after block AFTER. */
5239
5240 static bool
5241 gimple_move_block_after (basic_block bb, basic_block after)
5242 {
5243 if (bb->prev_bb == after)
5244 return true;
5245
5246 unlink_block (bb);
5247 link_block (bb, after);
5248
5249 return true;
5250 }
5251
5252
5253 /* Return true if basic block BB can be duplicated. */
5254
5255 static bool
5256 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5257 {
5258 return true;
5259 }
5260
5261 /* Create a duplicate of the basic block BB. NOTE: This does not
5262 preserve SSA form. */
5263
5264 static basic_block
5265 gimple_duplicate_bb (basic_block bb)
5266 {
5267 basic_block new_bb;
5268 gimple_stmt_iterator gsi, gsi_tgt;
5269 gimple_seq phis = phi_nodes (bb);
5270 gimple phi, stmt, copy;
5271
5272 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5273
5274 /* Copy the PHI nodes. We ignore PHI node arguments here because
5275 the incoming edges have not been set up yet. */
5276 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5277 {
5278 phi = gsi_stmt (gsi);
5279 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5280 create_new_def_for (gimple_phi_result (copy), copy,
5281 gimple_phi_result_ptr (copy));
5282 }
5283
5284 gsi_tgt = gsi_start_bb (new_bb);
5285 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5286 {
5287 def_operand_p def_p;
5288 ssa_op_iter op_iter;
5289 tree lhs;
5290
5291 stmt = gsi_stmt (gsi);
5292 if (gimple_code (stmt) == GIMPLE_LABEL)
5293 continue;
5294
5295 /* Don't duplicate label debug stmts. */
5296 if (gimple_debug_bind_p (stmt)
5297 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5298 == LABEL_DECL)
5299 continue;
5300
5301 /* Create a new copy of STMT and duplicate STMT's virtual
5302 operands. */
5303 copy = gimple_copy (stmt);
5304 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5305
5306 maybe_duplicate_eh_stmt (copy, stmt);
5307 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5308
5309 /* When copying around a stmt writing into a local non-user
5310 aggregate, make sure it won't share a stack slot with other
5311 vars. */
5312 lhs = gimple_get_lhs (stmt);
5313 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5314 {
5315 tree base = get_base_address (lhs);
5316 if (base
5317 && (TREE_CODE (base) == VAR_DECL
5318 || TREE_CODE (base) == RESULT_DECL)
5319 && DECL_IGNORED_P (base)
5320 && !TREE_STATIC (base)
5321 && !DECL_EXTERNAL (base)
5322 && (TREE_CODE (base) != VAR_DECL
5323 || !DECL_HAS_VALUE_EXPR_P (base)))
5324 DECL_NONSHAREABLE (base) = 1;
5325 }
5326
5327 /* Create new names for all the definitions created by COPY and
5328 add replacement mappings for each new name. */
5329 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5330 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5331 }
5332
5333 return new_bb;
5334 }
5335
5336 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5337
5338 static void
5339 add_phi_args_after_copy_edge (edge e_copy)
5340 {
5341 basic_block bb, bb_copy = e_copy->src, dest;
5342 edge e;
5343 edge_iterator ei;
5344 gimple phi, phi_copy;
5345 tree def;
5346 gimple_stmt_iterator psi, psi_copy;
5347
5348 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5349 return;
5350
5351 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5352
5353 if (e_copy->dest->flags & BB_DUPLICATED)
5354 dest = get_bb_original (e_copy->dest);
5355 else
5356 dest = e_copy->dest;
5357
5358 e = find_edge (bb, dest);
5359 if (!e)
5360 {
5361 /* During loop unrolling the target of the latch edge is copied.
5362 In this case we are not looking for the edge to DEST, but for
5363 the edge to the duplicated block whose original was DEST. */
5364 FOR_EACH_EDGE (e, ei, bb->succs)
5365 {
5366 if ((e->dest->flags & BB_DUPLICATED)
5367 && get_bb_original (e->dest) == dest)
5368 break;
5369 }
5370
5371 gcc_assert (e != NULL);
5372 }
5373
5374 for (psi = gsi_start_phis (e->dest),
5375 psi_copy = gsi_start_phis (e_copy->dest);
5376 !gsi_end_p (psi);
5377 gsi_next (&psi), gsi_next (&psi_copy))
5378 {
5379 phi = gsi_stmt (psi);
5380 phi_copy = gsi_stmt (psi_copy);
5381 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5382 add_phi_arg (phi_copy, def, e_copy,
5383 gimple_phi_arg_location_from_edge (phi, e));
5384 }
5385 }
5386
5387
5388 /* Basic block BB_COPY was created by code duplication. Add phi node
5389 arguments for edges going out of BB_COPY. The blocks that were
5390 duplicated have BB_DUPLICATED set. */
5391
5392 void
5393 add_phi_args_after_copy_bb (basic_block bb_copy)
5394 {
5395 edge e_copy;
5396 edge_iterator ei;
5397
5398 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5399 {
5400 add_phi_args_after_copy_edge (e_copy);
5401 }
5402 }
5403
5404 /* Blocks in REGION_COPY array of length N_REGION were created by
5405 duplication of basic blocks. Add phi node arguments for edges
5406 going from these blocks. If E_COPY is not NULL, also add
5407 phi node arguments for its destination. */
5408
5409 void
5410 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5411 edge e_copy)
5412 {
5413 unsigned i;
5414
5415 for (i = 0; i < n_region; i++)
5416 region_copy[i]->flags |= BB_DUPLICATED;
5417
5418 for (i = 0; i < n_region; i++)
5419 add_phi_args_after_copy_bb (region_copy[i]);
5420 if (e_copy)
5421 add_phi_args_after_copy_edge (e_copy);
5422
5423 for (i = 0; i < n_region; i++)
5424 region_copy[i]->flags &= ~BB_DUPLICATED;
5425 }
5426
5427 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5428 important exit edge EXIT. By important we mean that no SSA name defined
5429 inside the region is live over the other exit edges of the region. All entry
5430 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5431 to the duplicate of the region. SSA form, dominance and loop information
5432 is updated. The new basic blocks are stored to REGION_COPY in the same
5433 order as they had in REGION, provided that REGION_COPY is not NULL.
5434 The function returns false if it is unable to copy the region,
5435 true otherwise. */
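/* In the primary use, loop header copying, this effectively turns

     while (cond) { body; }

   into

     if (cond) { body; while (cond) { body; } }

   (an illustrative sketch; the actual CFG is updated edge by edge).  */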
5436
5437 bool
5438 gimple_duplicate_sese_region (edge entry, edge exit,
5439 basic_block *region, unsigned n_region,
5440 basic_block *region_copy)
5441 {
5442 unsigned i;
5443 bool free_region_copy = false, copying_header = false;
5444 struct loop *loop = entry->dest->loop_father;
5445 edge exit_copy;
5446 VEC (basic_block, heap) *doms;
5447 edge redirected;
5448 int total_freq = 0, entry_freq = 0;
5449 gcov_type total_count = 0, entry_count = 0;
5450
5451 if (!can_copy_bbs_p (region, n_region))
5452 return false;
5453
5454 /* Some sanity checking. Note that we do not check for all possible
5455 misuses of the function. I.e. if you ask to copy something weird,
5456 it will work, but the state of the structures probably will not be
5457 correct. */
5458 for (i = 0; i < n_region; i++)
5459 {
5460 /* We do not handle subloops, i.e. all the blocks must belong to the
5461 same loop. */
5462 if (region[i]->loop_father != loop)
5463 return false;
5464
5465 if (region[i] != entry->dest
5466 && region[i] == loop->header)
5467 return false;
5468 }
5469
5470 set_loop_copy (loop, loop);
5471
5472 /* In case the function is used for loop header copying (which is the primary
5473 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5474 if (loop->header == entry->dest)
5475 {
5476 copying_header = true;
5477 set_loop_copy (loop, loop_outer (loop));
5478
5479 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5480 return false;
5481
5482 for (i = 0; i < n_region; i++)
5483 if (region[i] != exit->src
5484 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5485 return false;
5486 }
5487
5488 if (!region_copy)
5489 {
5490 region_copy = XNEWVEC (basic_block, n_region);
5491 free_region_copy = true;
5492 }
5493
5494 gcc_assert (!need_ssa_update_p (cfun));
5495
5496 /* Record blocks outside the region that are dominated by something
5497 inside. */
5498 doms = NULL;
5499 initialize_original_copy_tables ();
5500
5501 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5502
5503 if (entry->dest->count)
5504 {
5505 total_count = entry->dest->count;
5506 entry_count = entry->count;
5507 /* Fix up corner cases, to avoid division by zero or creation of negative
5508 frequencies. */
5509 if (entry_count > total_count)
5510 entry_count = total_count;
5511 }
5512 else
5513 {
5514 total_freq = entry->dest->frequency;
5515 entry_freq = EDGE_FREQUENCY (entry);
5516 /* Fix up corner cases, to avoid division by zero or creation of negative
5517 frequencies. */
5518 if (total_freq == 0)
5519 total_freq = 1;
5520 else if (entry_freq > total_freq)
5521 entry_freq = total_freq;
5522 }
5523
5524 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5525 split_edge_bb_loc (entry));
5526 if (total_count)
5527 {
5528 scale_bbs_frequencies_gcov_type (region, n_region,
5529 total_count - entry_count,
5530 total_count);
5531 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5532 total_count);
5533 }
5534 else
5535 {
5536 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5537 total_freq);
5538 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5539 }
5540
5541 if (copying_header)
5542 {
5543 loop->header = exit->dest;
5544 loop->latch = exit->src;
5545 }
5546
5547 /* Redirect the entry and add the phi node arguments. */
5548 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5549 gcc_assert (redirected != NULL);
5550 flush_pending_stmts (entry);
5551
5552 /* Concerning updating of dominators: We must recount dominators
5553 for the entry block and its copy. Anything outside of the
5554 region that was dominated by something inside needs recounting
5555 as well. */
5556 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5557 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5558 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5559 VEC_free (basic_block, heap, doms);
5560
5561 /* Add the other PHI node arguments. */
5562 add_phi_args_after_copy (region_copy, n_region, NULL);
5563
5564 /* Update the SSA web. */
5565 update_ssa (TODO_update_ssa);
5566
5567 if (free_region_copy)
5568 free (region_copy);
5569
5570 free_original_copy_tables ();
5571 return true;
5572 }
5573
5574 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5575 are stored to REGION_COPY in the same order in which they appear
5576 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5577 the region, EXIT an exit from it. The condition guarding EXIT
5578 is moved to ENTRY. Returns true if duplication succeeds, false
5579 otherwise.
5580
5581 For example,
5582
5583 some_code;
5584 if (cond)
5585 A;
5586 else
5587 B;
5588
5589 is transformed to
5590
5591 if (cond)
5592 {
5593 some_code;
5594 A;
5595 }
5596 else
5597 {
5598 some_code;
5599 B;
5600 }
5601 */
5602
5603 bool
5604 gimple_duplicate_sese_tail (edge entry, edge exit,
5605 basic_block *region, unsigned n_region,
5606 basic_block *region_copy)
5607 {
5608 unsigned i;
5609 bool free_region_copy = false;
5610 struct loop *loop = exit->dest->loop_father;
5611 struct loop *orig_loop = entry->dest->loop_father;
5612 basic_block switch_bb, entry_bb, nentry_bb;
5613 VEC (basic_block, heap) *doms;
5614 int total_freq = 0, exit_freq = 0;
5615 gcov_type total_count = 0, exit_count = 0;
5616 edge exits[2], nexits[2], e;
5617 gimple_stmt_iterator gsi;
5618 gimple cond_stmt;
5619 edge sorig, snew;
5620 basic_block exit_bb;
5621 gimple_stmt_iterator psi;
5622 gimple phi;
5623 tree def;
5624
5625 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5626 exits[0] = exit;
5627 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
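  /* The index expression picks the successor edge of EXIT->src other
     than EXIT itself: 1 if EXIT is edge 0, and 0 otherwise.  */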
5628
5629 if (!can_copy_bbs_p (region, n_region))
5630 return false;
5631
5632 initialize_original_copy_tables ();
5633 set_loop_copy (orig_loop, loop);
5634 duplicate_subloops (orig_loop, loop);
5635
5636 if (!region_copy)
5637 {
5638 region_copy = XNEWVEC (basic_block, n_region);
5639 free_region_copy = true;
5640 }
5641
5642 gcc_assert (!need_ssa_update_p (cfun));
5643
5644 /* Record blocks outside the region that are dominated by something
5645 inside. */
5646 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5647
5648 if (exit->src->count)
5649 {
5650 total_count = exit->src->count;
5651 exit_count = exit->count;
5652 /* Fix up corner cases, to avoid division by zero or creation of negative
5653 frequencies. */
5654 if (exit_count > total_count)
5655 exit_count = total_count;
5656 }
5657 else
5658 {
5659 total_freq = exit->src->frequency;
5660 exit_freq = EDGE_FREQUENCY (exit);
5661 /* Fix up corner cases, to avoid division by zero or creation of negative
5662 frequencies. */
5663 if (total_freq == 0)
5664 total_freq = 1;
5665 if (exit_freq > total_freq)
5666 exit_freq = total_freq;
5667 }
5668
5669 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5670 split_edge_bb_loc (exit));
5671 if (total_count)
5672 {
5673 scale_bbs_frequencies_gcov_type (region, n_region,
5674 total_count - exit_count,
5675 total_count);
5676 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5677 total_count);
5678 }
5679 else
5680 {
5681 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5682 total_freq);
5683 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5684 }
5685
5686 /* Create the switch block, and put the exit condition to it. */
5687 entry_bb = entry->dest;
5688 nentry_bb = get_bb_copy (entry_bb);
5689 if (!last_stmt (entry->src)
5690 || !stmt_ends_bb_p (last_stmt (entry->src)))
5691 switch_bb = entry->src;
5692 else
5693 switch_bb = split_edge (entry);
5694 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5695
5696 gsi = gsi_last_bb (switch_bb);
5697 cond_stmt = last_stmt (exit->src);
5698 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5699 cond_stmt = gimple_copy (cond_stmt);
5700
5701 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5702
5703 sorig = single_succ_edge (switch_bb);
5704 sorig->flags = exits[1]->flags;
5705 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5706
5707 /* Register the new edge from SWITCH_BB in loop exit lists. */
5708 rescan_loop_exit (snew, true, false);
5709
5710 /* Add the PHI node arguments. */
5711 add_phi_args_after_copy (region_copy, n_region, snew);
5712
5713 /* Get rid of now superfluous conditions and associated edges (and phi node
5714 arguments). */
5715 exit_bb = exit->dest;
5716
5717 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5718 PENDING_STMT (e) = NULL;
5719
5720 /* The latch of ORIG_LOOP was copied, and so was the backedge
5721 to the original header. We redirect this backedge to EXIT_BB. */
5722 for (i = 0; i < n_region; i++)
5723 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5724 {
5725 gcc_assert (single_succ_edge (region_copy[i]));
5726 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5727 PENDING_STMT (e) = NULL;
5728 for (psi = gsi_start_phis (exit_bb);
5729 !gsi_end_p (psi);
5730 gsi_next (&psi))
5731 {
5732 phi = gsi_stmt (psi);
5733 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5734 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5735 }
5736 }
5737 e = redirect_edge_and_branch (nexits[0], nexits[1]->dest);
5738 PENDING_STMT (e) = NULL;
5739
5740 /* Anything outside of the region that was dominated by something
5741 inside needs its dominance info updated. */
5742 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5743 VEC_free (basic_block, heap, doms);
5744 /* Update the SSA web. */
5745 update_ssa (TODO_update_ssa);
5746
5747 if (free_region_copy)
5748 free (region_copy);
5749
5750 free_original_copy_tables ();
5751 return true;
5752 }
5753
5754 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5755 adding blocks when the dominator traversal reaches EXIT. This
5756 function silently assumes that ENTRY strictly dominates EXIT. */
5757
5758 void
5759 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5760 VEC(basic_block,heap) **bbs_p)
5761 {
5762 basic_block son;
5763
5764 for (son = first_dom_son (CDI_DOMINATORS, entry);
5765 son;
5766 son = next_dom_son (CDI_DOMINATORS, son))
5767 {
5768 VEC_safe_push (basic_block, heap, *bbs_p, son);
5769 if (son != exit)
5770 gather_blocks_in_sese_region (son, exit, bbs_p);
5771 }
5772 }
5773
5774 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5775 The duplicates are recorded in VARS_MAP. */
5776
5777 static void
5778 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5779 tree to_context)
5780 {
5781 tree t = *tp, new_t;
5782 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5783 void **loc;
5784
5785 if (DECL_CONTEXT (t) == to_context)
5786 return;
5787
5788 loc = pointer_map_contains (vars_map, t);
5789
5790 if (!loc)
5791 {
5792 loc = pointer_map_insert (vars_map, t);
5793
5794 if (SSA_VAR_P (t))
5795 {
5796 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5797 add_local_decl (f, new_t);
5798 }
5799 else
5800 {
5801 gcc_assert (TREE_CODE (t) == CONST_DECL);
5802 new_t = copy_node (t);
5803 }
5804 DECL_CONTEXT (new_t) = to_context;
5805
5806 *loc = new_t;
5807 }
5808 else
5809 new_t = (tree) *loc;
5810
5811 *tp = new_t;
5812 }
5813
5814
5815 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5816 VARS_MAP maps old ssa names and var_decls to the new ones. */
5817
5818 static tree
5819 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5820 tree to_context)
5821 {
5822 void **loc;
5823 tree new_name, decl = SSA_NAME_VAR (name);
5824
5825 gcc_assert (is_gimple_reg (name));
5826
5827 loc = pointer_map_contains (vars_map, name);
5828
5829 if (!loc)
5830 {
5831 replace_by_duplicate_decl (&decl, vars_map, to_context);
5832
5833 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5834 if (gimple_in_ssa_p (cfun))
5835 add_referenced_var (decl);
5836
5837 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5838 if (SSA_NAME_IS_DEFAULT_DEF (name))
5839 set_default_def (decl, new_name);
5840 pop_cfun ();
5841
5842 loc = pointer_map_insert (vars_map, name);
5843 *loc = new_name;
5844 }
5845 else
5846 new_name = (tree) *loc;
5847
5848 return new_name;
5849 }
5850
5851 struct move_stmt_d
5852 {
5853 tree orig_block;
5854 tree new_block;
5855 tree from_context;
5856 tree to_context;
5857 struct pointer_map_t *vars_map;
5858 htab_t new_label_map;
5859 struct pointer_map_t *eh_map;
5860 bool remap_decls_p;
5861 };
5862
5863 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5864 contained in *TP if it was ORIG_BLOCK previously, and change the
5865 DECL_CONTEXT of every local variable referenced in *TP. */
5866
5867 static tree
5868 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5869 {
5870 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5871 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5872 tree t = *tp;
5873
5874 if (EXPR_P (t))
5875 /* We should never have TREE_BLOCK set on non-statements. */
5876 gcc_assert (!TREE_BLOCK (t));
5877
5878 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5879 {
5880 if (TREE_CODE (t) == SSA_NAME)
5881 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5882 else if (TREE_CODE (t) == LABEL_DECL)
5883 {
5884 if (p->new_label_map)
5885 {
5886 struct tree_map in, *out;
5887 in.base.from = t;
5888 out = (struct tree_map *)
5889 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5890 if (out)
5891 *tp = t = out->to;
5892 }
5893
5894 DECL_CONTEXT (t) = p->to_context;
5895 }
5896 else if (p->remap_decls_p)
5897 {
5898 /* Replace T with its duplicate. T should no longer appear in the
5899 parent function, so this looks wasteful; however, it may appear
5900 in referenced_vars, and more importantly, as virtual operands of
5901 statements, and in alias lists of other variables. It would be
5902 quite difficult to expunge it from all those places. ??? It might
5903 suffice to do this for addressable variables. */
5904 if ((TREE_CODE (t) == VAR_DECL
5905 && !is_global_var (t))
5906 || TREE_CODE (t) == CONST_DECL)
5907 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5908
5909 if (SSA_VAR_P (t)
5910 && gimple_in_ssa_p (cfun))
5911 {
5912 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5913 add_referenced_var (*tp);
5914 pop_cfun ();
5915 }
5916 }
5917 *walk_subtrees = 0;
5918 }
5919 else if (TYPE_P (t))
5920 *walk_subtrees = 0;
5921
5922 return NULL_TREE;
5923 }
5924
5925 /* Helper for move_stmt_r. Given an EH region number for the source
5926 function, map that to the duplicate EH region number in the dest. */
5927
5928 static int
5929 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5930 {
5931 eh_region old_r, new_r;
5932 void **slot;
5933
5934 old_r = get_eh_region_from_number (old_nr);
5935 slot = pointer_map_contains (p->eh_map, old_r);
5936 new_r = (eh_region) *slot;
5937
5938 return new_r->index;
5939 }
5940
5941 /* Similar, but operate on INTEGER_CSTs. */
5942
5943 static tree
5944 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5945 {
5946 int old_nr, new_nr;
5947
5948 old_nr = tree_low_cst (old_t_nr, 0);
5949 new_nr = move_stmt_eh_region_nr (old_nr, p);
5950
5951 return build_int_cst (integer_type_node, new_nr);
5952 }
5953
5954 /* Like move_stmt_op, but for gimple statements.
5955
5956 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5957 contained in the current statement in *GSI_P and change the
5958 DECL_CONTEXT of every local variable referenced in the current
5959 statement. */
5960
5961 static tree
5962 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5963 struct walk_stmt_info *wi)
5964 {
5965 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5966 gimple stmt = gsi_stmt (*gsi_p);
5967 tree block = gimple_block (stmt);
5968
5969 if (p->orig_block == NULL_TREE
5970 || block == p->orig_block
5971 || block == NULL_TREE)
5972 gimple_set_block (stmt, p->new_block);
5973 #ifdef ENABLE_CHECKING
5974 else if (block != p->new_block)
5975 {
5976 while (block && block != p->orig_block)
5977 block = BLOCK_SUPERCONTEXT (block);
5978 gcc_assert (block);
5979 }
5980 #endif
5981
5982 switch (gimple_code (stmt))
5983 {
5984 case GIMPLE_CALL:
5985 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
5986 {
5987 tree r, fndecl = gimple_call_fndecl (stmt);
5988 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5989 switch (DECL_FUNCTION_CODE (fndecl))
5990 {
5991 case BUILT_IN_EH_COPY_VALUES:
5992 r = gimple_call_arg (stmt, 1);
5993 r = move_stmt_eh_region_tree_nr (r, p);
5994 gimple_call_set_arg (stmt, 1, r);
5995 /* FALLTHRU */
5996
5997 case BUILT_IN_EH_POINTER:
5998 case BUILT_IN_EH_FILTER:
5999 r = gimple_call_arg (stmt, 0);
6000 r = move_stmt_eh_region_tree_nr (r, p);
6001 gimple_call_set_arg (stmt, 0, r);
6002 break;
6003
6004 default:
6005 break;
6006 }
6007 }
6008 break;
6009
6010 case GIMPLE_RESX:
6011 {
6012 int r = gimple_resx_region (stmt);
6013 r = move_stmt_eh_region_nr (r, p);
6014 gimple_resx_set_region (stmt, r);
6015 }
6016 break;
6017
6018 case GIMPLE_EH_DISPATCH:
6019 {
6020 int r = gimple_eh_dispatch_region (stmt);
6021 r = move_stmt_eh_region_nr (r, p);
6022 gimple_eh_dispatch_set_region (stmt, r);
6023 }
6024 break;
6025
6026 case GIMPLE_OMP_RETURN:
6027 case GIMPLE_OMP_CONTINUE:
6028 break;
6029 default:
6030 if (is_gimple_omp (stmt))
6031 {
6032 /* Do not remap variables inside OMP directives. Variables
6033 referenced in clauses and directive header belong to the
6034 parent function and should not be moved into the child
6035 function. */
6036 bool save_remap_decls_p = p->remap_decls_p;
6037 p->remap_decls_p = false;
6038 *handled_ops_p = true;
6039
6040 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
6041 move_stmt_op, wi);
6042
6043 p->remap_decls_p = save_remap_decls_p;
6044 }
6045 break;
6046 }
6047
6048 return NULL_TREE;
6049 }
6050
6051 /* Move basic block BB from function CFUN to function DEST_FN. The
6052 block is moved out of the original linked list and placed after
6053 block AFTER in the new list. Also, the block is removed from the
6054 original array of blocks and placed in DEST_FN's array of blocks.
6055 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6056 updated to reflect the moved edges.
6057
6058 The local variables are remapped to new instances; VARS_MAP is used
6059 to record the mapping. */
6060
6061 static void
6062 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6063 basic_block after, bool update_edge_count_p,
6064 struct move_stmt_d *d)
6065 {
6066 struct control_flow_graph *cfg;
6067 edge_iterator ei;
6068 edge e;
6069 gimple_stmt_iterator si;
6070 unsigned old_len, new_len;
6071
6072 /* Remove BB from dominance structures. */
6073 delete_from_dominance_info (CDI_DOMINATORS, bb);
6074 if (current_loops)
6075 remove_bb_from_loops (bb);
6076
6077 /* Link BB to the new linked list. */
6078 move_block_after (bb, after);
6079
6080 /* Update the edge count in the corresponding flowgraphs. */
6081 if (update_edge_count_p)
6082 FOR_EACH_EDGE (e, ei, bb->succs)
6083 {
6084 cfun->cfg->x_n_edges--;
6085 dest_cfun->cfg->x_n_edges++;
6086 }
6087
6088 /* Remove BB from the original basic block array. */
6089 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
6090 cfun->cfg->x_n_basic_blocks--;
6091
6092 /* Grow DEST_CFUN's basic block array if needed. */
6093 cfg = dest_cfun->cfg;
6094 cfg->x_n_basic_blocks++;
6095 if (bb->index >= cfg->x_last_basic_block)
6096 cfg->x_last_basic_block = bb->index + 1;
6097
6098 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
6099 if ((unsigned) cfg->x_last_basic_block >= old_len)
6100 {
6101 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6102 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
6103 new_len);
6104 }
6105
6106 VEC_replace (basic_block, cfg->x_basic_block_info,
6107 bb->index, bb);
6108
6109 /* Remap the variables in phi nodes. */
6110 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6111 {
6112 gimple phi = gsi_stmt (si);
6113 use_operand_p use;
6114 tree op = PHI_RESULT (phi);
6115 ssa_op_iter oi;
6116
6117 if (!is_gimple_reg (op))
6118 {
6119 /* Remove the phi nodes for virtual operands (alias analysis will be
6120 run for the new function, anyway). */
6121 remove_phi_node (&si, true);
6122 continue;
6123 }
6124
6125 SET_PHI_RESULT (phi,
6126 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6127 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6128 {
6129 op = USE_FROM_PTR (use);
6130 if (TREE_CODE (op) == SSA_NAME)
6131 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6132 }
6133
6134 gsi_next (&si);
6135 }
6136
6137 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6138 {
6139 gimple stmt = gsi_stmt (si);
6140 struct walk_stmt_info wi;
6141
6142 memset (&wi, 0, sizeof (wi));
6143 wi.info = d;
6144 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6145
6146 if (gimple_code (stmt) == GIMPLE_LABEL)
6147 {
6148 tree label = gimple_label_label (stmt);
6149 int uid = LABEL_DECL_UID (label);
6150
6151 gcc_assert (uid > -1);
6152
6153 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
6154 if (old_len <= (unsigned) uid)
6155 {
6156 new_len = 3 * uid / 2 + 1;
6157 VEC_safe_grow_cleared (basic_block, gc,
6158 cfg->x_label_to_block_map, new_len);
6159 }
6160
6161 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
6162 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
6163
6164 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6165
6166 if (uid >= dest_cfun->cfg->last_label_uid)
6167 dest_cfun->cfg->last_label_uid = uid + 1;
6168 }
6169
6170 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6171 remove_stmt_from_eh_lp_fn (cfun, stmt);
6172
6173 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6174 gimple_remove_stmt_histograms (cfun, stmt);
6175
6176 /* We cannot leave any operands allocated from the operand caches of
6177 the current function. */
6178 free_stmt_operands (stmt);
6179 push_cfun (dest_cfun);
6180 update_stmt (stmt);
6181 pop_cfun ();
6182 }
6183
6184 FOR_EACH_EDGE (e, ei, bb->succs)
6185 if (e->goto_locus)
6186 {
6187 tree block = e->goto_block;
6188 if (d->orig_block == NULL_TREE
6189 || block == d->orig_block)
6190 e->goto_block = d->new_block;
6191 #ifdef ENABLE_CHECKING
6192 else if (block != d->new_block)
6193 {
6194 while (block && block != d->orig_block)
6195 block = BLOCK_SUPERCONTEXT (block);
6196 gcc_assert (block);
6197 }
6198 #endif
6199 }
6200 }
6201
6202 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6203 the outermost EH region. Use REGION as the incoming base EH region. */
6204
6205 static eh_region
6206 find_outermost_region_in_block (struct function *src_cfun,
6207 basic_block bb, eh_region region)
6208 {
6209 gimple_stmt_iterator si;
6210
6211 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6212 {
6213 gimple stmt = gsi_stmt (si);
6214 eh_region stmt_region;
6215 int lp_nr;
6216
6217 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6218 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6219 if (stmt_region)
6220 {
6221 if (region == NULL)
6222 region = stmt_region;
6223 else if (stmt_region != region)
6224 {
6225 region = eh_region_outermost (src_cfun, stmt_region, region);
6226 gcc_assert (region != NULL);
6227 }
6228 }
6229 }
6230
6231 return region;
6232 }
6233
6234 static tree
6235 new_label_mapper (tree decl, void *data)
6236 {
6237 htab_t hash = (htab_t) data;
6238 struct tree_map *m;
6239 void **slot;
6240
6241 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6242
6243 m = XNEW (struct tree_map);
6244 m->hash = DECL_UID (decl);
6245 m->base.from = decl;
6246 m->to = create_artificial_label (UNKNOWN_LOCATION);
6247 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6248 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6249 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6250
6251 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6252 gcc_assert (*slot == NULL);
6253
6254 *slot = m;
6255
6256 return m->to;
6257 }
6258
6259 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6260 subblocks. */
6261
6262 static void
6263 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6264 tree to_context)
6265 {
6266 tree *tp, t;
6267
6268 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6269 {
6270 t = *tp;
6271 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6272 continue;
6273 replace_by_duplicate_decl (&t, vars_map, to_context);
6274 if (t != *tp)
6275 {
6276 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6277 {
6278 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6279 DECL_HAS_VALUE_EXPR_P (t) = 1;
6280 }
6281 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6282 *tp = t;
6283 }
6284 }
6285
6286 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6287 replace_block_vars_by_duplicates (block, vars_map, to_context);
6288 }
6289
6290 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6291 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6292 single basic block in the original CFG and the new basic block is
6293 returned. DEST_CFUN must not have a CFG yet.
6294
6295 Note that the region need not be a pure SESE region. Blocks inside
6296 the region may contain calls to abort/exit. The only restriction
6297 is that ENTRY_BB should be the only entry point and it must
6298 dominate EXIT_BB.
6299
6300 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6301 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6302 to the new function.
6303
6304 All local variables referenced in the region are assumed to be in
6305 the corresponding BLOCK_VARS and unexpanded variable lists
6306 associated with DEST_CFUN. */
6307
6308 basic_block
6309 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6310 basic_block exit_bb, tree orig_block)
6311 {
6312 VEC(basic_block,heap) *bbs, *dom_bbs;
6313 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6314 basic_block after, bb, *entry_pred, *exit_succ, abb;
6315 struct function *saved_cfun = cfun;
6316 int *entry_flag, *exit_flag;
6317 unsigned *entry_prob, *exit_prob;
6318 unsigned i, num_entry_edges, num_exit_edges;
6319 edge e;
6320 edge_iterator ei;
6321 htab_t new_label_map;
6322 struct pointer_map_t *vars_map, *eh_map;
6323 struct loop *loop = entry_bb->loop_father;
6324 struct move_stmt_d d;
6325
6326 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6327 region. */
6328 gcc_assert (entry_bb != exit_bb
6329 && (!exit_bb
6330 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6331
6332 /* Collect all the blocks in the region. Manually add ENTRY_BB
6333 because it won't be added by dfs_enumerate_from. */
6334 bbs = NULL;
6335 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6336 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6337
6338 /* The blocks that used to be dominated by something in BBS will now be
6339 dominated by the new block. */
6340 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6341 VEC_address (basic_block, bbs),
6342 VEC_length (basic_block, bbs));
6343
6344 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6345 the predecessor edges to ENTRY_BB and the successor edges to
6346 EXIT_BB so that we can re-attach them to the new basic block that
6347 will replace the region. */
6348 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6349 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6350 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6351 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6352 i = 0;
6353 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6354 {
6355 entry_prob[i] = e->probability;
6356 entry_flag[i] = e->flags;
6357 entry_pred[i++] = e->src;
6358 remove_edge (e);
6359 }
6360
6361 if (exit_bb)
6362 {
6363 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6364 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6365 sizeof (basic_block));
6366 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6367 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6368 i = 0;
6369 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6370 {
6371 exit_prob[i] = e->probability;
6372 exit_flag[i] = e->flags;
6373 exit_succ[i++] = e->dest;
6374 remove_edge (e);
6375 }
6376 }
6377 else
6378 {
6379 num_exit_edges = 0;
6380 exit_succ = NULL;
6381 exit_flag = NULL;
6382 exit_prob = NULL;
6383 }
6384
6385 /* Switch context to the child function to initialize DEST_FN's CFG. */
6386 gcc_assert (dest_cfun->cfg == NULL);
6387 push_cfun (dest_cfun);
6388
6389 init_empty_tree_cfg ();
6390
6391 /* Initialize EH information for the new function. */
6392 eh_map = NULL;
6393 new_label_map = NULL;
6394 if (saved_cfun->eh)
6395 {
6396 eh_region region = NULL;
6397
6398 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6399 region = find_outermost_region_in_block (saved_cfun, bb, region);
6400
6401 init_eh_for_function ();
6402 if (region != NULL)
6403 {
6404 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6405 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6406 new_label_mapper, new_label_map);
6407 }
6408 }
6409
6410 pop_cfun ();
6411
6412 /* Move blocks from BBS into DEST_CFUN. */
6413 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6414 after = dest_cfun->cfg->x_entry_block_ptr;
6415 vars_map = pointer_map_create ();
6416
6417 memset (&d, 0, sizeof (d));
6418 d.orig_block = orig_block;
6419 d.new_block = DECL_INITIAL (dest_cfun->decl);
6420 d.from_context = cfun->decl;
6421 d.to_context = dest_cfun->decl;
6422 d.vars_map = vars_map;
6423 d.new_label_map = new_label_map;
6424 d.eh_map = eh_map;
6425 d.remap_decls_p = true;
6426
6427 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6428 {
6429 /* No need to update edge counts on the last block. They have
6430 already been updated earlier when we detached the region from
6431 the original CFG. */
6432 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6433 after = bb;
6434 }
6435
6436 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6437 if (orig_block)
6438 {
6439 tree block;
6440 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6441 == NULL_TREE);
6442 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6443 = BLOCK_SUBBLOCKS (orig_block);
6444 for (block = BLOCK_SUBBLOCKS (orig_block);
6445 block; block = BLOCK_CHAIN (block))
6446 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6447 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6448 }
6449
6450 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6451 vars_map, dest_cfun->decl);
6452
6453 if (new_label_map)
6454 htab_delete (new_label_map);
6455 if (eh_map)
6456 pointer_map_destroy (eh_map);
6457 pointer_map_destroy (vars_map);
6458
6459 /* Rewire the entry and exit blocks. The successor to the entry
6460 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6461 the child function. Similarly, the predecessor of DEST_FN's
6462 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6463 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6464 various CFG manipulation functions get to the right CFG.
6465
6466 FIXME, this is silly. The CFG ought to become a parameter to
6467 these helpers. */
6468 push_cfun (dest_cfun);
6469 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6470 if (exit_bb)
6471 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6472 pop_cfun ();
6473
6474 /* Back in the original function, the SESE region has disappeared,
6475 create a new basic block in its place. */
6476 bb = create_empty_bb (entry_pred[0]);
6477 if (current_loops)
6478 add_bb_to_loop (bb, loop);
6479 for (i = 0; i < num_entry_edges; i++)
6480 {
6481 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6482 e->probability = entry_prob[i];
6483 }
6484
6485 for (i = 0; i < num_exit_edges; i++)
6486 {
6487 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6488 e->probability = exit_prob[i];
6489 }
6490
6491 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6492 FOR_EACH_VEC_ELT (basic_block, dom_bbs, i, abb)
6493 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6494 VEC_free (basic_block, heap, dom_bbs);
6495
6496 if (exit_bb)
6497 {
6498 free (exit_prob);
6499 free (exit_flag);
6500 free (exit_succ);
6501 }
6502 free (entry_prob);
6503 free (entry_flag);
6504 free (entry_pred);
6505 VEC_free (basic_block, heap, bbs);
6506
6507 return bb;
6508 }
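
/* A minimal usage sketch (CHILD_FN, ENTRY_BB, EXIT_BB and BLOCK are
   hypothetical names here, modeled on how outlining code such as the
   OMP expander drives this function):

     basic_block new_bb
       = move_sese_region_to_fn (DECL_STRUCT_FUNCTION (child_fn),
                                 entry_bb, exit_bb, block);

   On return, NEW_BB is the single block standing in for the whole region
   in the original CFG, while the moved blocks now belong to CHILD_FN.  */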
6509
6510
6511 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6512 tree-pass.h). */
6513
6514 void
6515 dump_function_to_file (tree fn, FILE *file, int flags)
6516 {
6517 tree arg, var;
6518 struct function *dsf;
6519 bool ignore_topmost_bind = false, any_var = false;
6520 basic_block bb;
6521 tree chain;
6522 bool tmclone = TREE_CODE (fn) == FUNCTION_DECL && decl_is_tm_clone (fn);
6523
6524 fprintf (file, "%s %s(", lang_hooks.decl_printable_name (fn, 2),
6525 tmclone ? "[tm-clone] " : "");
6526
6527 arg = DECL_ARGUMENTS (fn);
6528 while (arg)
6529 {
6530 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6531 fprintf (file, " ");
6532 print_generic_expr (file, arg, dump_flags);
6533 if (flags & TDF_VERBOSE)
6534 print_node (file, "", arg, 4);
6535 if (DECL_CHAIN (arg))
6536 fprintf (file, ", ");
6537 arg = DECL_CHAIN (arg);
6538 }
6539 fprintf (file, ")\n");
6540
6541 if (flags & TDF_VERBOSE)
6542 print_node (file, "", fn, 2);
6543
6544 dsf = DECL_STRUCT_FUNCTION (fn);
6545 if (dsf && (flags & TDF_EH))
6546 dump_eh_tree (file, dsf);
6547
6548 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6549 {
6550 dump_node (fn, TDF_SLIM | flags, file);
6551 return;
6552 }
6553
6554 /* Switch CFUN to point to FN. */
6555 push_cfun (DECL_STRUCT_FUNCTION (fn));
6556
6557 /* When GIMPLE is lowered, the variables are no longer available in
6558 BIND_EXPRs, so display them separately. */
6559 if (cfun && cfun->decl == fn && !VEC_empty (tree, cfun->local_decls))
6560 {
6561 unsigned ix;
6562 ignore_topmost_bind = true;
6563
6564 fprintf (file, "{\n");
6565 FOR_EACH_LOCAL_DECL (cfun, ix, var)
6566 {
6567 print_generic_decl (file, var, flags);
6568 if (flags & TDF_VERBOSE)
6569 print_node (file, "", var, 4);
6570 fprintf (file, "\n");
6571
6572 any_var = true;
6573 }
6574 }
6575
6576 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6577 {
6578 /* If the CFG has been built, emit a CFG-based dump. */
6579 check_bb_profile (ENTRY_BLOCK_PTR, file);
6580 if (!ignore_topmost_bind)
6581 fprintf (file, "{\n");
6582
6583 if (any_var && n_basic_blocks)
6584 fprintf (file, "\n");
6585
6586 FOR_EACH_BB (bb)
6587 gimple_dump_bb (bb, file, 2, flags);
6588
6589 fprintf (file, "}\n");
6590 check_bb_profile (EXIT_BLOCK_PTR, file);
6591 }
6592 else if (DECL_SAVED_TREE (fn) == NULL)
6593 {
6594 /* The function is now in GIMPLE form but the CFG has not been
6595 built yet. Emit the single sequence of GIMPLE statements
6596 that make up its body. */
6597 gimple_seq body = gimple_body (fn);
6598
6599 if (gimple_seq_first_stmt (body)
6600 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6601 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6602 print_gimple_seq (file, body, 0, flags);
6603 else
6604 {
6605 if (!ignore_topmost_bind)
6606 fprintf (file, "{\n");
6607
6608 if (any_var)
6609 fprintf (file, "\n");
6610
6611 print_gimple_seq (file, body, 2, flags);
6612 fprintf (file, "}\n");
6613 }
6614 }
6615 else
6616 {
6617 int indent;
6618
6619 /* Make a tree based dump. */
6620 chain = DECL_SAVED_TREE (fn);
6621
6622 if (chain && TREE_CODE (chain) == BIND_EXPR)
6623 {
6624 if (ignore_topmost_bind)
6625 {
6626 chain = BIND_EXPR_BODY (chain);
6627 indent = 2;
6628 }
6629 else
6630 indent = 0;
6631 }
6632 else
6633 {
6634 if (!ignore_topmost_bind)
6635 fprintf (file, "{\n");
6636 indent = 2;
6637 }
6638
6639 if (any_var)
6640 fprintf (file, "\n");
6641
6642 print_generic_stmt_indented (file, chain, flags, indent);
6643 if (ignore_topmost_bind)
6644 fprintf (file, "}\n");
6645 }
6646
6647 if (flags & TDF_ENUMERATE_LOCALS)
6648 dump_enumerated_decls (file, flags);
6649 fprintf (file, "\n\n");
6650
6651 /* Restore CFUN. */
6652 pop_cfun ();
6653 }
6654
6655
6656 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree-pass.h). */
6657
6658 DEBUG_FUNCTION void
6659 debug_function (tree fn, int flags)
6660 {
6661 dump_function_to_file (fn, stderr, flags);
6662 }
6663
6664
6665 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6666
6667 static void
6668 print_pred_bbs (FILE *file, basic_block bb)
6669 {
6670 edge e;
6671 edge_iterator ei;
6672
6673 FOR_EACH_EDGE (e, ei, bb->preds)
6674 fprintf (file, "bb_%d ", e->src->index);
6675 }
6676
6677
6678 /* Print on FILE the indexes for the successors of basic_block BB. */
6679
6680 static void
6681 print_succ_bbs (FILE *file, basic_block bb)
6682 {
6683 edge e;
6684 edge_iterator ei;
6685
6686 FOR_EACH_EDGE (e, ei, bb->succs)
6687 fprintf (file, "bb_%d ", e->dest->index);
6688 }
6689
6690 /* Print to FILE the basic block BB following the VERBOSITY level. */
6691
6692 void
6693 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6694 {
6695 char *s_indent = (char *) alloca ((size_t) indent + 1);
6696 memset ((void *) s_indent, ' ', (size_t) indent);
6697 s_indent[indent] = '\0';
6698
6699 /* Print basic_block's header. */
6700 if (verbosity >= 2)
6701 {
6702 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6703 print_pred_bbs (file, bb);
6704 fprintf (file, "}, succs = {");
6705 print_succ_bbs (file, bb);
6706 fprintf (file, "})\n");
6707 }
6708
6709 /* Print basic_block's body. */
6710 if (verbosity >= 3)
6711 {
6712 fprintf (file, "%s {\n", s_indent);
6713 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6714 fprintf (file, "%s }\n", s_indent);
6715 }
6716 }
6717
6718 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6719
6720 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
6721 VERBOSITY level this outputs the contents of the loop, or just its
6722 structure. */
6723
6724 static void
6725 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6726 {
6727 char *s_indent;
6728 basic_block bb;
6729
6730 if (loop == NULL)
6731 return;
6732
6733 s_indent = (char *) alloca ((size_t) indent + 1);
6734 memset ((void *) s_indent, ' ', (size_t) indent);
6735 s_indent[indent] = '\0';
6736
6737 /* Print loop's header. */
6738 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6739 loop->num, loop->header->index, loop->latch->index);
6740 fprintf (file, ", niter = ");
6741 print_generic_expr (file, loop->nb_iterations, 0);
6742
6743 if (loop->any_upper_bound)
6744 {
6745 fprintf (file, ", upper_bound = ");
6746 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6747 }
6748
6749 if (loop->any_estimate)
6750 {
6751 fprintf (file, ", estimate = ");
6752 dump_double_int (file, loop->nb_iterations_estimate, true);
6753 }
6754 fprintf (file, ")\n");
6755
6756 /* Print loop's body. */
6757 if (verbosity >= 1)
6758 {
6759 fprintf (file, "%s{\n", s_indent);
6760 FOR_EACH_BB (bb)
6761 if (bb->loop_father == loop)
6762 print_loops_bb (file, bb, indent, verbosity);
6763
6764 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6765 fprintf (file, "%s}\n", s_indent);
6766 }
6767 }
6768
6769 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6770 spaces. Following VERBOSITY level this outputs the contents of the
6771 loop, or just its structure. */
6772
6773 static void
6774 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6775 {
6776 if (loop == NULL)
6777 return;
6778
6779 print_loop (file, loop, indent, verbosity);
6780 print_loop_and_siblings (file, loop->next, indent, verbosity);
6781 }
6782
6783 /* Follow the CFG from the entry point of the current function and, on
6784 entry to a loop, pretty print the loop structure on FILE. */
6785
6786 void
6787 print_loops (FILE *file, int verbosity)
6788 {
6789 basic_block bb;
6790
6791 bb = ENTRY_BLOCK_PTR;
6792 if (bb && bb->loop_father)
6793 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6794 }
6795
6796
6797 /* Debugging loops structure at tree level, at some VERBOSITY level. */
6798
6799 DEBUG_FUNCTION void
6800 debug_loops (int verbosity)
6801 {
6802 print_loops (stderr, verbosity);
6803 }
6804
6805 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6806
6807 DEBUG_FUNCTION void
6808 debug_loop (struct loop *loop, int verbosity)
6809 {
6810 print_loop (stderr, loop, 0, verbosity);
6811 }
6812
6813 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6814 level. */
6815
6816 DEBUG_FUNCTION void
6817 debug_loop_num (unsigned num, int verbosity)
6818 {
6819 debug_loop (get_loop (num), verbosity);
6820 }
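
/* The debug_* helpers above are meant to be invoked interactively; a
   sketched gdb session:

     (gdb) call debug_loops (3)
     (gdb) call debug_loop_num (1, 2)

   Per print_loop and print_loops_bb above, VERBOSITY 0 prints only the
   loop headers, >= 1 walks the loop bodies, >= 2 adds each block's
   predecessor/successor lists, and >= 3 dumps the block contents too.  */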
6821
6822 /* Return true if BB ends with a call, possibly followed by some
6823 instructions that must stay with the call. Return false
6824 otherwise. */
6825
6826 static bool
6827 gimple_block_ends_with_call_p (basic_block bb)
6828 {
6829 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6830 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6831 }
6832
6833
6834 /* Return true if BB ends with a conditional branch. Return false
6835 otherwise. */
6836
6837 static bool
6838 gimple_block_ends_with_condjump_p (const_basic_block bb)
6839 {
6840 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6841 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6842 }
6843
6844
6845 /* Return true if we need to add fake edge to exit at statement T.
6846 Helper function for gimple_flow_call_edges_add. */
6847
6848 static bool
6849 need_fake_edge_p (gimple t)
6850 {
6851 tree fndecl = NULL_TREE;
6852 int call_flags = 0;
6853
6854 /* NORETURN and LONGJMP calls already have an edge to exit.
6855 CONST and PURE calls do not need one.
6856 We don't currently check for CONST and PURE here, although
6857 it would be a good idea, because those attributes are
6858 figured out from the RTL in mark_constant_function, and
6859 the counter incrementation code from -fprofile-arcs
6860 leads to different results from -fbranch-probabilities. */
6861 if (is_gimple_call (t))
6862 {
6863 fndecl = gimple_call_fndecl (t);
6864 call_flags = gimple_call_flags (t);
6865 }
6866
6867 if (is_gimple_call (t)
6868 && fndecl
6869 && DECL_BUILT_IN (fndecl)
6870 && (call_flags & ECF_NOTHROW)
6871 && !(call_flags & ECF_RETURNS_TWICE)
6872 /* fork() doesn't really return twice, but the effect of
6873 wrapping it in __gcov_fork() which calls __gcov_flush()
6874 and clears the counters before forking has the same
6875 effect as returning twice. Force a fake edge. */
6876 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6877 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6878 return false;
6879
6880 if (is_gimple_call (t)
6881 && !(call_flags & ECF_NORETURN))
6882 return true;
6883
6884 if (gimple_code (t) == GIMPLE_ASM
6885 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6886 return true;
6887
6888 return false;
6889 }
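
/* As a user-level example of why the fake edges matter: in

     void f (void) { g (); h (); }

   the call to g() might never return (it could call exit or longjmp), so
   entering f's body does not guarantee that h() executes.  A fake edge
   from the call's block to EXIT makes this visible to consumers such as
   the -fprofile-arcs instrumentation.  */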
6890
6891
6892 /* Add fake edges to the function exit for any non-constant and
6893 non-noreturn calls, and for volatile inline assembly, in the bitmap
6894 of blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero. Return
6895 the number of blocks that were split.
6896
6897 The goal is to expose cases in which entering a basic block does
6898 not imply that all subsequent instructions must be executed. */
6899
6900 static int
6901 gimple_flow_call_edges_add (sbitmap blocks)
6902 {
6903 int i;
6904 int blocks_split = 0;
6905 int last_bb = last_basic_block;
6906 bool check_last_block = false;
6907
6908 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6909 return 0;
6910
6911 if (! blocks)
6912 check_last_block = true;
6913 else
6914 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6915
6916 /* In the last basic block, before epilogue generation, there will be
6917 a fallthru edge to EXIT. Special care is required if the last insn
6918 of the last basic block is a call because make_edge folds duplicate
6919 edges, which would result in the fallthru edge also being marked
6920 fake, which would result in the fallthru edge being removed by
6921 remove_fake_edges, which would result in an invalid CFG.
6922
6923 Moreover, we can't elide the outgoing fake edge, since the block
6924 profiler needs to take this into account in order to solve the minimal
6925 spanning tree in the case that the call doesn't return.
6926
6927 Handle this by adding a dummy instruction in a new last basic block. */
6928 if (check_last_block)
6929 {
6930 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6931 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6932 gimple t = NULL;
6933
6934 if (!gsi_end_p (gsi))
6935 t = gsi_stmt (gsi);
6936
6937 if (t && need_fake_edge_p (t))
6938 {
6939 edge e;
6940
6941 e = find_edge (bb, EXIT_BLOCK_PTR);
6942 if (e)
6943 {
6944 gsi_insert_on_edge (e, gimple_build_nop ());
6945 gsi_commit_edge_inserts ();
6946 }
6947 }
6948 }
6949
6950 /* Now add fake edges to the function exit for any non-constant
6951 calls since there is no way that we can determine if they will
6952 return or not... */
6953 for (i = 0; i < last_bb; i++)
6954 {
6955 basic_block bb = BASIC_BLOCK (i);
6956 gimple_stmt_iterator gsi;
6957 gimple stmt, last_stmt;
6958
6959 if (!bb)
6960 continue;
6961
6962 if (blocks && !TEST_BIT (blocks, i))
6963 continue;
6964
6965 gsi = gsi_last_nondebug_bb (bb);
6966 if (!gsi_end_p (gsi))
6967 {
6968 last_stmt = gsi_stmt (gsi);
6969 do
6970 {
6971 stmt = gsi_stmt (gsi);
6972 if (need_fake_edge_p (stmt))
6973 {
6974 edge e;
6975
6976 /* The handling above of the final block before the
6977 epilogue should be enough to verify that there is
6978 no edge to the exit block in CFG already.
6979 Calling make_edge in such case would cause us to
6980 mark that edge as fake and remove it later. */
6981 #ifdef ENABLE_CHECKING
6982 if (stmt == last_stmt)
6983 {
6984 e = find_edge (bb, EXIT_BLOCK_PTR);
6985 gcc_assert (e == NULL);
6986 }
6987 #endif
6988
6989 /* Note that the following may create a new basic block
6990 and renumber the existing basic blocks. */
6991 if (stmt != last_stmt)
6992 {
6993 e = split_block (bb, stmt);
6994 if (e)
6995 blocks_split++;
6996 }
6997 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6998 }
6999 gsi_prev (&gsi);
7000 }
7001 while (!gsi_end_p (gsi));
7002 }
7003 }
7004
7005 if (blocks_split)
7006 verify_flow_info ();
7007
7008 return blocks_split;
7009 }
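
/* To illustrate the last-block special case above (a sketch): if the
   block before EXIT ends in

     foo ();

   with a fallthru edge to EXIT, the nop inserted on that edge splits it,
   creating a new last block.  The call's block then no longer has a
   direct edge to EXIT, so the EDGE_FAKE edge added for the call cannot
   be folded into the fallthru edge by make_edge.  */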
7010
7011 /* Removes edge E and all the blocks dominated by it, and updates dominance
7012 information. The IL in E->src needs to be updated separately.
7013 If dominance info is not available, only the edge E is removed. */
7014
7015 void
7016 remove_edge_and_dominated_blocks (edge e)
7017 {
7018 VEC (basic_block, heap) *bbs_to_remove = NULL;
7019 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
7020 bitmap df, df_idom;
7021 edge f;
7022 edge_iterator ei;
7023 bool none_removed = false;
7024 unsigned i;
7025 basic_block bb, dbb;
7026 bitmap_iterator bi;
7027
7028 if (!dom_info_available_p (CDI_DOMINATORS))
7029 {
7030 remove_edge (e);
7031 return;
7032 }
7033
7034 /* No updating is needed for edges to exit. */
7035 if (e->dest == EXIT_BLOCK_PTR)
7036 {
7037 if (cfgcleanup_altered_bbs)
7038 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7039 remove_edge (e);
7040 return;
7041 }
7042
7043 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7044 that is not dominated by E->dest, then this set is empty. Otherwise,
7045 all the basic blocks dominated by E->dest are removed.
7046
7047 Also, to DF_IDOM we store the immediate dominators of the blocks in
7048 the dominance frontier of E (i.e., of the successors of the
7049 removed blocks, if there are any, and of E->dest otherwise). */
7050 FOR_EACH_EDGE (f, ei, e->dest->preds)
7051 {
7052 if (f == e)
7053 continue;
7054
7055 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7056 {
7057 none_removed = true;
7058 break;
7059 }
7060 }
7061
7062 df = BITMAP_ALLOC (NULL);
7063 df_idom = BITMAP_ALLOC (NULL);
7064
7065 if (none_removed)
7066 bitmap_set_bit (df_idom,
7067 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7068 else
7069 {
7070 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7071 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7072 {
7073 FOR_EACH_EDGE (f, ei, bb->succs)
7074 {
7075 if (f->dest != EXIT_BLOCK_PTR)
7076 bitmap_set_bit (df, f->dest->index);
7077 }
7078 }
7079 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7080 bitmap_clear_bit (df, bb->index);
7081
7082 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7083 {
7084 bb = BASIC_BLOCK (i);
7085 bitmap_set_bit (df_idom,
7086 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7087 }
7088 }
7089
7090 if (cfgcleanup_altered_bbs)
7091 {
7092 /* Record the set of the altered basic blocks. */
7093 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7094 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7095 }
7096
7097 /* Remove E and the cancelled blocks. */
7098 if (none_removed)
7099 remove_edge (e);
7100 else
7101 {
7102 /* Walk backwards so as to get a chance to substitute all
7103 released DEFs into debug stmts. See
7104 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7105 details. */
7106 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
7107 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
7108 }
7109
7110 /* Update the dominance information. The immediate dominator may change only
7111 for blocks whose immediate dominator belongs to DF_IDOM:
7112
7113 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7114 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7115 Z dominates X after the removal. Before removal, there exists a path P
7116 from Y to X that avoids Z. Let F be the last edge on P that is
7117 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7118 dominates W, and because of P, Z does not dominate W), and W belongs to
7119 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7120 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7121 {
7122 bb = BASIC_BLOCK (i);
7123 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7124 dbb;
7125 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7126 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
7127 }
7128
7129 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7130
7131 BITMAP_FREE (df);
7132 BITMAP_FREE (df_idom);
7133 VEC_free (basic_block, heap, bbs_to_remove);
7134 VEC_free (basic_block, heap, bbs_to_fix_dom);
7135 }
7136
7137 /* Purge dead EH edges from basic block BB. */
7138
7139 bool
7140 gimple_purge_dead_eh_edges (basic_block bb)
7141 {
7142 bool changed = false;
7143 edge e;
7144 edge_iterator ei;
7145 gimple stmt = last_stmt (bb);
7146
7147 if (stmt && stmt_can_throw_internal (stmt))
7148 return false;
7149
7150 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7151 {
7152 if (e->flags & EDGE_EH)
7153 {
7154 remove_edge_and_dominated_blocks (e);
7155 changed = true;
7156 }
7157 else
7158 ei_next (&ei);
7159 }
7160
7161 return changed;
7162 }
7163
7164 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7165
7166 bool
7167 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7168 {
7169 bool changed = false;
7170 unsigned i;
7171 bitmap_iterator bi;
7172
7173 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7174 {
7175 basic_block bb = BASIC_BLOCK (i);
7176
7177 /* Earlier gimple_purge_dead_eh_edges could have removed
7178 this basic block already. */
7179 gcc_assert (bb || changed);
7180 if (bb != NULL)
7181 changed |= gimple_purge_dead_eh_edges (bb);
7182 }
7183
7184 return changed;
7185 }
7186
7187 /* Purge dead abnormal call edges from basic block BB. */
7188
7189 bool
7190 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7191 {
7192 bool changed = false;
7193 edge e;
7194 edge_iterator ei;
7195 gimple stmt = last_stmt (bb);
7196
7197 if (!cfun->has_nonlocal_label)
7198 return false;
7199
7200 if (stmt && stmt_can_make_abnormal_goto (stmt))
7201 return false;
7202
7203 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7204 {
7205 if (e->flags & EDGE_ABNORMAL)
7206 {
7207 remove_edge_and_dominated_blocks (e);
7208 changed = true;
7209 }
7210 else
7211 ei_next (&ei);
7212 }
7213
7214 return changed;
7215 }
7216
7217 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7218
7219 bool
7220 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7221 {
7222 bool changed = false;
7223 unsigned i;
7224 bitmap_iterator bi;
7225
7226 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7227 {
7228 basic_block bb = BASIC_BLOCK (i);
7229
7230 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7231 this basic block already. */
7232 gcc_assert (bb || changed);
7233 if (bb != NULL)
7234 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7235 }
7236
7237 return changed;
7238 }
7239
7240 /* This function is called whenever a new edge is created or
7241 redirected. */
7242
7243 static void
7244 gimple_execute_on_growing_pred (edge e)
7245 {
7246 basic_block bb = e->dest;
7247
7248 if (!gimple_seq_empty_p (phi_nodes (bb)))
7249 reserve_phi_args_for_new_edge (bb);
7250 }
7251
7252 /* This function is called immediately before edge E is removed from
7253 the edge vector E->dest->preds. */
7254
7255 static void
7256 gimple_execute_on_shrinking_pred (edge e)
7257 {
7258 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7259 remove_phi_args (e);
7260 }
7261
7262 /*---------------------------------------------------------------------------
7263 Helper functions for Loop versioning
7264 ---------------------------------------------------------------------------*/
7265
7266 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7267 of 'first'. Both of them are dominated by 'new_head' basic block. When
7268 'new_head' was created by splitting 'second's incoming edge, the phi
7269 arguments on that edge were set up by split_edge(). Later, an additional
7270 edge 'e' was created to connect 'new_head' and 'first'. This routine
7271 copies onto the new edge 'e' the phi args that the 'new_head' to 'second'
7272 edge received as part of the edge splitting. */
7273
7274 static void
7275 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7276 basic_block new_head, edge e)
7277 {
7278 gimple phi1, phi2;
7279 gimple_stmt_iterator psi1, psi2;
7280 tree def;
7281 edge e2 = find_edge (new_head, second);
7282
7283 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7284 edge, we should always have an edge from NEW_HEAD to SECOND. */
7285 gcc_assert (e2 != NULL);
7286
7287 /* Browse all 'second' basic block phi nodes and add phi args to
7288 edge 'e' for 'first' head. PHI args are always in correct order. */
7289
7290 for (psi2 = gsi_start_phis (second),
7291 psi1 = gsi_start_phis (first);
7292 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7293 gsi_next (&psi2), gsi_next (&psi1))
7294 {
7295 phi1 = gsi_stmt (psi1);
7296 phi2 = gsi_stmt (psi2);
7297 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7298 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7299 }
7300 }
7301
7302
7303 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7304 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7305 the destination of the ELSE part. */
7306
7307 static void
7308 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7309 basic_block second_head ATTRIBUTE_UNUSED,
7310 basic_block cond_bb, void *cond_e)
7311 {
7312 gimple_stmt_iterator gsi;
7313 gimple new_cond_expr;
7314 tree cond_expr = (tree) cond_e;
7315 edge e0;
7316
7317 /* Build the new conditional expression. */
7318 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7319 NULL_TREE, NULL_TREE);
7320
7321 /* Add new cond in cond_bb. */
7322 gsi = gsi_last_bb (cond_bb);
7323 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7324
7325 /* Adjust edges appropriately to connect new head with first head
7326 as well as second head. */
7327 e0 = single_succ_edge (cond_bb);
7328 e0->flags &= ~EDGE_FALLTHRU;
7329 e0->flags |= EDGE_FALSE_VALUE;
7330 }
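
/* After this hook runs, COND_BB ends in something like (with a
   hypothetical condition)

     if (cond) goto second_head; else goto first_head;

   where the pre-existing fallthru edge to FIRST_HEAD has been relabeled
   as the FALSE edge; the TRUE edge to SECOND_HEAD is wired up separately
   by the generic loop-versioning code that calls this hook.  */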
7331
7332 struct cfg_hooks gimple_cfg_hooks = {
7333 "gimple",
7334 gimple_verify_flow_info,
7335 gimple_dump_bb, /* dump_bb */
7336 create_bb, /* create_basic_block */
7337 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7338 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7339 gimple_can_remove_branch_p, /* can_remove_branch_p */
7340 remove_bb, /* delete_basic_block */
7341 gimple_split_block, /* split_block */
7342 gimple_move_block_after, /* move_block_after */
7343 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7344 gimple_merge_blocks, /* merge_blocks */
7345 gimple_predict_edge, /* predict_edge */
7346 gimple_predicted_by_p, /* predicted_by_p */
7347 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7348 gimple_duplicate_bb, /* duplicate_block */
7349 gimple_split_edge, /* split_edge */
7350 gimple_make_forwarder_block, /* make_forwarder_block */
7351 NULL, /* tidy_fallthru_edge */
7352 NULL, /* force_nonfallthru */
7353 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7354 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7355 gimple_flow_call_edges_add, /* flow_call_edges_add */
7356 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7357 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7358 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7359 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7360 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7361 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7362 flush_pending_stmts /* flush_pending_stmts */
7363 };
7364
7365
7366 /* Split all critical edges. */
7367
7368 static unsigned int
7369 split_critical_edges (void)
7370 {
7371 basic_block bb;
7372 edge e;
7373 edge_iterator ei;
7374
7375 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7376 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7377 mappings around the calls to split_edge. */
7378 start_recording_case_labels ();
7379 FOR_ALL_BB (bb)
7380 {
7381 FOR_EACH_EDGE (e, ei, bb->succs)
7382 {
7383 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7384 split_edge (e);
7385 /* PRE inserts statements on edges and expects that
7386 since split_critical_edges was done beforehand, committing edge
7387 insertions will not split more edges. In addition to critical
7388 edges we must split edges that have multiple successors and
7389 end by control flow statements, such as RESX.
7390 Go ahead and split them too. This matches the logic in
7391 gimple_find_edge_insert_loc. */
7392 else if ((!single_pred_p (e->dest)
7393 || !gimple_seq_empty_p (phi_nodes (e->dest))
7394 || e->dest == EXIT_BLOCK_PTR)
7395 && e->src != ENTRY_BLOCK_PTR
7396 && !(e->flags & EDGE_ABNORMAL))
7397 {
7398 gimple_stmt_iterator gsi;
7399
7400 gsi = gsi_last_bb (e->src);
7401 if (!gsi_end_p (gsi)
7402 && stmt_ends_bb_p (gsi_stmt (gsi))
7403 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7404 && !gimple_call_builtin_p (gsi_stmt (gsi),
7405 BUILT_IN_RETURN)))
7406 split_edge (e);
7407 }
7408 }
7409 }
7410 end_recording_case_labels ();
7411 return 0;
7412 }
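
/* For reference: an edge is critical when its source has more than one
   successor and its destination has more than one predecessor.  Code can
   only be inserted on such an edge by splitting it, since placing the
   code in either endpoint would make it execute on unrelated paths.  */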
7413
7414 struct gimple_opt_pass pass_split_crit_edges =
7415 {
7416 {
7417 GIMPLE_PASS,
7418 "crited", /* name */
7419 NULL, /* gate */
7420 split_critical_edges, /* execute */
7421 NULL, /* sub */
7422 NULL, /* next */
7423 0, /* static_pass_number */
7424 TV_TREE_SPLIT_EDGES, /* tv_id */
7425 PROP_cfg, /* properties_required */
7426 PROP_no_crit_edges, /* properties_provided */
7427 0, /* properties_destroyed */
7428 0, /* todo_flags_start */
7429 TODO_verify_flow /* todo_flags_finish */
7430 }
7431 };
7432
7433
7434 /* Build a ternary operation and gimplify it. Emit code before GSI.
7435 Return the gimple_val holding the result. */
7436
7437 tree
7438 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7439 tree type, tree a, tree b, tree c)
7440 {
7441 tree ret;
7442 location_t loc = gimple_location (gsi_stmt (*gsi));
7443
7444 ret = fold_build3_loc (loc, code, type, a, b, c);
7445 STRIP_NOPS (ret);
7446
7447 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7448 GSI_SAME_STMT);
7449 }
7450
7451 /* Build a binary operation and gimplify it. Emit code before GSI.
7452 Return the gimple_val holding the result. */
7453
7454 tree
7455 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7456 tree type, tree a, tree b)
7457 {
7458 tree ret;
7459
7460 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7461 STRIP_NOPS (ret);
7462
7463 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7464 GSI_SAME_STMT);
7465 }
7466
7467 /* Build a unary operation and gimplify it. Emit code before GSI.
7468 Return the gimple_val holding the result. */
7469
7470 tree
7471 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7472 tree a)
7473 {
7474 tree ret;
7475
7476 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7477 STRIP_NOPS (ret);
7478
7479 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7480 GSI_SAME_STMT);
7481 }
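
/* A minimal usage sketch (GSI, TYPE, A and B are hypothetical, with A
   and B already valid gimple values of type TYPE):

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);

   Any statements needed to compute the folded result are emitted before
   the statement at GSI, and SUM is a gimple value (an SSA name or a
   constant) directly usable as an operand.  */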
7482
7483
7484 \f
7485 /* Emit return warnings. */
7486
7487 static unsigned int
7488 execute_warn_function_return (void)
7489 {
7490 source_location location;
7491 gimple last;
7492 edge e;
7493 edge_iterator ei;
7494
7495 /* If we have a path to EXIT, then we do return. */
7496 if (TREE_THIS_VOLATILE (cfun->decl)
7497 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7498 {
7499 location = UNKNOWN_LOCATION;
7500 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7501 {
7502 last = last_stmt (e->src);
7503 if ((gimple_code (last) == GIMPLE_RETURN
7504 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7505 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7506 break;
7507 }
7508 if (location == UNKNOWN_LOCATION)
7509 location = cfun->function_end_locus;
7510 warning_at (location, 0, "%<noreturn%> function does return");
7511 }
7512
7513 /* If we see "return;" in some basic block, then we do reach the end
7514 without returning a value. */
7515 else if (warn_return_type
7516 && !TREE_NO_WARNING (cfun->decl)
7517 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7518 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7519 {
7520 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7521 {
7522 gimple last = last_stmt (e->src);
7523 if (gimple_code (last) == GIMPLE_RETURN
7524 && gimple_return_retval (last) == NULL
7525 && !gimple_no_warning_p (last))
7526 {
7527 location = gimple_location (last);
7528 if (location == UNKNOWN_LOCATION)
7529 location = cfun->function_end_locus;
7530 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7531 TREE_NO_WARNING (cfun->decl) = 1;
7532 break;
7533 }
7534 }
7535 }
7536 return 0;
7537 }
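
/* User-level examples (sketches) of code each warning fires on:

     __attribute__ ((noreturn)) void f (void) { }

   draws "'noreturn' function does return", and, with -Wreturn-type,

     int g (int x) { if (x) return 1; }

   draws "control reaches end of non-void function".  */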
7538
7539
7540 /* Given a basic block B which ends with a conditional and has
7541 precisely two successors, determine which of the edges is taken if
7542 the conditional is true and which is taken if the conditional is
7543 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7544
7545 void
7546 extract_true_false_edges_from_block (basic_block b,
7547 edge *true_edge,
7548 edge *false_edge)
7549 {
7550 edge e = EDGE_SUCC (b, 0);
7551
7552 if (e->flags & EDGE_TRUE_VALUE)
7553 {
7554 *true_edge = e;
7555 *false_edge = EDGE_SUCC (b, 1);
7556 }
7557 else
7558 {
7559 *false_edge = e;
7560 *true_edge = EDGE_SUCC (b, 1);
7561 }
7562 }
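
/* Typical use (a sketch; BB is assumed to end in a GIMPLE_COND):

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   after which TRUE_EDGE->dest is the block reached when the condition
   holds and FALSE_EDGE->dest is the block reached otherwise.  */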
7563
7564 struct gimple_opt_pass pass_warn_function_return =
7565 {
7566 {
7567 GIMPLE_PASS,
7568 "*warn_function_return", /* name */
7569 NULL, /* gate */
7570 execute_warn_function_return, /* execute */
7571 NULL, /* sub */
7572 NULL, /* next */
7573 0, /* static_pass_number */
7574 TV_NONE, /* tv_id */
7575 PROP_cfg, /* properties_required */
7576 0, /* properties_provided */
7577 0, /* properties_destroyed */
7578 0, /* todo_flags_start */
7579 0 /* todo_flags_finish */
7580 }
7581 };
7582
7583 /* Emit noreturn warnings. */
7584
7585 static unsigned int
7586 execute_warn_function_noreturn (void)
7587 {
7588 if (!TREE_THIS_VOLATILE (current_function_decl)
7589 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7590 warn_function_noreturn (current_function_decl);
7591 return 0;
7592 }
7593
7594 static bool
7595 gate_warn_function_noreturn (void)
7596 {
7597 return warn_suggest_attribute_noreturn;
7598 }
7599
7600 struct gimple_opt_pass pass_warn_function_noreturn =
7601 {
7602 {
7603 GIMPLE_PASS,
7604 "*warn_function_noreturn", /* name */
7605 gate_warn_function_noreturn, /* gate */
7606 execute_warn_function_noreturn, /* execute */
7607 NULL, /* sub */
7608 NULL, /* next */
7609 0, /* static_pass_number */
7610 TV_NONE, /* tv_id */
7611 PROP_cfg, /* properties_required */
7612 0, /* properties_provided */
7613 0, /* properties_destroyed */
7614 0, /* todo_flags_start */
7615 0 /* todo_flags_finish */
7616 }
7617 };
7618
7619
7620 /* Walk a gimplified function and warn for functions whose return value is
7621 ignored and attribute((warn_unused_result)) is set. This is done before
7622 inlining, so we don't have to worry about that. */
7623
7624 static void
7625 do_warn_unused_result (gimple_seq seq)
7626 {
7627 tree fdecl, ftype;
7628 gimple_stmt_iterator i;
7629
7630 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7631 {
7632 gimple g = gsi_stmt (i);
7633
7634 switch (gimple_code (g))
7635 {
7636 case GIMPLE_BIND:
7637 do_warn_unused_result (gimple_bind_body (g));
7638 break;
7639 case GIMPLE_TRY:
7640 do_warn_unused_result (gimple_try_eval (g));
7641 do_warn_unused_result (gimple_try_cleanup (g));
7642 break;
7643 case GIMPLE_CATCH:
7644 do_warn_unused_result (gimple_catch_handler (g));
7645 break;
7646 case GIMPLE_EH_FILTER:
7647 do_warn_unused_result (gimple_eh_filter_failure (g));
7648 break;
7649
7650 case GIMPLE_CALL:
7651 if (gimple_call_lhs (g))
7652 break;
7653 if (gimple_call_internal_p (g))
7654 break;
7655
7656 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7657 LHS. All calls whose value is ignored should be
7658 represented like this. Look for the attribute. */
7659 fdecl = gimple_call_fndecl (g);
7660 ftype = gimple_call_fntype (g);
7661
7662 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7663 {
7664 location_t loc = gimple_location (g);
7665
7666 if (fdecl)
7667 warning_at (loc, OPT_Wunused_result,
7668 "ignoring return value of %qD, "
7669 "declared with attribute warn_unused_result",
7670 fdecl);
7671 else
7672 warning_at (loc, OPT_Wunused_result,
7673 "ignoring return value of function "
7674 "declared with attribute warn_unused_result");
7675 }
7676 break;
7677
7678 default:
7679 /* Not a container, not a call, or a call whose value is used. */
7680 break;
7681 }
7682 }
7683 }
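
/* A user-level example (sketch) of the code this warns about:

     __attribute__ ((warn_unused_result)) int must_check (void);

     void f (void) { must_check (); }

   The naked call in f() draws "ignoring return value of 'must_check',
   declared with attribute warn_unused_result".  */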
7684
7685 static unsigned int
7686 run_warn_unused_result (void)
7687 {
7688 do_warn_unused_result (gimple_body (current_function_decl));
7689 return 0;
7690 }
7691
7692 static bool
7693 gate_warn_unused_result (void)
7694 {
7695 return flag_warn_unused_result;
7696 }
7697
7698 struct gimple_opt_pass pass_warn_unused_result =
7699 {
7700 {
7701 GIMPLE_PASS,
7702 "*warn_unused_result", /* name */
7703 gate_warn_unused_result, /* gate */
7704 run_warn_unused_result, /* execute */
7705 NULL, /* sub */
7706 NULL, /* next */
7707 0, /* static_pass_number */
7708 TV_NONE, /* tv_id */
7709 PROP_gimple_any, /* properties_required */
7710 0, /* properties_provided */
7711 0, /* properties_destroyed */
7712 0, /* todo_flags_start */
7713 0 /* todo_flags_finish */
7714 }
7715 };