/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
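/* Illustrative sketch (not from the original comments): given

       switch (x) { case 1: case 2: goto L; default: goto M; }

   case 1 and case 2 reach the same destination block, so the single
   edge to L's block maps to the chain "case 1 -> case 2", linked
   through CASE_CHAIN.  Redirecting that edge can then update both
   case labels in one lookup instead of rescanning the whole case
   vector.  */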

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
static void group_case_labels_stmt (gimple);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",                                /* name */
  NULL,                                 /* gate */
  execute_build_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CFG,                          /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg  /* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */
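/* Illustrative sketch (not part of the original comments): factoring
   rewrites every block that ends in

       goto *x_1;

   into

       gotovar.0 = x_1;
       goto <factored_label>;

   and introduces one shared block

     <factored_label>:
       goto *gotovar.0;

   so that only the single factored block needs edges to every
   potential destination.  */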

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for its goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
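          /* Illustrative sketch (not from the original comments): a call
             such as

                 x_1 = foo ();   // foo may transfer control abnormally

             becomes

                 tmp_2 = foo ();
                 x_1 = tmp_2;

             so the previous value of x stays live along the abnormal
             edge out of the call.  */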
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after basic block AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), 0);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
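/* Illustrative sketch (not from the original comments): in

       for (i = 0; i < n; i++) sum += a[i];

   the condition, body and increment all share one source line, so the
   extra basic blocks that begin on that line receive discriminators
   1, 2, ..., letting a sample-based profiler attribute counts to the
   right block instead of lumping them into a single line entry.  */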

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                              (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */
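/* Illustrative sketch (not from the original comments): for

       void *p = ...;
       goto *p;

   every label whose address is taken (&&lab, which sets FORCED_LABEL)
   is a potential destination, so the goto's block gets an abnormal
   edge to each such label's block.  For a call that may perform a
   non-local goto, the same is done for DECL_NONLOCAL labels.  */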

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  location_t stmt_loc = gimple_location (stmt);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
      assign_discriminator (stmt_loc, label_bb);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After that we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */
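/* Illustrative sketch (not from the original comments): if a block
   starts with

     L1:
     L2:
     L3:
       x = 1;

   then L1 (or the first user-defined label among them) becomes the
   leading label, every reference to L2 or L3 is redirected to it, and
   the now-unused artificial labels are deleted.  */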

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  tree default_case = NULL_TREE;
  tree default_label = NULL_TREE;
  bool has_default;

  /* The default label is always the first case in a switch
     statement after gimplification if it was not optimized
     away.  */
  if (!CASE_LOW (gimple_switch_default_label (stmt))
      && !CASE_HIGH (gimple_switch_default_label (stmt)))
    {
      default_case = gimple_switch_default_label (stmt);
      default_label = CASE_LABEL (default_case);
      has_default = true;
    }
  else
    has_default = false;

  /* Look for possible opportunities to merge cases.  */
  if (has_default)
    i = 1;
  else
    i = 0;
  while (i < old_size)
    {
      tree base_case, base_label, base_high;
      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_label = CASE_LABEL (base_case);

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_label == default_label)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          tree merge_label = CASE_LABEL (merge_case);
          double_int bhp1 = double_int_add (tree_to_double_int (base_high),
                                            double_int_one);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_label == base_label
              && double_int_equal_p (tree_to_double_int (CASE_LOW (merge_case)),
                                     bhp1))
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH | EDGE_PRESERVE))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Return true if the var whose chain of uses starts at PTR has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}

/* Return true if the var whose chain of uses starts at PTR has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
                  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
        if (single_use)
          {
            single_use = NULL;
            break;
          }
        single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return !!single_use;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement.  */
                  gcc_checking_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change something from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
                              || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a gimple call statement.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (bb, dump_file, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label; if the argument is
         not a label, then we cannot construct a proper CFG.
1955
1956 It may be the case that we only need to allow the LABEL_REF to
1957 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
1958 appear inside a LABEL_EXPR just to be safe. */
1959 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
1960 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
1961 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
1962 return NULL;
1963 }
1964
1965 gcc_unreachable ();
1966 }
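
/* Editor's note: an illustrative sketch, not part of GCC.  The typical
   caller folds the controlling expression to a constant first and then
   asks which successor survives, e.g. in a propagation pass:

	tree val = ...constant computed by propagation...;
	edge taken = find_taken_edge (bb, val);
	if (taken)
	  ...every other outgoing edge of BB is dead...

   Since the function itself rejects a VAL that is not a GIMPLE minimal
   invariant, callers need no separate constness check before asking.  */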
1967
1968 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1969 statement, determine which of the outgoing edges will be taken out of the
1970    block.  Return NULL if the taken edge cannot be determined.  */
1971
1972 static edge
1973 find_taken_edge_computed_goto (basic_block bb, tree val)
1974 {
1975 basic_block dest;
1976 edge e = NULL;
1977
1978 dest = label_to_block (val);
1979 if (dest)
1980 {
1981 e = find_edge (bb, dest);
1982 gcc_assert (e != NULL);
1983 }
1984
1985 return e;
1986 }
1987
1988 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1989 statement, determine which of the two edges will be taken out of the
1990 block. Return NULL if either edge may be taken. */
1991
1992 static edge
1993 find_taken_edge_cond_expr (basic_block bb, tree val)
1994 {
1995 edge true_edge, false_edge;
1996
1997 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1998
1999 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2000 return (integer_zerop (val) ? false_edge : true_edge);
2001 }
2002
2003 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2004 statement, determine which edge will be taken out of the block. Return
2005 NULL if any edge may be taken. */
2006
2007 static edge
2008 find_taken_edge_switch_expr (basic_block bb, tree val)
2009 {
2010 basic_block dest_bb;
2011 edge e;
2012 gimple switch_stmt;
2013 tree taken_case;
2014
2015 switch_stmt = last_stmt (bb);
2016 taken_case = find_case_label_for_value (switch_stmt, val);
2017 dest_bb = label_to_block (CASE_LABEL (taken_case));
2018
2019 e = find_edge (bb, dest_bb);
2020 gcc_assert (e);
2021 return e;
2022 }
2023
2024
2025 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2026 We can make optimal use here of the fact that the case labels are
2027 sorted: We can do a binary search for a case matching VAL. */
2028
2029 static tree
2030 find_case_label_for_value (gimple switch_stmt, tree val)
2031 {
2032 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2033 tree default_case = gimple_switch_default_label (switch_stmt);
2034
2035 for (low = 0, high = n; high - low > 1; )
2036 {
2037 size_t i = (high + low) / 2;
2038 tree t = gimple_switch_label (switch_stmt, i);
2039 int cmp;
2040
2041 /* Cache the result of comparing CASE_LOW and val. */
2042 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2043
2044 if (cmp > 0)
2045 high = i;
2046 else
2047 low = i;
2048
2049 if (CASE_HIGH (t) == NULL)
2050 {
2051 	      /* A single-valued case label.  */
2052 if (cmp == 0)
2053 return t;
2054 }
2055 else
2056 {
2057 /* A case range. We can only handle integer ranges. */
2058 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2059 return t;
2060 }
2061 }
2062
2063 return default_case;
2064 }
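
/* Editor's note: a worked trace of the binary search above, not part of
   GCC.  Label 0 is always the default and labels 1..N-1 are sorted by
   CASE_LOW, so the loop maintains "the label at LOW has CASE_LOW <= VAL
   (or is the default)" and "the label at HIGH, if in range, has
   CASE_LOW > VAL".  With four labels {default, 1, 5..7, 9} and VAL == 6:

	low=0 high=4: i=2, CASE_LOW=5 <= 6, so low=2;
		      CASE_HIGH=7 >= 6, so the range 5..7 matches: return it

   A single-valued label matches only when CMP == 0; a range label
   matches when CASE_LOW <= VAL <= CASE_HIGH; otherwise the default
   label at index 0 is returned.  */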
2065
2066
2067 /* Dump a basic block on stderr. */
2068
2069 void
2070 gimple_debug_bb (basic_block bb)
2071 {
2072 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2073 }
2074
2075
2076 /* Dump basic block with index N on stderr. */
2077
2078 basic_block
2079 gimple_debug_bb_n (int n)
2080 {
2081 gimple_debug_bb (BASIC_BLOCK (n));
2082 return BASIC_BLOCK (n);
2083 }
2084
2085
2086 /* Dump the CFG on stderr.
2087
2088    FLAGS are the same as those used by the tree dumping functions
2089 (see TDF_* in tree-pass.h). */
2090
2091 void
2092 gimple_debug_cfg (int flags)
2093 {
2094 gimple_dump_cfg (stderr, flags);
2095 }
2096
2097
2098 /* Dump the program showing basic block boundaries on the given FILE.
2099
2100    FLAGS are the same as those used by the tree dumping functions
2101    (see TDF_* in tree.h).  */
2102
2103 void
2104 gimple_dump_cfg (FILE *file, int flags)
2105 {
2106 if (flags & TDF_DETAILS)
2107 {
2108 dump_function_header (file, current_function_decl, flags);
2109 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2110 n_basic_blocks, n_edges, last_basic_block);
2111
2112 brief_dump_cfg (file);
2113 fprintf (file, "\n");
2114 }
2115
2116 if (flags & TDF_STATS)
2117 dump_cfg_stats (file);
2118
2119 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2120 }
2121
2122
2123 /* Dump CFG statistics on FILE. */
2124
2125 void
2126 dump_cfg_stats (FILE *file)
2127 {
2128 static long max_num_merged_labels = 0;
2129 unsigned long size, total = 0;
2130 long num_edges;
2131 basic_block bb;
2132 const char * const fmt_str = "%-30s%-13s%12s\n";
2133 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2134 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2135 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2136 const char *funcname
2137 = lang_hooks.decl_printable_name (current_function_decl, 2);
2138
2139
2140 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2141
2142 fprintf (file, "---------------------------------------------------------\n");
2143 fprintf (file, fmt_str, "", " Number of ", "Memory");
2144 fprintf (file, fmt_str, "", " instances ", "used ");
2145 fprintf (file, "---------------------------------------------------------\n");
2146
2147 size = n_basic_blocks * sizeof (struct basic_block_def);
2148 total += size;
2149 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2150 SCALE (size), LABEL (size));
2151
2152 num_edges = 0;
2153 FOR_EACH_BB (bb)
2154 num_edges += EDGE_COUNT (bb->succs);
2155 size = num_edges * sizeof (struct edge_def);
2156 total += size;
2157 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2158
2159 fprintf (file, "---------------------------------------------------------\n");
2160 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2161 LABEL (total));
2162 fprintf (file, "---------------------------------------------------------\n");
2163 fprintf (file, "\n");
2164
2165 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2166 max_num_merged_labels = cfg_stats.num_merged_labels;
2167
2168 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2169 cfg_stats.num_merged_labels, max_num_merged_labels);
2170
2171 fprintf (file, "\n");
2172 }
2173
2174
2175 /* Dump CFG statistics on stderr. Keep extern so that it's always
2176 linked in the final executable. */
2177
2178 DEBUG_FUNCTION void
2179 debug_cfg_stats (void)
2180 {
2181 dump_cfg_stats (stderr);
2182 }
2183
2184
2185 /* Dump the flowgraph to a .vcg FILE. */
2186
2187 static void
2188 gimple_cfg2vcg (FILE *file)
2189 {
2190 edge e;
2191 edge_iterator ei;
2192 basic_block bb;
2193 const char *funcname
2194 = lang_hooks.decl_printable_name (current_function_decl, 2);
2195
2196 /* Write the file header. */
2197 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2198 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2199 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2200
2201 /* Write blocks and edges. */
2202 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2203 {
2204 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2205 e->dest->index);
2206
2207 if (e->flags & EDGE_FAKE)
2208 fprintf (file, " linestyle: dotted priority: 10");
2209 else
2210 fprintf (file, " linestyle: solid priority: 100");
2211
2212 fprintf (file, " }\n");
2213 }
2214 fputc ('\n', file);
2215
2216 FOR_EACH_BB (bb)
2217 {
2218 enum gimple_code head_code, end_code;
2219 const char *head_name, *end_name;
2220 int head_line = 0;
2221 int end_line = 0;
2222 gimple first = first_stmt (bb);
2223 gimple last = last_stmt (bb);
2224
2225 if (first)
2226 {
2227 head_code = gimple_code (first);
2228 head_name = gimple_code_name[head_code];
2229 head_line = get_lineno (first);
2230 }
2231 else
2232 head_name = "no-statement";
2233
2234 if (last)
2235 {
2236 end_code = gimple_code (last);
2237 end_name = gimple_code_name[end_code];
2238 end_line = get_lineno (last);
2239 }
2240 else
2241 end_name = "no-statement";
2242
2243 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2244 bb->index, bb->index, head_name, head_line, end_name,
2245 end_line);
2246
2247 FOR_EACH_EDGE (e, ei, bb->succs)
2248 {
2249 if (e->dest == EXIT_BLOCK_PTR)
2250 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2251 else
2252 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2253
2254 if (e->flags & EDGE_FAKE)
2255 fprintf (file, " priority: 10 linestyle: dotted");
2256 else
2257 fprintf (file, " priority: 100 linestyle: solid");
2258
2259 fprintf (file, " }\n");
2260 }
2261
2262 if (bb->next_bb != EXIT_BLOCK_PTR)
2263 fputc ('\n', file);
2264 }
2265
2266 fputs ("}\n\n", file);
2267 }
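
/* Editor's note: illustrative output, not part of GCC.  For a function
   "foo" whose first real block starts with an assignment and ends in a
   conditional, the emitted VCG text looks roughly like

	graph: { title: "foo"
	node: { title: "ENTRY" label: "ENTRY" }
	node: { title: "EXIT" label: "EXIT" }
	edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }

	node: { title: "2" label: "#2\ngimple_assign (3)\ngimple_cond (5)"}
	edge: { sourcename: "2" targetname: "3" priority: 100 linestyle: solid }
	}

   which VCG viewers such as xvcg render as a flowgraph.  */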
2268
2269
2270
2271 /*---------------------------------------------------------------------------
2272 Miscellaneous helpers
2273 ---------------------------------------------------------------------------*/
2274
2275 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2276 flow. Transfers of control flow associated with EH are excluded. */
2277
2278 static bool
2279 call_can_make_abnormal_goto (gimple t)
2280 {
2281 /* If the function has no non-local labels, then a call cannot make an
2282 abnormal transfer of control. */
2283 if (!cfun->has_nonlocal_label)
2284 return false;
2285
2286 /* Likewise if the call has no side effects. */
2287 if (!gimple_has_side_effects (t))
2288 return false;
2289
2290 /* Likewise if the called function is leaf. */
2291 if (gimple_call_flags (t) & ECF_LEAF)
2292 return false;
2293
2294 return true;
2295 }
2296
2297
2298 /* Return true if T can make an abnormal transfer of control flow.
2299 Transfers of control flow associated with EH are excluded. */
2300
2301 bool
2302 stmt_can_make_abnormal_goto (gimple t)
2303 {
2304 if (computed_goto_p (t))
2305 return true;
2306 if (is_gimple_call (t))
2307 return call_can_make_abnormal_goto (t);
2308 return false;
2309 }
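
/* Editor's note: an illustrative source fragment, not part of GCC.  The
   classic producer of these abnormal edges is a call that may return
   through a non-local goto from a nested function (GNU C), e.g.

	void f (void)
	{
	  __label__ out;
	  void g (void) { goto out; }
	  h (g);
	out:
	  ;
	}

   where h is some external function that may call g.  The call to h can
   transfer control straight to "out", so cfun->has_nonlocal_label is
   set and the call gets an abnormal out-edge unless it is known to be
   free of side effects or leaf.  */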
2310
2311
2312 /* Return true if T represents a stmt that always transfers control. */
2313
2314 bool
2315 is_ctrl_stmt (gimple t)
2316 {
2317 switch (gimple_code (t))
2318 {
2319 case GIMPLE_COND:
2320 case GIMPLE_SWITCH:
2321 case GIMPLE_GOTO:
2322 case GIMPLE_RETURN:
2323 case GIMPLE_RESX:
2324 return true;
2325 default:
2326 return false;
2327 }
2328 }
2329
2330
2331 /* Return true if T is a statement that may alter the flow of control
2332 (e.g., a call to a non-returning function). */
2333
2334 bool
2335 is_ctrl_altering_stmt (gimple t)
2336 {
2337 gcc_assert (t);
2338
2339 switch (gimple_code (t))
2340 {
2341 case GIMPLE_CALL:
2342 {
2343 int flags = gimple_call_flags (t);
2344
2345 /* A call alters control flow if it can make an abnormal goto. */
2346 if (call_can_make_abnormal_goto (t))
2347 return true;
2348
2349 /* A call also alters control flow if it does not return. */
2350 if (flags & ECF_NORETURN)
2351 return true;
2352
2353 /* TM ending statements have backedges out of the transaction.
2354 Return true so we split the basic block containing them.
2355 Note that the TM_BUILTIN test is merely an optimization. */
2356 if ((flags & ECF_TM_BUILTIN)
2357 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2358 return true;
2359
2360 	/* A BUILT_IN_RETURN call is the same as a return statement.  */
2361 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2362 return true;
2363 }
2364 break;
2365
2366 case GIMPLE_EH_DISPATCH:
2367 /* EH_DISPATCH branches to the individual catch handlers at
2368 this level of a try or allowed-exceptions region. It can
2369 fallthru to the next statement as well. */
2370 return true;
2371
2372 case GIMPLE_ASM:
2373 if (gimple_asm_nlabels (t) > 0)
2374 return true;
2375 break;
2376
2377 CASE_GIMPLE_OMP:
2378 /* OpenMP directives alter control flow. */
2379 return true;
2380
2381 case GIMPLE_TRANSACTION:
2382 /* A transaction start alters control flow. */
2383 return true;
2384
2385 default:
2386 break;
2387 }
2388
2389 /* If a statement can throw, it alters control flow. */
2390 return stmt_can_throw_internal (t);
2391 }
2392
2393
2394 /* Return true if T is a simple local goto. */
2395
2396 bool
2397 simple_goto_p (gimple t)
2398 {
2399 return (gimple_code (t) == GIMPLE_GOTO
2400 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2401 }
2402
2403
2404 /* Return true if STMT should start a new basic block. PREV_STMT is
2405 the statement preceding STMT. It is used when STMT is a label or a
2406 case label. Labels should only start a new basic block if their
2407    previous statement wasn't a label.  Otherwise, a sequence of labels
2408 would generate unnecessary basic blocks that only contain a single
2409 label. */
2410
2411 static inline bool
2412 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2413 {
2414 if (stmt == NULL)
2415 return false;
2416
2417 /* Labels start a new basic block only if the preceding statement
2418 wasn't a label of the same type. This prevents the creation of
2419 consecutive blocks that have nothing but a single label. */
2420 if (gimple_code (stmt) == GIMPLE_LABEL)
2421 {
2422 /* Nonlocal and computed GOTO targets always start a new block. */
2423 if (DECL_NONLOCAL (gimple_label_label (stmt))
2424 || FORCED_LABEL (gimple_label_label (stmt)))
2425 return true;
2426
2427 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2428 {
2429 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2430 return true;
2431
2432 cfg_stats.num_merged_labels++;
2433 return false;
2434 }
2435 else
2436 return true;
2437 }
2438
2439 return false;
2440 }
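
/* Editor's note: an illustrative example, not part of GCC.  Given

	a: b: c:
	  x = 1;

   only "a" opens a new basic block; "b" and "c" are counted in
   cfg_stats.num_merged_labels and share the block, so a run of labels
   yields one block with consecutive GIMPLE_LABELs rather than a chain
   of single-label blocks.  A non-local label or a FORCED_LABEL (the
   target of a computed goto) always starts its own block and so breaks
   the chain.  */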
2441
2442
2443 /* Return true if T should end a basic block. */
2444
2445 bool
2446 stmt_ends_bb_p (gimple t)
2447 {
2448 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2449 }
2450
2451 /* Remove block annotations and other data structures. */
2452
2453 void
2454 delete_tree_cfg_annotations (void)
2455 {
2456 label_to_block_map = NULL;
2457 }
2458
2459
2460 /* Return the first statement in basic block BB. */
2461
2462 gimple
2463 first_stmt (basic_block bb)
2464 {
2465 gimple_stmt_iterator i = gsi_start_bb (bb);
2466 gimple stmt = NULL;
2467
2468 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2469 {
2470 gsi_next (&i);
2471 stmt = NULL;
2472 }
2473 return stmt;
2474 }
2475
2476 /* Return the first non-label statement in basic block BB. */
2477
2478 static gimple
2479 first_non_label_stmt (basic_block bb)
2480 {
2481 gimple_stmt_iterator i = gsi_start_bb (bb);
2482 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2483 gsi_next (&i);
2484 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2485 }
2486
2487 /* Return the last statement in basic block BB. */
2488
2489 gimple
2490 last_stmt (basic_block bb)
2491 {
2492 gimple_stmt_iterator i = gsi_last_bb (bb);
2493 gimple stmt = NULL;
2494
2495 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2496 {
2497 gsi_prev (&i);
2498 stmt = NULL;
2499 }
2500 return stmt;
2501 }
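
/* Editor's note, not part of GCC: first_stmt and last_stmt skip
   GIMPLE_DEBUG statements, so for a block whose tail is

	x_1 = a_2 + 1;
	# DEBUG x => x_1

   last_stmt returns the assignment whether or not the debug bind is
   present.  Decisions keyed off a block's first or last statement thus
   stay identical with and without -g, which debug info generation
   relies on.  */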
2502
2503 /* Return the last statement of an otherwise empty block. Return NULL
2504 if the block is totally empty, or if it contains more than one
2505 statement. */
2506
2507 gimple
2508 last_and_only_stmt (basic_block bb)
2509 {
2510 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2511 gimple last, prev;
2512
2513 if (gsi_end_p (i))
2514 return NULL;
2515
2516 last = gsi_stmt (i);
2517 gsi_prev_nondebug (&i);
2518 if (gsi_end_p (i))
2519 return last;
2520
2521 /* Empty statements should no longer appear in the instruction stream.
2522 Everything that might have appeared before should be deleted by
2523 remove_useless_stmts, and the optimizers should just gsi_remove
2524 instead of smashing with build_empty_stmt.
2525
2526 Thus the only thing that should appear here in a block containing
2527 one executable statement is a label. */
2528 prev = gsi_stmt (i);
2529 if (gimple_code (prev) == GIMPLE_LABEL)
2530 return last;
2531 else
2532 return NULL;
2533 }
2534
2535 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2536
2537 static void
2538 reinstall_phi_args (edge new_edge, edge old_edge)
2539 {
2540 edge_var_map_vector v;
2541 edge_var_map *vm;
2542 int i;
2543 gimple_stmt_iterator phis;
2544
2545 v = redirect_edge_var_map_vector (old_edge);
2546 if (!v)
2547 return;
2548
2549 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2550 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2551 i++, gsi_next (&phis))
2552 {
2553 gimple phi = gsi_stmt (phis);
2554 tree result = redirect_edge_var_map_result (vm);
2555 tree arg = redirect_edge_var_map_def (vm);
2556
2557 gcc_assert (result == gimple_phi_result (phi));
2558
2559 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2560 }
2561
2562 redirect_edge_var_map_clear (old_edge);
2563 }
2564
2565 /* Returns the basic block after which the new basic block created
2566 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2567 near its "logical" location. This is of most help to humans looking
2568 at debugging dumps. */
2569
2570 static basic_block
2571 split_edge_bb_loc (edge edge_in)
2572 {
2573 basic_block dest = edge_in->dest;
2574 basic_block dest_prev = dest->prev_bb;
2575
2576 if (dest_prev)
2577 {
2578 edge e = find_edge (dest_prev, dest);
2579 if (e && !(e->flags & EDGE_COMPLEX))
2580 return edge_in->src;
2581 }
2582 return dest_prev;
2583 }
2584
2585 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2586 Abort on abnormal edges. */
2587
2588 static basic_block
2589 gimple_split_edge (edge edge_in)
2590 {
2591 basic_block new_bb, after_bb, dest;
2592 edge new_edge, e;
2593
2594 /* Abnormal edges cannot be split. */
2595 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2596
2597 dest = edge_in->dest;
2598
2599 after_bb = split_edge_bb_loc (edge_in);
2600
2601 new_bb = create_empty_bb (after_bb);
2602 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2603 new_bb->count = edge_in->count;
2604 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2605 new_edge->probability = REG_BR_PROB_BASE;
2606 new_edge->count = edge_in->count;
2607
2608 e = redirect_edge_and_branch (edge_in, new_bb);
2609 gcc_assert (e == edge_in);
2610 reinstall_phi_args (new_edge, e);
2611
2612 return new_bb;
2613 }
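
/* Editor's note: an illustrative sketch, not part of GCC.  A pass that
   needs to add code on a critical edge E does not call this directly;
   it queues the statement on the edge and lets the commit step split
   whatever edges require it, which for GIMPLE lands here:

	gsi_insert_on_edge (e, stmt);
	gsi_commit_edge_inserts ();

   The new block created above inherits the count and frequency of the
   edge, and reinstall_phi_args rewires the PHI arguments that
   redirect_edge_and_branch queued on the old edge.  */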
2614
2615
2616 /* Verify properties of the address expression T with base object BASE. */
2617
2618 static tree
2619 verify_address (tree t, tree base)
2620 {
2621 bool old_constant;
2622 bool old_side_effects;
2623 bool new_constant;
2624 bool new_side_effects;
2625
2626 old_constant = TREE_CONSTANT (t);
2627 old_side_effects = TREE_SIDE_EFFECTS (t);
2628
2629 recompute_tree_invariant_for_addr_expr (t);
2630 new_side_effects = TREE_SIDE_EFFECTS (t);
2631 new_constant = TREE_CONSTANT (t);
2632
2633 if (old_constant != new_constant)
2634 {
2635 error ("constant not recomputed when ADDR_EXPR changed");
2636 return t;
2637 }
2638 if (old_side_effects != new_side_effects)
2639 {
2640 error ("side effects not recomputed when ADDR_EXPR changed");
2641 return t;
2642 }
2643
2644 if (!(TREE_CODE (base) == VAR_DECL
2645 || TREE_CODE (base) == PARM_DECL
2646 || TREE_CODE (base) == RESULT_DECL))
2647 return NULL_TREE;
2648
2649 if (DECL_GIMPLE_REG_P (base))
2650 {
2651 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2652 return base;
2653 }
2654
2655 return NULL_TREE;
2656 }
2657
2658 /* Callback for walk_tree, check that all elements with address taken are
2659    properly noticed as such.  DATA is unused; it is present only to
2660    match the walk_tree callback signature.  */
2661
2662 static tree
2663 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2664 {
2665 tree t = *tp, x;
2666
2667 if (TYPE_P (t))
2668 *walk_subtrees = 0;
2669
2670 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2671 #define CHECK_OP(N, MSG) \
2672 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2673 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2674
2675 switch (TREE_CODE (t))
2676 {
2677 case SSA_NAME:
2678 if (SSA_NAME_IN_FREE_LIST (t))
2679 {
2680 error ("SSA name in freelist but still referenced");
2681 return *tp;
2682 }
2683 break;
2684
2685 case INDIRECT_REF:
2686 error ("INDIRECT_REF in gimple IL");
2687 return t;
2688
2689 case MEM_REF:
2690 x = TREE_OPERAND (t, 0);
2691 if (!POINTER_TYPE_P (TREE_TYPE (x))
2692 || !is_gimple_mem_ref_addr (x))
2693 {
2694 error ("invalid first operand of MEM_REF");
2695 return x;
2696 }
2697 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2698 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2699 {
2700 error ("invalid offset operand of MEM_REF");
2701 return TREE_OPERAND (t, 1);
2702 }
2703 if (TREE_CODE (x) == ADDR_EXPR
2704 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2705 return x;
2706 *walk_subtrees = 0;
2707 break;
2708
2709 case ASSERT_EXPR:
2710 x = fold (ASSERT_EXPR_COND (t));
2711 if (x == boolean_false_node)
2712 {
2713 error ("ASSERT_EXPR with an always-false condition");
2714 return *tp;
2715 }
2716 break;
2717
2718 case MODIFY_EXPR:
2719 error ("MODIFY_EXPR not expected while having tuples");
2720 return *tp;
2721
2722 case ADDR_EXPR:
2723 {
2724 tree tem;
2725
2726 gcc_assert (is_gimple_address (t));
2727
2728 /* Skip any references (they will be checked when we recurse down the
2729 tree) and ensure that any variable used as a prefix is marked
2730 addressable. */
2731 for (x = TREE_OPERAND (t, 0);
2732 handled_component_p (x);
2733 x = TREE_OPERAND (x, 0))
2734 ;
2735
2736 if ((tem = verify_address (t, x)))
2737 return tem;
2738
2739 if (!(TREE_CODE (x) == VAR_DECL
2740 || TREE_CODE (x) == PARM_DECL
2741 || TREE_CODE (x) == RESULT_DECL))
2742 return NULL;
2743
2744 if (!TREE_ADDRESSABLE (x))
2745 {
2746 error ("address taken, but ADDRESSABLE bit not set");
2747 return x;
2748 }
2749
2750 break;
2751 }
2752
2753 case COND_EXPR:
2754 x = COND_EXPR_COND (t);
2755 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2756 {
2757 error ("non-integral used in condition");
2758 return x;
2759 }
2760 if (!is_gimple_condexpr (x))
2761 {
2762 error ("invalid conditional operand");
2763 return x;
2764 }
2765 break;
2766
2767 case NON_LVALUE_EXPR:
2768 case TRUTH_NOT_EXPR:
2769 gcc_unreachable ();
2770
2771 CASE_CONVERT:
2772 case FIX_TRUNC_EXPR:
2773 case FLOAT_EXPR:
2774 case NEGATE_EXPR:
2775 case ABS_EXPR:
2776 case BIT_NOT_EXPR:
2777 CHECK_OP (0, "invalid operand to unary operator");
2778 break;
2779
2780 case REALPART_EXPR:
2781 case IMAGPART_EXPR:
2782 case COMPONENT_REF:
2783 case ARRAY_REF:
2784 case ARRAY_RANGE_REF:
2785 case BIT_FIELD_REF:
2786 case VIEW_CONVERT_EXPR:
2787 /* We have a nest of references. Verify that each of the operands
2788 that determine where to reference is either a constant or a variable,
2789 verify that the base is valid, and then show we've already checked
2790 the subtrees. */
2791 while (handled_component_p (t))
2792 {
2793 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2794 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2795 else if (TREE_CODE (t) == ARRAY_REF
2796 || TREE_CODE (t) == ARRAY_RANGE_REF)
2797 {
2798 CHECK_OP (1, "invalid array index");
2799 if (TREE_OPERAND (t, 2))
2800 CHECK_OP (2, "invalid array lower bound");
2801 if (TREE_OPERAND (t, 3))
2802 CHECK_OP (3, "invalid array stride");
2803 }
2804 else if (TREE_CODE (t) == BIT_FIELD_REF)
2805 {
2806 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2807 || !host_integerp (TREE_OPERAND (t, 2), 1))
2808 {
2809 error ("invalid position or size operand to BIT_FIELD_REF");
2810 return t;
2811 }
2812 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2813 && (TYPE_PRECISION (TREE_TYPE (t))
2814 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2815 {
2816 error ("integral result type precision does not match "
2817 "field size of BIT_FIELD_REF");
2818 return t;
2819 }
2820 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2821 && !AGGREGATE_TYPE_P (TREE_TYPE (t))
2822 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2823 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2824 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2825 {
2826 error ("mode precision of non-integral result does not "
2827 "match field size of BIT_FIELD_REF");
2828 return t;
2829 }
2830 }
2831
2832 t = TREE_OPERAND (t, 0);
2833 }
2834
2835 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2836 {
2837 error ("invalid reference prefix");
2838 return t;
2839 }
2840 *walk_subtrees = 0;
2841 break;
2842 case PLUS_EXPR:
2843 case MINUS_EXPR:
2844       /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
2845 	 arithmetic must be done using POINTER_PLUS_EXPR.  */
2846 if (POINTER_TYPE_P (TREE_TYPE (t)))
2847 {
2848 error ("invalid operand to plus/minus, type is a pointer");
2849 return t;
2850 }
2851 CHECK_OP (0, "invalid operand to binary operator");
2852 CHECK_OP (1, "invalid operand to binary operator");
2853 break;
2854
2855 case POINTER_PLUS_EXPR:
2856 /* Check to make sure the first operand is a pointer or reference type. */
2857 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2858 {
2859 error ("invalid operand to pointer plus, first operand is not a pointer");
2860 return t;
2861 }
2862 /* Check to make sure the second operand is a ptrofftype. */
2863 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2864 {
2865 error ("invalid operand to pointer plus, second operand is not an "
2866 "integer type of appropriate width");
2867 return t;
2868 }
2869 /* FALLTHROUGH */
2870 case LT_EXPR:
2871 case LE_EXPR:
2872 case GT_EXPR:
2873 case GE_EXPR:
2874 case EQ_EXPR:
2875 case NE_EXPR:
2876 case UNORDERED_EXPR:
2877 case ORDERED_EXPR:
2878 case UNLT_EXPR:
2879 case UNLE_EXPR:
2880 case UNGT_EXPR:
2881 case UNGE_EXPR:
2882 case UNEQ_EXPR:
2883 case LTGT_EXPR:
2884 case MULT_EXPR:
2885 case TRUNC_DIV_EXPR:
2886 case CEIL_DIV_EXPR:
2887 case FLOOR_DIV_EXPR:
2888 case ROUND_DIV_EXPR:
2889 case TRUNC_MOD_EXPR:
2890 case CEIL_MOD_EXPR:
2891 case FLOOR_MOD_EXPR:
2892 case ROUND_MOD_EXPR:
2893 case RDIV_EXPR:
2894 case EXACT_DIV_EXPR:
2895 case MIN_EXPR:
2896 case MAX_EXPR:
2897 case LSHIFT_EXPR:
2898 case RSHIFT_EXPR:
2899 case LROTATE_EXPR:
2900 case RROTATE_EXPR:
2901 case BIT_IOR_EXPR:
2902 case BIT_XOR_EXPR:
2903 case BIT_AND_EXPR:
2904 CHECK_OP (0, "invalid operand to binary operator");
2905 CHECK_OP (1, "invalid operand to binary operator");
2906 break;
2907
2908 case CONSTRUCTOR:
2909 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2910 *walk_subtrees = 0;
2911 break;
2912
2913 case CASE_LABEL_EXPR:
2914 if (CASE_CHAIN (t))
2915 {
2916 error ("invalid CASE_CHAIN");
2917 return t;
2918 }
2919 break;
2920
2921 default:
2922 break;
2923 }
2924 return NULL;
2925
2926 #undef CHECK_OP
2927 }
2928
2929
2930 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2931 Returns true if there is an error, otherwise false. */
2932
2933 static bool
2934 verify_types_in_gimple_min_lval (tree expr)
2935 {
2936 tree op;
2937
2938 if (is_gimple_id (expr))
2939 return false;
2940
2941 if (TREE_CODE (expr) != TARGET_MEM_REF
2942 && TREE_CODE (expr) != MEM_REF)
2943 {
2944 error ("invalid expression for min lvalue");
2945 return true;
2946 }
2947
2948 /* TARGET_MEM_REFs are strange beasts. */
2949 if (TREE_CODE (expr) == TARGET_MEM_REF)
2950 return false;
2951
2952 op = TREE_OPERAND (expr, 0);
2953 if (!is_gimple_val (op))
2954 {
2955 error ("invalid operand in indirect reference");
2956 debug_generic_stmt (op);
2957 return true;
2958 }
2959 /* Memory references now generally can involve a value conversion. */
2960
2961 return false;
2962 }
2963
2964 /* Verify if EXPR is a valid GIMPLE reference expression. If
2965    REQUIRE_LVALUE is true, verify that it is an lvalue.  Returns true
2966 if there is an error, otherwise false. */
2967
2968 static bool
2969 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2970 {
2971 while (handled_component_p (expr))
2972 {
2973 tree op = TREE_OPERAND (expr, 0);
2974
2975 if (TREE_CODE (expr) == ARRAY_REF
2976 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2977 {
2978 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2979 || (TREE_OPERAND (expr, 2)
2980 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2981 || (TREE_OPERAND (expr, 3)
2982 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2983 {
2984 error ("invalid operands to array reference");
2985 debug_generic_stmt (expr);
2986 return true;
2987 }
2988 }
2989
2990 /* Verify if the reference array element types are compatible. */
2991 if (TREE_CODE (expr) == ARRAY_REF
2992 && !useless_type_conversion_p (TREE_TYPE (expr),
2993 TREE_TYPE (TREE_TYPE (op))))
2994 {
2995 error ("type mismatch in array reference");
2996 debug_generic_stmt (TREE_TYPE (expr));
2997 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2998 return true;
2999 }
3000 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3001 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3002 TREE_TYPE (TREE_TYPE (op))))
3003 {
3004 error ("type mismatch in array range reference");
3005 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3006 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3007 return true;
3008 }
3009
3010 if ((TREE_CODE (expr) == REALPART_EXPR
3011 || TREE_CODE (expr) == IMAGPART_EXPR)
3012 && !useless_type_conversion_p (TREE_TYPE (expr),
3013 TREE_TYPE (TREE_TYPE (op))))
3014 {
3015 error ("type mismatch in real/imagpart reference");
3016 debug_generic_stmt (TREE_TYPE (expr));
3017 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3018 return true;
3019 }
3020
3021 if (TREE_CODE (expr) == COMPONENT_REF
3022 && !useless_type_conversion_p (TREE_TYPE (expr),
3023 TREE_TYPE (TREE_OPERAND (expr, 1))))
3024 {
3025 error ("type mismatch in component reference");
3026 debug_generic_stmt (TREE_TYPE (expr));
3027 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3028 return true;
3029 }
3030
3031 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3032 {
3033 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3034 that their operand is not an SSA name or an invariant when
3035 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3036 bug). Otherwise there is nothing to verify, gross mismatches at
3037 most invoke undefined behavior. */
3038 if (require_lvalue
3039 && (TREE_CODE (op) == SSA_NAME
3040 || is_gimple_min_invariant (op)))
3041 {
3042 error ("conversion of an SSA_NAME on the left hand side");
3043 debug_generic_stmt (expr);
3044 return true;
3045 }
3046 else if (TREE_CODE (op) == SSA_NAME
3047 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3048 {
3049 error ("conversion of register to a different size");
3050 debug_generic_stmt (expr);
3051 return true;
3052 }
3053 else if (!handled_component_p (op))
3054 return false;
3055 }
3056
3057 expr = op;
3058 }
3059
3060 if (TREE_CODE (expr) == MEM_REF)
3061 {
3062 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3063 {
3064 error ("invalid address operand in MEM_REF");
3065 debug_generic_stmt (expr);
3066 return true;
3067 }
3068 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3069 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3070 {
3071 error ("invalid offset operand in MEM_REF");
3072 debug_generic_stmt (expr);
3073 return true;
3074 }
3075 }
3076 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3077 {
3078 if (!TMR_BASE (expr)
3079 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3080 {
3081 error ("invalid address operand in TARGET_MEM_REF");
3082 return true;
3083 }
3084 if (!TMR_OFFSET (expr)
3085 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3086 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3087 {
3088 error ("invalid offset operand in TARGET_MEM_REF");
3089 debug_generic_stmt (expr);
3090 return true;
3091 }
3092 }
3093
3094 return ((require_lvalue || !is_gimple_min_invariant (expr))
3095 && verify_types_in_gimple_min_lval (expr));
3096 }
3097
3098 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3099 list of pointer-to types that is trivially convertible to DEST. */
3100
3101 static bool
3102 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3103 {
3104 tree src;
3105
3106 if (!TYPE_POINTER_TO (src_obj))
3107 return true;
3108
3109 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3110 if (useless_type_conversion_p (dest, src))
3111 return true;
3112
3113 return false;
3114 }
3115
3116 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3117 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3118
3119 static bool
3120 valid_fixed_convert_types_p (tree type1, tree type2)
3121 {
3122 return (FIXED_POINT_TYPE_P (type1)
3123 && (INTEGRAL_TYPE_P (type2)
3124 || SCALAR_FLOAT_TYPE_P (type2)
3125 || FIXED_POINT_TYPE_P (type2)));
3126 }
3127
3128 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3129 is a problem, otherwise false. */
3130
3131 static bool
3132 verify_gimple_call (gimple stmt)
3133 {
3134 tree fn = gimple_call_fn (stmt);
3135 tree fntype, fndecl;
3136 unsigned i;
3137
3138 if (gimple_call_internal_p (stmt))
3139 {
3140 if (fn)
3141 {
3142 error ("gimple call has two targets");
3143 debug_generic_stmt (fn);
3144 return true;
3145 }
3146 }
3147 else
3148 {
3149 if (!fn)
3150 {
3151 error ("gimple call has no target");
3152 return true;
3153 }
3154 }
3155
3156 if (fn && !is_gimple_call_addr (fn))
3157 {
3158 error ("invalid function in gimple call");
3159 debug_generic_stmt (fn);
3160 return true;
3161 }
3162
3163 if (fn
3164 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3165 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3166 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3167 {
3168 error ("non-function in gimple call");
3169 return true;
3170 }
3171
3172 fndecl = gimple_call_fndecl (stmt);
3173 if (fndecl
3174 && TREE_CODE (fndecl) == FUNCTION_DECL
3175 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3176 && !DECL_PURE_P (fndecl)
3177 && !TREE_READONLY (fndecl))
3178 {
3179 error ("invalid pure const state for function");
3180 return true;
3181 }
3182
3183 if (gimple_call_lhs (stmt)
3184 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3185 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3186 {
3187 error ("invalid LHS in gimple call");
3188 return true;
3189 }
3190
3191 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3192 {
3193 error ("LHS in noreturn call");
3194 return true;
3195 }
3196
3197 fntype = gimple_call_fntype (stmt);
3198 if (fntype
3199 && gimple_call_lhs (stmt)
3200 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3201 TREE_TYPE (fntype))
3202 /* ??? At least C++ misses conversions at assignments from
3203 void * call results.
3204 ??? Java is completely off. Especially with functions
3205 returning java.lang.Object.
3206 For now simply allow arbitrary pointer type conversions. */
3207 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3208 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3209 {
3210 error ("invalid conversion in gimple call");
3211 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3212 debug_generic_stmt (TREE_TYPE (fntype));
3213 return true;
3214 }
3215
3216 if (gimple_call_chain (stmt)
3217 && !is_gimple_val (gimple_call_chain (stmt)))
3218 {
3219 error ("invalid static chain in gimple call");
3220 debug_generic_stmt (gimple_call_chain (stmt));
3221 return true;
3222 }
3223
3224 /* If there is a static chain argument, this should not be an indirect
3225 call, and the decl should have DECL_STATIC_CHAIN set. */
3226 if (gimple_call_chain (stmt))
3227 {
3228 if (!gimple_call_fndecl (stmt))
3229 {
3230 error ("static chain in indirect gimple call");
3231 return true;
3232 }
3233 fn = TREE_OPERAND (fn, 0);
3234
3235 if (!DECL_STATIC_CHAIN (fn))
3236 {
3237 error ("static chain with function that doesn%'t use one");
3238 return true;
3239 }
3240 }
3241
3242 /* ??? The C frontend passes unpromoted arguments in case it
3243 didn't see a function declaration before the call. So for now
3244 leave the call arguments mostly unverified. Once we gimplify
3245 unit-at-a-time we have a chance to fix this. */
3246
3247 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3248 {
3249 tree arg = gimple_call_arg (stmt, i);
3250 if ((is_gimple_reg_type (TREE_TYPE (arg))
3251 && !is_gimple_val (arg))
3252 || (!is_gimple_reg_type (TREE_TYPE (arg))
3253 && !is_gimple_lvalue (arg)))
3254 {
3255 error ("invalid argument to gimple call");
3256 debug_generic_expr (arg);
3257 return true;
3258 }
3259 }
3260
3261 return false;
3262 }
3263
3264 /* Verifies the gimple comparison with the result type TYPE and
3265 the operands OP0 and OP1. */
3266
3267 static bool
3268 verify_gimple_comparison (tree type, tree op0, tree op1)
3269 {
3270 tree op0_type = TREE_TYPE (op0);
3271 tree op1_type = TREE_TYPE (op1);
3272
3273 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3274 {
3275 error ("invalid operands in gimple comparison");
3276 return true;
3277 }
3278
3279 /* For comparisons we do not have the operations type as the
3280 effective type the comparison is carried out in. Instead
3281 we require that either the first operand is trivially
3282 convertible into the second, or the other way around.
3283 Because we special-case pointers to void we allow
3284 comparisons of pointers with the same mode as well. */
3285 if (!useless_type_conversion_p (op0_type, op1_type)
3286 && !useless_type_conversion_p (op1_type, op0_type)
3287 && (!POINTER_TYPE_P (op0_type)
3288 || !POINTER_TYPE_P (op1_type)
3289 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3290 {
3291 error ("mismatching comparison operand types");
3292 debug_generic_expr (op0_type);
3293 debug_generic_expr (op1_type);
3294 return true;
3295 }
3296
3297 /* The resulting type of a comparison may be an effective boolean type. */
3298 if (INTEGRAL_TYPE_P (type)
3299 && (TREE_CODE (type) == BOOLEAN_TYPE
3300 || TYPE_PRECISION (type) == 1))
3301 ;
3302 /* Or an integer vector type with the same size and element count
3303 as the comparison operand types. */
3304 else if (TREE_CODE (type) == VECTOR_TYPE
3305 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3306 {
3307 if (TREE_CODE (op0_type) != VECTOR_TYPE
3308 || TREE_CODE (op1_type) != VECTOR_TYPE)
3309 {
3310 error ("non-vector operands in vector comparison");
3311 debug_generic_expr (op0_type);
3312 debug_generic_expr (op1_type);
3313 return true;
3314 }
3315
3316 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3317 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3318 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type)))))
3319 {
3320 error ("invalid vector comparison resulting type");
3321 debug_generic_expr (type);
3322 return true;
3323 }
3324 }
3325 else
3326 {
3327 error ("bogus comparison result type");
3328 debug_generic_expr (type);
3329 return true;
3330 }
3331
3332 return false;
3333 }
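
/* Editor's note: illustrative examples, not part of GCC; v4si/v4sf
   stand for the usual 4-element int/float vector types.  Valid GIMPLE
   comparisons under the rules above include

	b_1 = p_2 == q_3;	with a boolean (or 1-bit integral) LHS
	m_1 = va_2 < vb_3;	with v4si LHS and v4sf operands

   where the mask type has the same element count and element size as
   the operands.  A comparison whose result type is, say, a plain
   32-bit int is diagnosed as a "bogus comparison result type".  */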
3334
3335 /* Verify a gimple assignment statement STMT with a unary rhs.
3336 Returns true if anything is wrong. */
3337
3338 static bool
3339 verify_gimple_assign_unary (gimple stmt)
3340 {
3341 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3342 tree lhs = gimple_assign_lhs (stmt);
3343 tree lhs_type = TREE_TYPE (lhs);
3344 tree rhs1 = gimple_assign_rhs1 (stmt);
3345 tree rhs1_type = TREE_TYPE (rhs1);
3346
3347 if (!is_gimple_reg (lhs))
3348 {
3349 error ("non-register as LHS of unary operation");
3350 return true;
3351 }
3352
3353 if (!is_gimple_val (rhs1))
3354 {
3355 error ("invalid operand in unary operation");
3356 return true;
3357 }
3358
3359 /* First handle conversions. */
3360 switch (rhs_code)
3361 {
3362 CASE_CONVERT:
3363 {
3364 /* Allow conversions from pointer type to integral type only if
3365 there is no sign or zero extension involved.
3366 	   For targets where the precision of ptrofftype doesn't match that
3367 of pointers we need to allow arbitrary conversions to ptrofftype. */
3368 if ((POINTER_TYPE_P (lhs_type)
3369 && INTEGRAL_TYPE_P (rhs1_type))
3370 || (POINTER_TYPE_P (rhs1_type)
3371 && INTEGRAL_TYPE_P (lhs_type)
3372 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3373 || ptrofftype_p (sizetype))))
3374 return false;
3375
3376 /* Allow conversion from integral to offset type and vice versa. */
3377 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3378 && INTEGRAL_TYPE_P (rhs1_type))
3379 || (INTEGRAL_TYPE_P (lhs_type)
3380 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3381 return false;
3382
3383 /* Otherwise assert we are converting between types of the
3384 same kind. */
3385 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3386 {
3387 error ("invalid types in nop conversion");
3388 debug_generic_expr (lhs_type);
3389 debug_generic_expr (rhs1_type);
3390 return true;
3391 }
3392
3393 return false;
3394 }
3395
3396 case ADDR_SPACE_CONVERT_EXPR:
3397 {
3398 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3399 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3400 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3401 {
3402 error ("invalid types in address space conversion");
3403 debug_generic_expr (lhs_type);
3404 debug_generic_expr (rhs1_type);
3405 return true;
3406 }
3407
3408 return false;
3409 }
3410
3411 case FIXED_CONVERT_EXPR:
3412 {
3413 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3414 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3415 {
3416 error ("invalid types in fixed-point conversion");
3417 debug_generic_expr (lhs_type);
3418 debug_generic_expr (rhs1_type);
3419 return true;
3420 }
3421
3422 return false;
3423 }
3424
3425 case FLOAT_EXPR:
3426 {
3427 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3428 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3429 	      || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3430 {
3431 error ("invalid types in conversion to floating point");
3432 debug_generic_expr (lhs_type);
3433 debug_generic_expr (rhs1_type);
3434 return true;
3435 }
3436
3437 return false;
3438 }
3439
3440 case FIX_TRUNC_EXPR:
3441 {
3442 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3443 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3444 	      || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3445 {
3446 error ("invalid types in conversion to integer");
3447 debug_generic_expr (lhs_type);
3448 debug_generic_expr (rhs1_type);
3449 return true;
3450 }
3451
3452 return false;
3453 }
3454
3455 case VEC_UNPACK_HI_EXPR:
3456 case VEC_UNPACK_LO_EXPR:
3457 case REDUC_MAX_EXPR:
3458 case REDUC_MIN_EXPR:
3459 case REDUC_PLUS_EXPR:
3460 case VEC_UNPACK_FLOAT_HI_EXPR:
3461 case VEC_UNPACK_FLOAT_LO_EXPR:
3462 /* FIXME. */
3463 return false;
3464
3465 case NEGATE_EXPR:
3466 case ABS_EXPR:
3467 case BIT_NOT_EXPR:
3468 case PAREN_EXPR:
3469 case NON_LVALUE_EXPR:
3470 case CONJ_EXPR:
3471 break;
3472
3473 default:
3474 gcc_unreachable ();
3475 }
3476
3477 /* For the remaining codes assert there is no conversion involved. */
3478 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3479 {
3480 error ("non-trivial conversion in unary operation");
3481 debug_generic_expr (lhs_type);
3482 debug_generic_expr (rhs1_type);
3483 return true;
3484 }
3485
3486 return false;
3487 }
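
/* Editor's note: illustrative examples, not part of GCC.  Under the
   conversion rules above (on an LP64 target),

	l_1 = (long) p_2;	OK: pointer to integer, no extension
	s_1 = (short) p_2;	OK: truncating conversions are allowed
	p_1 = (char *) i_2;	OK: integer to pointer is always allowed

   while a CASE_CONVERT mixing integral and non-integral kinds, such as
   a plain NOP_EXPR from int to float, is diagnosed as "invalid types
   in nop conversion" (FLOAT_EXPR must be used instead).  */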
3488
3489 /* Verify a gimple assignment statement STMT with a binary rhs.
3490 Returns true if anything is wrong. */
3491
3492 static bool
3493 verify_gimple_assign_binary (gimple stmt)
3494 {
3495 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3496 tree lhs = gimple_assign_lhs (stmt);
3497 tree lhs_type = TREE_TYPE (lhs);
3498 tree rhs1 = gimple_assign_rhs1 (stmt);
3499 tree rhs1_type = TREE_TYPE (rhs1);
3500 tree rhs2 = gimple_assign_rhs2 (stmt);
3501 tree rhs2_type = TREE_TYPE (rhs2);
3502
3503 if (!is_gimple_reg (lhs))
3504 {
3505 error ("non-register as LHS of binary operation");
3506 return true;
3507 }
3508
3509 if (!is_gimple_val (rhs1)
3510 || !is_gimple_val (rhs2))
3511 {
3512 error ("invalid operands in binary operation");
3513 return true;
3514 }
3515
3516 /* First handle operations that involve different types. */
3517 switch (rhs_code)
3518 {
3519 case COMPLEX_EXPR:
3520 {
3521 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3522 || !(INTEGRAL_TYPE_P (rhs1_type)
3523 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3524 || !(INTEGRAL_TYPE_P (rhs2_type)
3525 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3526 {
3527 error ("type mismatch in complex expression");
3528 debug_generic_expr (lhs_type);
3529 debug_generic_expr (rhs1_type);
3530 debug_generic_expr (rhs2_type);
3531 return true;
3532 }
3533
3534 return false;
3535 }
3536
3537 case LSHIFT_EXPR:
3538 case RSHIFT_EXPR:
3539 case LROTATE_EXPR:
3540 case RROTATE_EXPR:
3541 {
3542 /* Shifts and rotates are ok on integral types, fixed point
3543 types and integer vector types. */
3544 if ((!INTEGRAL_TYPE_P (rhs1_type)
3545 && !FIXED_POINT_TYPE_P (rhs1_type)
3546 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3547 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3548 || (!INTEGRAL_TYPE_P (rhs2_type)
3549 /* Vector shifts of vectors are also ok. */
3550 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3551 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3552 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3553 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3554 || !useless_type_conversion_p (lhs_type, rhs1_type))
3555 {
3556 error ("type mismatch in shift expression");
3557 debug_generic_expr (lhs_type);
3558 debug_generic_expr (rhs1_type);
3559 debug_generic_expr (rhs2_type);
3560 return true;
3561 }
3562
3563 return false;
3564 }
3565
3566 case VEC_LSHIFT_EXPR:
3567 case VEC_RSHIFT_EXPR:
3568 {
3569 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3570 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3571 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3572 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3573 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3574 || (!INTEGRAL_TYPE_P (rhs2_type)
3575 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3576 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3577 || !useless_type_conversion_p (lhs_type, rhs1_type))
3578 {
3579 error ("type mismatch in vector shift expression");
3580 debug_generic_expr (lhs_type);
3581 debug_generic_expr (rhs1_type);
3582 debug_generic_expr (rhs2_type);
3583 return true;
3584 }
3585 /* For shifting a vector of non-integral components we
3586 only allow shifting by a constant multiple of the element size. */
3587 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3588 && (TREE_CODE (rhs2) != INTEGER_CST
3589 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3590 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3591 {
3592 error ("non-element sized vector shift of floating point vector");
3593 return true;
3594 }
3595
3596 return false;
3597 }
3598
3599 case WIDEN_LSHIFT_EXPR:
3600 {
3601 if (!INTEGRAL_TYPE_P (lhs_type)
3602 || !INTEGRAL_TYPE_P (rhs1_type)
3603 || TREE_CODE (rhs2) != INTEGER_CST
3604 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3605 {
3606 error ("type mismatch in widening vector shift expression");
3607 debug_generic_expr (lhs_type);
3608 debug_generic_expr (rhs1_type);
3609 debug_generic_expr (rhs2_type);
3610 return true;
3611 }
3612
3613 return false;
3614 }
3615
3616 case VEC_WIDEN_LSHIFT_HI_EXPR:
3617 case VEC_WIDEN_LSHIFT_LO_EXPR:
3618 {
3619 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3620 || TREE_CODE (lhs_type) != VECTOR_TYPE
3621 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3622 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3623 || TREE_CODE (rhs2) != INTEGER_CST
3624 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3625 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3626 {
3627 error ("type mismatch in widening vector shift expression");
3628 debug_generic_expr (lhs_type);
3629 debug_generic_expr (rhs1_type);
3630 debug_generic_expr (rhs2_type);
3631 return true;
3632 }
3633
3634 return false;
3635 }
3636
3637 case PLUS_EXPR:
3638 case MINUS_EXPR:
3639 {
3640 /* We use regular PLUS_EXPR and MINUS_EXPR for vectors.
3641 ??? This just makes the checker happy and may not be what is
3642 intended. */
3643 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3644 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3645 {
3646 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3647 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3648 {
3649 error ("invalid non-vector operands to vector valued plus");
3650 return true;
3651 }
3652 lhs_type = TREE_TYPE (lhs_type);
3653 rhs1_type = TREE_TYPE (rhs1_type);
3654 rhs2_type = TREE_TYPE (rhs2_type);
3655 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3656 	     the pointer into the second operand.  */
3657 if (POINTER_TYPE_P (rhs2_type))
3658 {
3659 tree tem = rhs1_type;
3660 rhs1_type = rhs2_type;
3661 rhs2_type = tem;
3662 }
3663 goto do_pointer_plus_expr_check;
3664 }
3665 if (POINTER_TYPE_P (lhs_type)
3666 || POINTER_TYPE_P (rhs1_type)
3667 || POINTER_TYPE_P (rhs2_type))
3668 {
3669 error ("invalid (pointer) operands to plus/minus");
3670 return true;
3671 }
3672
3673 /* Continue with generic binary expression handling. */
3674 break;
3675 }
3676
3677 case POINTER_PLUS_EXPR:
3678 {
3679 do_pointer_plus_expr_check:
3680 if (!POINTER_TYPE_P (rhs1_type)
3681 || !useless_type_conversion_p (lhs_type, rhs1_type)
3682 || !ptrofftype_p (rhs2_type))
3683 {
3684 error ("type mismatch in pointer plus expression");
3685 debug_generic_stmt (lhs_type);
3686 debug_generic_stmt (rhs1_type);
3687 debug_generic_stmt (rhs2_type);
3688 return true;
3689 }
3690
3691 return false;
3692 }
3693
3694 case TRUTH_ANDIF_EXPR:
3695 case TRUTH_ORIF_EXPR:
3696 case TRUTH_AND_EXPR:
3697 case TRUTH_OR_EXPR:
3698 case TRUTH_XOR_EXPR:
3699
3700 gcc_unreachable ();
3701
3702 case LT_EXPR:
3703 case LE_EXPR:
3704 case GT_EXPR:
3705 case GE_EXPR:
3706 case EQ_EXPR:
3707 case NE_EXPR:
3708 case UNORDERED_EXPR:
3709 case ORDERED_EXPR:
3710 case UNLT_EXPR:
3711 case UNLE_EXPR:
3712 case UNGT_EXPR:
3713 case UNGE_EXPR:
3714 case UNEQ_EXPR:
3715 case LTGT_EXPR:
3716 /* Comparisons are also binary, but the result type is not
3717 connected to the operand types. */
3718 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3719
3720 case WIDEN_MULT_EXPR:
3721 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3722 return true;
3723 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3724 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3725
3726 case WIDEN_SUM_EXPR:
3727 case VEC_WIDEN_MULT_HI_EXPR:
3728 case VEC_WIDEN_MULT_LO_EXPR:
3729 case VEC_PACK_TRUNC_EXPR:
3730 case VEC_PACK_SAT_EXPR:
3731 case VEC_PACK_FIX_TRUNC_EXPR:
3732 /* FIXME. */
3733 return false;
3734
3735 case MULT_EXPR:
3736 case TRUNC_DIV_EXPR:
3737 case CEIL_DIV_EXPR:
3738 case FLOOR_DIV_EXPR:
3739 case ROUND_DIV_EXPR:
3740 case TRUNC_MOD_EXPR:
3741 case CEIL_MOD_EXPR:
3742 case FLOOR_MOD_EXPR:
3743 case ROUND_MOD_EXPR:
3744 case RDIV_EXPR:
3745 case EXACT_DIV_EXPR:
3746 case MIN_EXPR:
3747 case MAX_EXPR:
3748 case BIT_IOR_EXPR:
3749 case BIT_XOR_EXPR:
3750 case BIT_AND_EXPR:
3751 /* Continue with generic binary expression handling. */
3752 break;
3753
3754 default:
3755 gcc_unreachable ();
3756 }
3757
3758 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3759 || !useless_type_conversion_p (lhs_type, rhs2_type))
3760 {
3761 error ("type mismatch in binary expression");
3762 debug_generic_stmt (lhs_type);
3763 debug_generic_stmt (rhs1_type);
3764 debug_generic_stmt (rhs2_type);
3765 return true;
3766 }
3767
3768 return false;
3769 }
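
/* Editor's note: illustrative examples, not part of GCC.  The checks
   above enforce that GIMPLE pointer arithmetic looks like

	q_1 = p_2 p+ 4;		POINTER_PLUS_EXPR, offset of ptrofftype

   (dumped with "p+") and that a pointer difference is not written as
   MINUS_EXPR on pointers; in this GCC version the gimplifier casts both
   pointers to an integer type and subtracts those instead.  Any
   PLUS_EXPR or MINUS_EXPR with a pointer-typed operand is diagnosed as
   "invalid (pointer) operands to plus/minus".  */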
3770
3771 /* Verify a gimple assignment statement STMT with a ternary rhs.
3772 Returns true if anything is wrong. */
3773
3774 static bool
3775 verify_gimple_assign_ternary (gimple stmt)
3776 {
3777 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3778 tree lhs = gimple_assign_lhs (stmt);
3779 tree lhs_type = TREE_TYPE (lhs);
3780 tree rhs1 = gimple_assign_rhs1 (stmt);
3781 tree rhs1_type = TREE_TYPE (rhs1);
3782 tree rhs2 = gimple_assign_rhs2 (stmt);
3783 tree rhs2_type = TREE_TYPE (rhs2);
3784 tree rhs3 = gimple_assign_rhs3 (stmt);
3785 tree rhs3_type = TREE_TYPE (rhs3);
3786
3787 if (!is_gimple_reg (lhs))
3788 {
3789 error ("non-register as LHS of ternary operation");
3790 return true;
3791 }
3792
3793 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3794 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3795 || !is_gimple_val (rhs2)
3796 || !is_gimple_val (rhs3))
3797 {
3798 error ("invalid operands in ternary operation");
3799 return true;
3800 }
3801
3802 /* First handle operations that involve different types. */
3803 switch (rhs_code)
3804 {
3805 case WIDEN_MULT_PLUS_EXPR:
3806 case WIDEN_MULT_MINUS_EXPR:
3807 if ((!INTEGRAL_TYPE_P (rhs1_type)
3808 && !FIXED_POINT_TYPE_P (rhs1_type))
3809 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3810 || !useless_type_conversion_p (lhs_type, rhs3_type)
3811 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3812 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3813 {
3814 error ("type mismatch in widening multiply-accumulate expression");
3815 debug_generic_expr (lhs_type);
3816 debug_generic_expr (rhs1_type);
3817 debug_generic_expr (rhs2_type);
3818 debug_generic_expr (rhs3_type);
3819 return true;
3820 }
3821 break;
3822
3823 case FMA_EXPR:
3824 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3825 || !useless_type_conversion_p (lhs_type, rhs2_type)
3826 || !useless_type_conversion_p (lhs_type, rhs3_type))
3827 {
3828 error ("type mismatch in fused multiply-add expression");
3829 debug_generic_expr (lhs_type);
3830 debug_generic_expr (rhs1_type);
3831 debug_generic_expr (rhs2_type);
3832 debug_generic_expr (rhs3_type);
3833 return true;
3834 }
3835 break;
3836
3837 case COND_EXPR:
3838 case VEC_COND_EXPR:
3839 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3840 || !useless_type_conversion_p (lhs_type, rhs3_type))
3841 {
3842 error ("type mismatch in conditional expression");
3843 debug_generic_expr (lhs_type);
3844 debug_generic_expr (rhs2_type);
3845 debug_generic_expr (rhs3_type);
3846 return true;
3847 }
3848 break;
3849
3850 case VEC_PERM_EXPR:
3851 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3852 || !useless_type_conversion_p (lhs_type, rhs2_type))
3853 {
3854 error ("type mismatch in vector permute expression");
3855 debug_generic_expr (lhs_type);
3856 debug_generic_expr (rhs1_type);
3857 debug_generic_expr (rhs2_type);
3858 debug_generic_expr (rhs3_type);
3859 return true;
3860 }
3861
3862 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3863 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3864 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3865 {
3866 error ("vector types expected in vector permute expression");
3867 debug_generic_expr (lhs_type);
3868 debug_generic_expr (rhs1_type);
3869 debug_generic_expr (rhs2_type);
3870 debug_generic_expr (rhs3_type);
3871 return true;
3872 }
3873
3874 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3875 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3876 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3877 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3878 != TYPE_VECTOR_SUBPARTS (lhs_type))
3879 {
3880 error ("vectors with different element number found "
3881 "in vector permute expression");
3882 debug_generic_expr (lhs_type);
3883 debug_generic_expr (rhs1_type);
3884 debug_generic_expr (rhs2_type);
3885 debug_generic_expr (rhs3_type);
3886 return true;
3887 }
3888
3889 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3890 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3891 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3892 {
3893 error ("invalid mask type in vector permute expression");
3894 debug_generic_expr (lhs_type);
3895 debug_generic_expr (rhs1_type);
3896 debug_generic_expr (rhs2_type);
3897 debug_generic_expr (rhs3_type);
3898 return true;
3899 }
3900
3901 return false;
3902
3903 case DOT_PROD_EXPR:
3904 case REALIGN_LOAD_EXPR:
3905 /* FIXME. */
3906 return false;
3907
3908 default:
3909 gcc_unreachable ();
3910 }
3911 return false;
3912 }
3913
3914 /* Verify a gimple assignment statement STMT with a single rhs.
3915 Returns true if anything is wrong. */
3916
3917 static bool
3918 verify_gimple_assign_single (gimple stmt)
3919 {
3920 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3921 tree lhs = gimple_assign_lhs (stmt);
3922 tree lhs_type = TREE_TYPE (lhs);
3923 tree rhs1 = gimple_assign_rhs1 (stmt);
3924 tree rhs1_type = TREE_TYPE (rhs1);
3925 bool res = false;
3926
3927 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3928 {
3929 error ("non-trivial conversion at assignment");
3930 debug_generic_expr (lhs_type);
3931 debug_generic_expr (rhs1_type);
3932 return true;
3933 }
3934
3935 if (handled_component_p (lhs))
3936 res |= verify_types_in_gimple_reference (lhs, true);
3937
3938 /* Special codes we cannot handle via their class. */
3939 switch (rhs_code)
3940 {
3941 case ADDR_EXPR:
3942 {
3943 tree op = TREE_OPERAND (rhs1, 0);
3944 if (!is_gimple_addressable (op))
3945 {
3946 error ("invalid operand in unary expression");
3947 return true;
3948 }
3949
3950 /* Technically there is no longer a need for matching types, but
3951 gimple hygiene asks for this check. In LTO we can end up
3952 combining incompatible units, and thus with addresses of
3953 globals that change their type to a common one. */
3954 if (!in_lto_p
3955 && !types_compatible_p (TREE_TYPE (op),
3956 TREE_TYPE (TREE_TYPE (rhs1)))
3957 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3958 TREE_TYPE (op)))
3959 {
3960 error ("type mismatch in address expression");
3961 debug_generic_stmt (TREE_TYPE (rhs1));
3962 debug_generic_stmt (TREE_TYPE (op));
3963 return true;
3964 }
3965
3966 return verify_types_in_gimple_reference (op, true);
3967 }
3968
3969 /* tcc_reference */
3970 case INDIRECT_REF:
3971 error ("INDIRECT_REF in gimple IL");
3972 return true;
3973
3974 case COMPONENT_REF:
3975 case BIT_FIELD_REF:
3976 case ARRAY_REF:
3977 case ARRAY_RANGE_REF:
3978 case VIEW_CONVERT_EXPR:
3979 case REALPART_EXPR:
3980 case IMAGPART_EXPR:
3981 case TARGET_MEM_REF:
3982 case MEM_REF:
3983 if (!is_gimple_reg (lhs)
3984 && is_gimple_reg_type (TREE_TYPE (lhs)))
3985 {
3986 error ("invalid rhs for gimple memory store");
3987 debug_generic_stmt (lhs);
3988 debug_generic_stmt (rhs1);
3989 return true;
3990 }
3991 return res || verify_types_in_gimple_reference (rhs1, false);
3992
3993 /* tcc_constant */
3994 case SSA_NAME:
3995 case INTEGER_CST:
3996 case REAL_CST:
3997 case FIXED_CST:
3998 case COMPLEX_CST:
3999 case VECTOR_CST:
4000 case STRING_CST:
4001 return res;
4002
4003 /* tcc_declaration */
4004 case CONST_DECL:
4005 return res;
4006 case VAR_DECL:
4007 case PARM_DECL:
4008 if (!is_gimple_reg (lhs)
4009 && !is_gimple_reg (rhs1)
4010 && is_gimple_reg_type (TREE_TYPE (lhs)))
4011 {
4012 error ("invalid rhs for gimple memory store");
4013 debug_generic_stmt (lhs);
4014 debug_generic_stmt (rhs1);
4015 return true;
4016 }
4017 return res;
4018
4019 case CONSTRUCTOR:
4020 case OBJ_TYPE_REF:
4021 case ASSERT_EXPR:
4022 case WITH_SIZE_EXPR:
4023 /* FIXME. */
4024 return res;
4025
4026 default:;
4027 }
4028
4029 return res;
4030 }
4031
4032 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4033 is a problem, otherwise false. */
4034
4035 static bool
4036 verify_gimple_assign (gimple stmt)
4037 {
4038 switch (gimple_assign_rhs_class (stmt))
4039 {
4040 case GIMPLE_SINGLE_RHS:
4041 return verify_gimple_assign_single (stmt);
4042
4043 case GIMPLE_UNARY_RHS:
4044 return verify_gimple_assign_unary (stmt);
4045
4046 case GIMPLE_BINARY_RHS:
4047 return verify_gimple_assign_binary (stmt);
4048
4049 case GIMPLE_TERNARY_RHS:
4050 return verify_gimple_assign_ternary (stmt);
4051
4052 default:
4053 gcc_unreachable ();
4054 }
4055 }
4056
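/* As a rough illustration (not part of the verifier), the RHS classes
   dispatched on above correspond to assignments such as:

     a_1 = b_2;              <-- GIMPLE_SINGLE_RHS
     a_1 = -b_2;             <-- GIMPLE_UNARY_RHS
     a_1 = b_2 + c_3;        <-- GIMPLE_BINARY_RHS
     a_1 = b_2 ? c_3 : d_4;  <-- GIMPLE_TERNARY_RHS (COND_EXPR)

   The class is computed from the tree code of the RHS by
   get_gimple_rhs_class. */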
4057 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4058 is a problem, otherwise false. */
4059
4060 static bool
4061 verify_gimple_return (gimple stmt)
4062 {
4063 tree op = gimple_return_retval (stmt);
4064 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4065
4066 /* We cannot test for present return values as we do not fix up missing
4067 return values from the original source. */
4068 if (op == NULL)
4069 return false;
4070
4071 if (!is_gimple_val (op)
4072 && TREE_CODE (op) != RESULT_DECL)
4073 {
4074 error ("invalid operand in return statement");
4075 debug_generic_stmt (op);
4076 return true;
4077 }
4078
4079 if ((TREE_CODE (op) == RESULT_DECL
4080 && DECL_BY_REFERENCE (op))
4081 || (TREE_CODE (op) == SSA_NAME
4082 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4083 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4084 op = TREE_TYPE (op);
4085
4086 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4087 {
4088 error ("invalid conversion in return statement");
4089 debug_generic_stmt (restype);
4090 debug_generic_stmt (TREE_TYPE (op));
4091 return true;
4092 }
4093
4094 return false;
4095 }
4096
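/* For example, returning a pointer from a function whose result type
   is int would trip the conversion check above:

     return p_1;   <-- "invalid conversion in return statement"

   (A hypothetical sketch; front ends fold in the conversion, so such
   IL would normally indicate a buggy pass.) */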
4097
4098 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4099 is a problem, otherwise false. */
4100
4101 static bool
4102 verify_gimple_goto (gimple stmt)
4103 {
4104 tree dest = gimple_goto_dest (stmt);
4105
4106 /* ??? We have two canonical forms of direct goto destinations, a
4107 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4108 if (TREE_CODE (dest) != LABEL_DECL
4109 && (!is_gimple_val (dest)
4110 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4111 {
4112 error ("goto destination is neither a label nor a pointer");
4113 return true;
4114 }
4115
4116 return false;
4117 }
4118
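/* Illustrative forms of the accepted destinations (sketch only):

     goto lab;      <-- DEST is a bare LABEL_DECL
     goto *p_1;     <-- computed goto; DEST is a gimple value of
                        pointer type

   Anything else is diagnosed above. */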
4119 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4120 is a problem, otherwise false. */
4121
4122 static bool
4123 verify_gimple_switch (gimple stmt)
4124 {
4125 unsigned int i, n;
4126 tree elt, prev_upper_bound = NULL_TREE;
4127 tree index_type, elt_type = NULL_TREE;
4128
4129 if (!is_gimple_val (gimple_switch_index (stmt)))
4130 {
4131 error ("invalid operand to switch statement");
4132 debug_generic_stmt (gimple_switch_index (stmt));
4133 return true;
4134 }
4135
4136 index_type = TREE_TYPE (gimple_switch_index (stmt));
4137 if (! INTEGRAL_TYPE_P (index_type))
4138 {
4139 error ("non-integral type switch statement");
4140 debug_generic_expr (index_type);
4141 return true;
4142 }
4143
4144 elt = gimple_switch_default_label (stmt);
4145 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4146 {
4147 error ("invalid default case label in switch statement");
4148 debug_generic_expr (elt);
4149 return true;
4150 }
4151
4152 n = gimple_switch_num_labels (stmt);
4153 for (i = 1; i < n; i++)
4154 {
4155 elt = gimple_switch_label (stmt, i);
4156
4157 if (! CASE_LOW (elt))
4158 {
4159 error ("invalid case label in switch statement");
4160 debug_generic_expr (elt);
4161 return true;
4162 }
4163 if (CASE_HIGH (elt)
4164 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4165 {
4166 error ("invalid case range in switch statement");
4167 debug_generic_expr (elt);
4168 return true;
4169 }
4170
4171 if (elt_type)
4172 {
4173 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4174 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4175 {
4176 error ("type mismatch for case label in switch statement");
4177 debug_generic_expr (elt);
4178 return true;
4179 }
4180 }
4181 else
4182 {
4183 elt_type = TREE_TYPE (CASE_LOW (elt));
4184 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4185 {
4186 error ("type precision mismatch in switch statement");
4187 return true;
4188 }
4189 }
4190
4191 if (prev_upper_bound)
4192 {
4193 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4194 {
4195 error ("case labels not sorted in switch statement");
4196 return true;
4197 }
4198 }
4199
4200 prev_upper_bound = CASE_HIGH (elt);
4201 if (! prev_upper_bound)
4202 prev_upper_bound = CASE_LOW (elt);
4203 }
4204
4205 return false;
4206 }
4207
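/* As an illustration, a case vector accepted by the checks above has
   the default label first and the remaining labels sorted and
   non-overlapping, e.g.:

     switch (i_1) <default: L3, case 1: L1, case 4 ... 6: L2>

   A subsequent "case 5" would fail the sortedness check, since 5 is
   not above the previous upper bound 6. (A sketch; L1..L3 are
   hypothetical labels.) */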
4208 /* Verify a gimple debug statement STMT.
4209 Returns true if anything is wrong. */
4210
4211 static bool
4212 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4213 {
4214 /* There isn't much that could be wrong in a gimple debug stmt. A
4215 gimple debug bind stmt, for example, maps a tree (usually a
4216 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
4217 member of an aggregate type) to another tree that can be an
4218 arbitrary expression. These stmts expand into debug insns, and
4219 are converted to debug notes by var-tracking.c. */
4220 return false;
4221 }
4222
4223 /* Verify a gimple label statement STMT.
4224 Returns true if anything is wrong. */
4225
4226 static bool
4227 verify_gimple_label (gimple stmt)
4228 {
4229 tree decl = gimple_label_label (stmt);
4230 int uid;
4231 bool err = false;
4232
4233 if (TREE_CODE (decl) != LABEL_DECL)
4234 return true;
4235
4236 uid = LABEL_DECL_UID (decl);
4237 if (cfun->cfg
4238 && (uid == -1
4239 || VEC_index (basic_block,
4240 label_to_block_map, uid) != gimple_bb (stmt)))
4241 {
4242 error ("incorrect entry in label_to_block_map");
4243 err |= true;
4244 }
4245
4246 uid = EH_LANDING_PAD_NR (decl);
4247 if (uid)
4248 {
4249 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4250 if (decl != lp->post_landing_pad)
4251 {
4252 error ("incorrect setting of landing pad number");
4253 err |= true;
4254 }
4255 }
4256
4257 return err;
4258 }
4259
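/* For example, if label L1 has LABEL_DECL_UID 5 and appears in basic
   block 7, then entry 5 of label_to_block_map must refer to block 7;
   the EH check is the analogous consistency requirement between a
   landing pad label and its eh_landing_pad structure. (Numbers are
   hypothetical.) */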
4260 /* Verify the GIMPLE statement STMT. Returns true if there is an
4261 error, otherwise false. */
4262
4263 static bool
4264 verify_gimple_stmt (gimple stmt)
4265 {
4266 switch (gimple_code (stmt))
4267 {
4268 case GIMPLE_ASSIGN:
4269 return verify_gimple_assign (stmt);
4270
4271 case GIMPLE_LABEL:
4272 return verify_gimple_label (stmt);
4273
4274 case GIMPLE_CALL:
4275 return verify_gimple_call (stmt);
4276
4277 case GIMPLE_COND:
4278 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4279 {
4280 error ("invalid comparison code in gimple cond");
4281 return true;
4282 }
4283 if (!(!gimple_cond_true_label (stmt)
4284 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4285 || !(!gimple_cond_false_label (stmt)
4286 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4287 {
4288 error ("invalid labels in gimple cond");
4289 return true;
4290 }
4291
4292 return verify_gimple_comparison (boolean_type_node,
4293 gimple_cond_lhs (stmt),
4294 gimple_cond_rhs (stmt));
4295
4296 case GIMPLE_GOTO:
4297 return verify_gimple_goto (stmt);
4298
4299 case GIMPLE_SWITCH:
4300 return verify_gimple_switch (stmt);
4301
4302 case GIMPLE_RETURN:
4303 return verify_gimple_return (stmt);
4304
4305 case GIMPLE_ASM:
4306 return false;
4307
4308 case GIMPLE_TRANSACTION:
4309 return verify_gimple_transaction (stmt);
4310
4311 /* Tuples that do not have tree operands. */
4312 case GIMPLE_NOP:
4313 case GIMPLE_PREDICT:
4314 case GIMPLE_RESX:
4315 case GIMPLE_EH_DISPATCH:
4316 case GIMPLE_EH_MUST_NOT_THROW:
4317 return false;
4318
4319 CASE_GIMPLE_OMP:
4320 /* OpenMP directives are validated by the FE and never operated
4321 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4322 non-gimple expressions when the main index variable has had
4323 its address taken. This does not affect the loop itself
4324 because the header of a GIMPLE_OMP_FOR is merely used to determine
4325 how to set up the parallel iteration. */
4326 return false;
4327
4328 case GIMPLE_DEBUG:
4329 return verify_gimple_debug (stmt);
4330
4331 default:
4332 gcc_unreachable ();
4333 }
4334 }
4335
4336 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4337 and false otherwise. */
4338
4339 static bool
4340 verify_gimple_phi (gimple phi)
4341 {
4342 bool err = false;
4343 unsigned i;
4344 tree phi_result = gimple_phi_result (phi);
4345 bool virtual_p;
4346
4347 if (!phi_result)
4348 {
4349 error ("invalid PHI result");
4350 return true;
4351 }
4352
4353 virtual_p = !is_gimple_reg (phi_result);
4354 if (TREE_CODE (phi_result) != SSA_NAME
4355 || (virtual_p
4356 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4357 {
4358 error ("invalid PHI result");
4359 err = true;
4360 }
4361
4362 for (i = 0; i < gimple_phi_num_args (phi); i++)
4363 {
4364 tree t = gimple_phi_arg_def (phi, i);
4365
4366 if (!t)
4367 {
4368 error ("missing PHI def");
4369 err |= true;
4370 continue;
4371 }
4372 /* Addressable variables do have SSA_NAMEs but they
4373 are not considered gimple values. */
4374 else if ((TREE_CODE (t) == SSA_NAME
4375 && virtual_p != !is_gimple_reg (t))
4376 || (virtual_p
4377 && (TREE_CODE (t) != SSA_NAME
4378 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4379 || (!virtual_p
4380 && !is_gimple_val (t)))
4381 {
4382 error ("invalid PHI argument");
4383 debug_generic_expr (t);
4384 err |= true;
4385 }
4386 #ifdef ENABLE_TYPES_CHECKING
4387 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4388 {
4389 error ("incompatible types in PHI argument %u", i);
4390 debug_generic_stmt (TREE_TYPE (phi_result));
4391 debug_generic_stmt (TREE_TYPE (t));
4392 err |= true;
4393 }
4394 #endif
4395 }
4396
4397 return err;
4398 }
4399
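/* An example of a PHI satisfying the checks above, in a block with
   predecessors 2 and 3 (sketch):

     x_4 = PHI <x_1(2), x_2(3)>

   For a virtual PHI the result and every argument must instead be
   versions of the one virtual operand returned by gimple_vop. */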
4400 /* Verify the GIMPLE statements inside the sequence STMTS. */
4401
4402 static bool
4403 verify_gimple_in_seq_2 (gimple_seq stmts)
4404 {
4405 gimple_stmt_iterator ittr;
4406 bool err = false;
4407
4408 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4409 {
4410 gimple stmt = gsi_stmt (ittr);
4411
4412 switch (gimple_code (stmt))
4413 {
4414 case GIMPLE_BIND:
4415 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4416 break;
4417
4418 case GIMPLE_TRY:
4419 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4420 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4421 break;
4422
4423 case GIMPLE_EH_FILTER:
4424 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4425 break;
4426
4427 case GIMPLE_EH_ELSE:
4428 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4429 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4430 break;
4431
4432 case GIMPLE_CATCH:
4433 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4434 break;
4435
4436 case GIMPLE_TRANSACTION:
4437 err |= verify_gimple_transaction (stmt);
4438 break;
4439
4440 default:
4441 {
4442 bool err2 = verify_gimple_stmt (stmt);
4443 if (err2)
4444 debug_gimple_stmt (stmt);
4445 err |= err2;
4446 }
4447 }
4448 }
4449
4450 return err;
4451 }
4452
4453 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4454 is a problem, otherwise false. */
4455
4456 static bool
4457 verify_gimple_transaction (gimple stmt)
4458 {
4459 tree lab = gimple_transaction_label (stmt);
4460 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4461 return true;
4462 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4463 }
4464
4465
4466 /* Verify the GIMPLE statements inside the statement list STMTS. */
4467
4468 DEBUG_FUNCTION void
4469 verify_gimple_in_seq (gimple_seq stmts)
4470 {
4471 timevar_push (TV_TREE_STMT_VERIFY);
4472 if (verify_gimple_in_seq_2 (stmts))
4473 internal_error ("verify_gimple failed");
4474 timevar_pop (TV_TREE_STMT_VERIFY);
4475 }
4476
4477 /* Return true when the tree node T can be shared. */
4478
4479 bool
4480 tree_node_can_be_shared (tree t)
4481 {
4482 if (IS_TYPE_OR_DECL_P (t)
4483 || is_gimple_min_invariant (t)
4484 || TREE_CODE (t) == SSA_NAME
4485 || t == error_mark_node
4486 || TREE_CODE (t) == IDENTIFIER_NODE)
4487 return true;
4488
4489 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4490 return true;
4491
4492 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4493 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4494 || TREE_CODE (t) == COMPONENT_REF
4495 || TREE_CODE (t) == REALPART_EXPR
4496 || TREE_CODE (t) == IMAGPART_EXPR)
4497 t = TREE_OPERAND (t, 0);
4498
4499 if (DECL_P (t))
4500 return true;
4501
4502 return false;
4503 }
4504
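/* For instance, SSA names, constants, and decls may be referenced from
   many statements, and so may a reference like a.b whose base is a
   DECL and whose offsets are invariant. Something like p_1->f is not
   shareable, since the reference tree wraps the SSA name p_1.
   (Illustrative examples, not an exhaustive list.) */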
4505 /* Called via walk_gimple_stmt. Verify tree sharing. */
4506
4507 static tree
4508 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4509 {
4510 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4511 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4512
4513 if (tree_node_can_be_shared (*tp))
4514 {
4515 *walk_subtrees = false;
4516 return NULL;
4517 }
4518
4519 if (pointer_set_insert (visited, *tp))
4520 return *tp;
4521
4522 return NULL;
4523 }
4524
4525 static bool eh_error_found;
4526 static int
4527 verify_eh_throw_stmt_node (void **slot, void *data)
4528 {
4529 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4530 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4531
4532 if (!pointer_set_contains (visited, node->stmt))
4533 {
4534 error ("dead STMT in EH table");
4535 debug_gimple_stmt (node->stmt);
4536 eh_error_found = true;
4537 }
4538 return 1;
4539 }
4540
4541 /* Verify the GIMPLE statements in the CFG of FN. */
4542
4543 DEBUG_FUNCTION void
4544 verify_gimple_in_cfg (struct function *fn)
4545 {
4546 basic_block bb;
4547 bool err = false;
4548 struct pointer_set_t *visited, *visited_stmts;
4549
4550 timevar_push (TV_TREE_STMT_VERIFY);
4551 visited = pointer_set_create ();
4552 visited_stmts = pointer_set_create ();
4553
4554 FOR_EACH_BB_FN (bb, fn)
4555 {
4556 gimple_stmt_iterator gsi;
4557
4558 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4559 {
4560 gimple phi = gsi_stmt (gsi);
4561 bool err2 = false;
4562 unsigned i;
4563
4564 pointer_set_insert (visited_stmts, phi);
4565
4566 if (gimple_bb (phi) != bb)
4567 {
4568 error ("gimple_bb (phi) is set to a wrong basic block");
4569 err2 = true;
4570 }
4571
4572 err2 |= verify_gimple_phi (phi);
4573
4574 for (i = 0; i < gimple_phi_num_args (phi); i++)
4575 {
4576 tree arg = gimple_phi_arg_def (phi, i);
4577 tree addr = walk_tree (&arg, verify_node_sharing, visited, NULL);
4578 if (addr)
4579 {
4580 error ("incorrect sharing of tree nodes");
4581 debug_generic_expr (addr);
4582 err2 |= true;
4583 }
4584 }
4585
4586 if (err2)
4587 debug_gimple_stmt (phi);
4588 err |= err2;
4589 }
4590
4591 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4592 {
4593 gimple stmt = gsi_stmt (gsi);
4594 bool err2 = false;
4595 struct walk_stmt_info wi;
4596 tree addr;
4597 int lp_nr;
4598
4599 pointer_set_insert (visited_stmts, stmt);
4600
4601 if (gimple_bb (stmt) != bb)
4602 {
4603 error ("gimple_bb (stmt) is set to a wrong basic block");
4604 err2 = true;
4605 }
4606
4607 err2 |= verify_gimple_stmt (stmt);
4608
4609 memset (&wi, 0, sizeof (wi));
4610 wi.info = (void *) visited;
4611 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4612 if (addr)
4613 {
4614 error ("incorrect sharing of tree nodes");
4615 debug_generic_expr (addr);
4616 err2 |= true;
4617 }
4618
4619 /* ??? Instead of not checking these stmts at all, the walker
4620 should know its context via wi. */
4621 if (!is_gimple_debug (stmt)
4622 && !is_gimple_omp (stmt))
4623 {
4624 memset (&wi, 0, sizeof (wi));
4625 addr = walk_gimple_op (stmt, verify_expr, &wi);
4626 if (addr)
4627 {
4628 debug_generic_expr (addr);
4629 inform (gimple_location (stmt), "in statement");
4630 err2 |= true;
4631 }
4632 }
4633
4634 /* If the statement is marked as part of an EH region, then it is
4635 expected that the statement could throw. Verify that when we
4636 have optimizations that simplify statements such that we prove
4637 that they cannot throw, that we update other data structures
4638 to match. */
4639 lp_nr = lookup_stmt_eh_lp (stmt);
4640 if (lp_nr != 0)
4641 {
4642 if (!stmt_could_throw_p (stmt))
4643 {
4644 error ("statement marked for throw, but doesn%'t");
4645 err2 |= true;
4646 }
4647 else if (lp_nr > 0
4648 && !gsi_one_before_end_p (gsi)
4649 && stmt_can_throw_internal (stmt))
4650 {
4651 error ("statement marked for throw in middle of block");
4652 err2 |= true;
4653 }
4654 }
4655
4656 if (err2)
4657 debug_gimple_stmt (stmt);
4658 err |= err2;
4659 }
4660 }
4661
4662 eh_error_found = false;
4663 if (get_eh_throw_stmt_table (cfun))
4664 htab_traverse (get_eh_throw_stmt_table (cfun),
4665 verify_eh_throw_stmt_node,
4666 visited_stmts);
4667
4668 if (err || eh_error_found)
4669 internal_error ("verify_gimple failed");
4670
4671 pointer_set_destroy (visited);
4672 pointer_set_destroy (visited_stmts);
4673 verify_histograms ();
4674 timevar_pop (TV_TREE_STMT_VERIFY);
4675 }
4676
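/* This verifier typically runs from checking-enabled builds after
   passes that rewrite the IL. Being a DEBUG_FUNCTION it can also be
   invoked by hand, e.g. from a debugger:

     (gdb) call verify_gimple_in_cfg (cfun)

   Any failure aborts compilation via internal_error. */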
4677
4678 /* Verifies that the flow information is OK. */
4679
4680 static int
4681 gimple_verify_flow_info (void)
4682 {
4683 int err = 0;
4684 basic_block bb;
4685 gimple_stmt_iterator gsi;
4686 gimple stmt;
4687 edge e;
4688 edge_iterator ei;
4689
4690 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4691 {
4692 error ("ENTRY_BLOCK has IL associated with it");
4693 err = 1;
4694 }
4695
4696 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4697 {
4698 error ("EXIT_BLOCK has IL associated with it");
4699 err = 1;
4700 }
4701
4702 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4703 if (e->flags & EDGE_FALLTHRU)
4704 {
4705 error ("fallthru to exit from bb %d", e->src->index);
4706 err = 1;
4707 }
4708
4709 FOR_EACH_BB (bb)
4710 {
4711 bool found_ctrl_stmt = false;
4712
4713 stmt = NULL;
4714
4715 /* Skip labels at the start of the basic block. */
4716 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4717 {
4718 tree label;
4719 gimple prev_stmt = stmt;
4720
4721 stmt = gsi_stmt (gsi);
4722
4723 if (gimple_code (stmt) != GIMPLE_LABEL)
4724 break;
4725
4726 label = gimple_label_label (stmt);
4727 if (prev_stmt && DECL_NONLOCAL (label))
4728 {
4729 error ("nonlocal label ");
4730 print_generic_expr (stderr, label, 0);
4731 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4732 bb->index);
4733 err = 1;
4734 }
4735
4736 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4737 {
4738 error ("EH landing pad label ");
4739 print_generic_expr (stderr, label, 0);
4740 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4741 bb->index);
4742 err = 1;
4743 }
4744
4745 if (label_to_block (label) != bb)
4746 {
4747 error ("label ");
4748 print_generic_expr (stderr, label, 0);
4749 fprintf (stderr, " to block does not match in bb %d",
4750 bb->index);
4751 err = 1;
4752 }
4753
4754 if (decl_function_context (label) != current_function_decl)
4755 {
4756 error ("label ");
4757 print_generic_expr (stderr, label, 0);
4758 fprintf (stderr, " has incorrect context in bb %d",
4759 bb->index);
4760 err = 1;
4761 }
4762 }
4763
4764 /* Verify that the body of basic block BB is free of control flow. */
4765 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4766 {
4767 gimple stmt = gsi_stmt (gsi);
4768
4769 if (found_ctrl_stmt)
4770 {
4771 error ("control flow in the middle of basic block %d",
4772 bb->index);
4773 err = 1;
4774 }
4775
4776 if (stmt_ends_bb_p (stmt))
4777 found_ctrl_stmt = true;
4778
4779 if (gimple_code (stmt) == GIMPLE_LABEL)
4780 {
4781 error ("label ");
4782 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4783 fprintf (stderr, " in the middle of basic block %d", bb->index);
4784 err = 1;
4785 }
4786 }
4787
4788 gsi = gsi_last_bb (bb);
4789 if (gsi_end_p (gsi))
4790 continue;
4791
4792 stmt = gsi_stmt (gsi);
4793
4794 if (gimple_code (stmt) == GIMPLE_LABEL)
4795 continue;
4796
4797 err |= verify_eh_edges (stmt);
4798
4799 if (is_ctrl_stmt (stmt))
4800 {
4801 FOR_EACH_EDGE (e, ei, bb->succs)
4802 if (e->flags & EDGE_FALLTHRU)
4803 {
4804 error ("fallthru edge after a control statement in bb %d",
4805 bb->index);
4806 err = 1;
4807 }
4808 }
4809
4810 if (gimple_code (stmt) != GIMPLE_COND)
4811 {
4812 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
4813 after anything other than a GIMPLE_COND statement. */
4814 FOR_EACH_EDGE (e, ei, bb->succs)
4815 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4816 {
4817 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4818 bb->index);
4819 err = 1;
4820 }
4821 }
4822
4823 switch (gimple_code (stmt))
4824 {
4825 case GIMPLE_COND:
4826 {
4827 edge true_edge;
4828 edge false_edge;
4829
4830 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4831
4832 if (!true_edge
4833 || !false_edge
4834 || !(true_edge->flags & EDGE_TRUE_VALUE)
4835 || !(false_edge->flags & EDGE_FALSE_VALUE)
4836 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4837 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4838 || EDGE_COUNT (bb->succs) >= 3)
4839 {
4840 error ("wrong outgoing edge flags at end of bb %d",
4841 bb->index);
4842 err = 1;
4843 }
4844 }
4845 break;
4846
4847 case GIMPLE_GOTO:
4848 if (simple_goto_p (stmt))
4849 {
4850 error ("explicit goto at end of bb %d", bb->index);
4851 err = 1;
4852 }
4853 else
4854 {
4855 /* FIXME. We should double check that the labels in the
4856 destination blocks have their address taken. */
4857 FOR_EACH_EDGE (e, ei, bb->succs)
4858 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4859 | EDGE_FALSE_VALUE))
4860 || !(e->flags & EDGE_ABNORMAL))
4861 {
4862 error ("wrong outgoing edge flags at end of bb %d",
4863 bb->index);
4864 err = 1;
4865 }
4866 }
4867 break;
4868
4869 case GIMPLE_CALL:
4870 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4871 break;
4872 /* ... fallthru ... */
4873 case GIMPLE_RETURN:
4874 if (!single_succ_p (bb)
4875 || (single_succ_edge (bb)->flags
4876 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4877 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4878 {
4879 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4880 err = 1;
4881 }
4882 if (single_succ (bb) != EXIT_BLOCK_PTR)
4883 {
4884 error ("return edge does not point to exit in bb %d",
4885 bb->index);
4886 err = 1;
4887 }
4888 break;
4889
4890 case GIMPLE_SWITCH:
4891 {
4892 tree prev;
4893 edge e;
4894 size_t i, n;
4895
4896 n = gimple_switch_num_labels (stmt);
4897
4898 /* Mark all the destination basic blocks. */
4899 for (i = 0; i < n; ++i)
4900 {
4901 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4902 basic_block label_bb = label_to_block (lab);
4903 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4904 label_bb->aux = (void *)1;
4905 }
4906
4907 /* Verify that the case labels are sorted. */
4908 prev = gimple_switch_label (stmt, 0);
4909 for (i = 1; i < n; ++i)
4910 {
4911 tree c = gimple_switch_label (stmt, i);
4912 if (!CASE_LOW (c))
4913 {
4914 error ("found default case not at the start of "
4915 "case vector");
4916 err = 1;
4917 continue;
4918 }
4919 if (CASE_LOW (prev)
4920 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4921 {
4922 error ("case labels not sorted: ");
4923 print_generic_expr (stderr, prev, 0);
4924 fprintf (stderr," is greater than ");
4925 print_generic_expr (stderr, c, 0);
4926 fprintf (stderr," but comes before it.\n");
4927 err = 1;
4928 }
4929 prev = c;
4930 }
4931 /* VRP will remove the default case if it can prove it will
4932 never be executed. So do not verify there always exists
4933 a default case here. */
4934
4935 FOR_EACH_EDGE (e, ei, bb->succs)
4936 {
4937 if (!e->dest->aux)
4938 {
4939 error ("extra outgoing edge %d->%d",
4940 bb->index, e->dest->index);
4941 err = 1;
4942 }
4943
4944 e->dest->aux = (void *)2;
4945 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4946 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4947 {
4948 error ("wrong outgoing edge flags at end of bb %d",
4949 bb->index);
4950 err = 1;
4951 }
4952 }
4953
4954 /* Check that we have all of them. */
4955 for (i = 0; i < n; ++i)
4956 {
4957 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4958 basic_block label_bb = label_to_block (lab);
4959
4960 if (label_bb->aux != (void *)2)
4961 {
4962 error ("missing edge %i->%i", bb->index, label_bb->index);
4963 err = 1;
4964 }
4965 }
4966
4967 FOR_EACH_EDGE (e, ei, bb->succs)
4968 e->dest->aux = (void *)0;
4969 }
4970 break;
4971
4972 case GIMPLE_EH_DISPATCH:
4973 err |= verify_eh_dispatch_edge (stmt);
4974 break;
4975
4976 default:
4977 break;
4978 }
4979 }
4980
4981 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4982 verify_dominators (CDI_DOMINATORS);
4983
4984 return err;
4985 }
4986
4987
4988 /* Updates phi nodes after creating a forwarder block joined
4989 by edge FALLTHRU. */
4990
4991 static void
4992 gimple_make_forwarder_block (edge fallthru)
4993 {
4994 edge e;
4995 edge_iterator ei;
4996 basic_block dummy, bb;
4997 tree var;
4998 gimple_stmt_iterator gsi;
4999
5000 dummy = fallthru->src;
5001 bb = fallthru->dest;
5002
5003 if (single_pred_p (bb))
5004 return;
5005
5006 /* If we redirected a branch we must create new PHI nodes at the
5007 start of BB. */
5008 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5009 {
5010 gimple phi, new_phi;
5011
5012 phi = gsi_stmt (gsi);
5013 var = gimple_phi_result (phi);
5014 new_phi = create_phi_node (var, bb);
5015 SSA_NAME_DEF_STMT (var) = new_phi;
5016 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
5017 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5018 UNKNOWN_LOCATION);
5019 }
5020
5021 /* Add the arguments we have stored on edges. */
5022 FOR_EACH_EDGE (e, ei, bb->preds)
5023 {
5024 if (e == fallthru)
5025 continue;
5026
5027 flush_pending_stmts (e);
5028 }
5029 }
5030
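/* For illustration, assume DUMMY originally contained

     x_1 = PHI <a_2(e1), b_3(e2)>

   After the transformation DUMMY defines a fresh name,

     x_4 = PHI <a_2(e1), b_3(e2)>

   and BB gets the new node x_1 = PHI <x_4(FALLTHRU), ...>, whose
   remaining arguments come from the pending stmts flushed on the
   other predecessor edges. (x_4, e1 and e2 are hypothetical.) */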
5031
5032 /* Return a non-special label at the head of basic block BB.
5033 Create one if it doesn't exist. */
5034
5035 tree
5036 gimple_block_label (basic_block bb)
5037 {
5038 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5039 bool first = true;
5040 tree label;
5041 gimple stmt;
5042
5043 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5044 {
5045 stmt = gsi_stmt (i);
5046 if (gimple_code (stmt) != GIMPLE_LABEL)
5047 break;
5048 label = gimple_label_label (stmt);
5049 if (!DECL_NONLOCAL (label))
5050 {
5051 if (!first)
5052 gsi_move_before (&i, &s);
5053 return label;
5054 }
5055 }
5056
5057 label = create_artificial_label (UNKNOWN_LOCATION);
5058 stmt = gimple_build_label (label);
5059 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5060 return label;
5061 }
5062
5063
5064 /* Attempt to perform edge redirection by replacing a possibly complex
5065 jump instruction by a goto or by removing the jump completely.
5066 This can apply only if all edges now point to the same block. The
5067 parameters and return values are equivalent to
5068 redirect_edge_and_branch. */
5069
5070 static edge
5071 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5072 {
5073 basic_block src = e->src;
5074 gimple_stmt_iterator i;
5075 gimple stmt;
5076
5077 /* We can replace or remove a complex jump only when we have exactly
5078 two edges. */
5079 if (EDGE_COUNT (src->succs) != 2
5080 /* Verify that all targets will be TARGET. Specifically, the
5081 edge that is not E must also go to TARGET. */
5082 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5083 return NULL;
5084
5085 i = gsi_last_bb (src);
5086 if (gsi_end_p (i))
5087 return NULL;
5088
5089 stmt = gsi_stmt (i);
5090
5091 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5092 {
5093 gsi_remove (&i, true);
5094 e = ssa_redirect_edge (e, target);
5095 e->flags = EDGE_FALLTHRU;
5096 return e;
5097 }
5098
5099 return NULL;
5100 }
5101
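/* A sketch of the situation handled above: SRC ends in

     if (x_1 > 0) goto <L1>; else goto <L2>;

   and, after the redirection, both successor edges lead to TARGET.
   The GIMPLE_COND (or GIMPLE_SWITCH) is removed and the single
   remaining edge becomes a plain fallthru to TARGET. */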
5102
5103 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5104 edge representing the redirected branch. */
5105
5106 static edge
5107 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5108 {
5109 basic_block bb = e->src;
5110 gimple_stmt_iterator gsi;
5111 edge ret;
5112 gimple stmt;
5113
5114 if (e->flags & EDGE_ABNORMAL)
5115 return NULL;
5116
5117 if (e->dest == dest)
5118 return NULL;
5119
5120 if (e->flags & EDGE_EH)
5121 return redirect_eh_edge (e, dest);
5122
5123 if (e->src != ENTRY_BLOCK_PTR)
5124 {
5125 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5126 if (ret)
5127 return ret;
5128 }
5129
5130 gsi = gsi_last_bb (bb);
5131 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5132
5133 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5134 {
5135 case GIMPLE_COND:
5136 /* For GIMPLE_COND, we only need to redirect the edge. */
5137 break;
5138
5139 case GIMPLE_GOTO:
5140 /* No non-abnormal edges should lead from a non-simple goto, and
5141 simple ones should be represented implicitly. */
5142 gcc_unreachable ();
5143
5144 case GIMPLE_SWITCH:
5145 {
5146 tree label = gimple_block_label (dest);
5147 tree cases = get_cases_for_edge (e, stmt);
5148
5149 /* If we have a list of cases associated with E, then use it
5150 as it's a lot faster than walking the entire case vector. */
5151 if (cases)
5152 {
5153 edge e2 = find_edge (e->src, dest);
5154 tree last, first;
5155
5156 first = cases;
5157 while (cases)
5158 {
5159 last = cases;
5160 CASE_LABEL (cases) = label;
5161 cases = CASE_CHAIN (cases);
5162 }
5163
5164 /* If there was already an edge in the CFG, then we need
5165 to move all the cases associated with E to E2. */
5166 if (e2)
5167 {
5168 tree cases2 = get_cases_for_edge (e2, stmt);
5169
5170 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5171 CASE_CHAIN (cases2) = first;
5172 }
5173 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5174 }
5175 else
5176 {
5177 size_t i, n = gimple_switch_num_labels (stmt);
5178
5179 for (i = 0; i < n; i++)
5180 {
5181 tree elt = gimple_switch_label (stmt, i);
5182 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5183 CASE_LABEL (elt) = label;
5184 }
5185 }
5186 }
5187 break;
5188
5189 case GIMPLE_ASM:
5190 {
5191 int i, n = gimple_asm_nlabels (stmt);
5192 tree label = NULL;
5193
5194 for (i = 0; i < n; ++i)
5195 {
5196 tree cons = gimple_asm_label_op (stmt, i);
5197 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5198 {
5199 if (!label)
5200 label = gimple_block_label (dest);
5201 TREE_VALUE (cons) = label;
5202 }
5203 }
5204
5205 /* If we didn't find any label matching the former edge in the
5206 asm labels, we must be redirecting the fallthrough
5207 edge. */
5208 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5209 }
5210 break;
5211
5212 case GIMPLE_RETURN:
5213 gsi_remove (&gsi, true);
5214 e->flags |= EDGE_FALLTHRU;
5215 break;
5216
5217 case GIMPLE_OMP_RETURN:
5218 case GIMPLE_OMP_CONTINUE:
5219 case GIMPLE_OMP_SECTIONS_SWITCH:
5220 case GIMPLE_OMP_FOR:
5221 /* The edges from OMP constructs can be simply redirected. */
5222 break;
5223
5224 case GIMPLE_EH_DISPATCH:
5225 if (!(e->flags & EDGE_FALLTHRU))
5226 redirect_eh_dispatch_edge (stmt, e, dest);
5227 break;
5228
5229 case GIMPLE_TRANSACTION:
5230 /* The ABORT edge has a stored label associated with it, otherwise
5231 the edges are simply redirectable. */
5232 if (e->flags == 0)
5233 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5234 break;
5235
5236 default:
5237 /* Otherwise it must be a fallthru edge, and we don't need to
5238 do anything besides redirecting it. */
5239 gcc_assert (e->flags & EDGE_FALLTHRU);
5240 break;
5241 }
5242
5243 /* Update/insert PHI nodes as necessary. */
5244
5245 /* Now update the edges in the CFG. */
5246 e = ssa_redirect_edge (e, dest);
5247
5248 return e;
5249 }
5250
5251 /* Returns true if it is possible to remove edge E by redirecting
5252 it to the destination of the other edge from E->src. */
5253
5254 static bool
5255 gimple_can_remove_branch_p (const_edge e)
5256 {
5257 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5258 return false;
5259
5260 return true;
5261 }
5262
5263 /* Simple wrapper, as we can always redirect fallthru edges. */
5264
5265 static basic_block
5266 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5267 {
5268 e = gimple_redirect_edge_and_branch (e, dest);
5269 gcc_assert (e);
5270
5271 return NULL;
5272 }
5273
5274
5275 /* Splits basic block BB after statement STMT (but at least after the
5276 labels). If STMT is NULL, BB is split just after the labels. */
5277
5278 static basic_block
5279 gimple_split_block (basic_block bb, void *stmt)
5280 {
5281 gimple_stmt_iterator gsi;
5282 gimple_stmt_iterator gsi_tgt;
5283 gimple act;
5284 gimple_seq list;
5285 basic_block new_bb;
5286 edge e;
5287 edge_iterator ei;
5288
5289 new_bb = create_empty_bb (bb);
5290
5291 /* Redirect the outgoing edges. */
5292 new_bb->succs = bb->succs;
5293 bb->succs = NULL;
5294 FOR_EACH_EDGE (e, ei, new_bb->succs)
5295 e->src = new_bb;
5296
5297 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5298 stmt = NULL;
5299
5300 /* Move everything from GSI to the new basic block. */
5301 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5302 {
5303 act = gsi_stmt (gsi);
5304 if (gimple_code (act) == GIMPLE_LABEL)
5305 continue;
5306
5307 if (!stmt)
5308 break;
5309
5310 if (stmt == act)
5311 {
5312 gsi_next (&gsi);
5313 break;
5314 }
5315 }
5316
5317 if (gsi_end_p (gsi))
5318 return new_bb;
5319
5320 /* Split the statement list - avoid re-creating new containers as this
5321 brings ugly quadratic memory consumption in the inliner.
5322 (We are still quadratic since we need to update stmt BB pointers,
5323 sadly.) */
5324 gsi_split_seq_before (&gsi, &list);
5325 set_bb_seq (new_bb, list);
5326 for (gsi_tgt = gsi_start (list);
5327 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5328 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5329
5330 return new_bb;
5331 }
5332
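/* A usage sketch via the CFG hook machinery (S is a hypothetical
   statement in BB):

     basic_block new_bb = split_block (bb, s)->dest;

   Labels and statements up to and including S stay in BB; the rest,
   together with all outgoing edges, move to NEW_BB. A NULL statement
   splits just after the labels. */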
5333
5334 /* Moves basic block BB after block AFTER. */
5335
5336 static bool
5337 gimple_move_block_after (basic_block bb, basic_block after)
5338 {
5339 if (bb->prev_bb == after)
5340 return true;
5341
5342 unlink_block (bb);
5343 link_block (bb, after);
5344
5345 return true;
5346 }
5347
5348
5349 /* Return true if basic block BB can be duplicated. */
5350
5351 static bool
5352 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5353 {
5354 return true;
5355 }
5356
5357 /* Create a duplicate of the basic block BB. NOTE: This does not
5358 preserve SSA form. */
5359
5360 static basic_block
5361 gimple_duplicate_bb (basic_block bb)
5362 {
5363 basic_block new_bb;
5364 gimple_stmt_iterator gsi, gsi_tgt;
5365 gimple_seq phis = phi_nodes (bb);
5366 gimple phi, stmt, copy;
5367
5368 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5369
5370 /* Copy the PHI nodes. We ignore PHI node arguments here because
5371 the incoming edges have not been set up yet. */
5372 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5373 {
5374 phi = gsi_stmt (gsi);
5375 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5376 create_new_def_for (gimple_phi_result (copy), copy,
5377 gimple_phi_result_ptr (copy));
5378 }
5379
5380 gsi_tgt = gsi_start_bb (new_bb);
5381 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5382 {
5383 def_operand_p def_p;
5384 ssa_op_iter op_iter;
5385 tree lhs;
5386
5387 stmt = gsi_stmt (gsi);
5388 if (gimple_code (stmt) == GIMPLE_LABEL)
5389 continue;
5390
5391 /* Don't duplicate label debug stmts. */
5392 if (gimple_debug_bind_p (stmt)
5393 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5394 == LABEL_DECL)
5395 continue;
5396
5397 /* Create a new copy of STMT and duplicate STMT's virtual
5398 operands. */
5399 copy = gimple_copy (stmt);
5400 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5401
5402 maybe_duplicate_eh_stmt (copy, stmt);
5403 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5404
5405 /* When copying around a stmt writing into a local non-user
5406 aggregate, make sure it won't share a stack slot with other
5407 vars. */
5408 lhs = gimple_get_lhs (stmt);
5409 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5410 {
5411 tree base = get_base_address (lhs);
5412 if (base
5413 && (TREE_CODE (base) == VAR_DECL
5414 || TREE_CODE (base) == RESULT_DECL)
5415 && DECL_IGNORED_P (base)
5416 && !TREE_STATIC (base)
5417 && !DECL_EXTERNAL (base)
5418 && (TREE_CODE (base) != VAR_DECL
5419 || !DECL_HAS_VALUE_EXPR_P (base)))
5420 DECL_NONSHAREABLE (base) = 1;
5421 }
5422
5423 /* Create new names for all the definitions created by COPY and
5424 add replacement mappings for each new name. */
5425 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5426 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5427 }
5428
5429 return new_bb;
5430 }
5431
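/* A minimal usage sketch, assuming the caller completes the job:

     basic_block copy = duplicate_block (bb, NULL, NULL);

   duplicate_block dispatches to this hook; afterwards the caller must
   wire up the incoming edges, add PHI arguments (see
   add_phi_args_after_copy_bb below) and update the SSA form. */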
5432 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5433
5434 static void
5435 add_phi_args_after_copy_edge (edge e_copy)
5436 {
5437 basic_block bb, bb_copy = e_copy->src, dest;
5438 edge e;
5439 edge_iterator ei;
5440 gimple phi, phi_copy;
5441 tree def;
5442 gimple_stmt_iterator psi, psi_copy;
5443
5444 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5445 return;
5446
5447 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5448
5449 if (e_copy->dest->flags & BB_DUPLICATED)
5450 dest = get_bb_original (e_copy->dest);
5451 else
5452 dest = e_copy->dest;
5453
5454 e = find_edge (bb, dest);
5455 if (!e)
5456 {
5457 /* During loop unrolling the target of the latch edge is copied.
5458 In this case we are not looking for the edge to DEST, but for
5459 the edge to the duplicated block whose original was DEST. */
5460 FOR_EACH_EDGE (e, ei, bb->succs)
5461 {
5462 if ((e->dest->flags & BB_DUPLICATED)
5463 && get_bb_original (e->dest) == dest)
5464 break;
5465 }
5466
5467 gcc_assert (e != NULL);
5468 }
5469
5470 for (psi = gsi_start_phis (e->dest),
5471 psi_copy = gsi_start_phis (e_copy->dest);
5472 !gsi_end_p (psi);
5473 gsi_next (&psi), gsi_next (&psi_copy))
5474 {
5475 phi = gsi_stmt (psi);
5476 phi_copy = gsi_stmt (psi_copy);
5477 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5478 add_phi_arg (phi_copy, def, e_copy,
5479 gimple_phi_arg_location_from_edge (phi, e));
5480 }
5481 }
5482
5483
5484 /* Basic block BB_COPY was created by code duplication. Add phi node
5485 arguments for edges going out of BB_COPY. The blocks that were
5486 duplicated have BB_DUPLICATED set. */
5487
5488 void
5489 add_phi_args_after_copy_bb (basic_block bb_copy)
5490 {
5491 edge e_copy;
5492 edge_iterator ei;
5493
5494 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5495 {
5496 add_phi_args_after_copy_edge (e_copy);
5497 }
5498 }
5499
5500 /* Blocks in REGION_COPY array of length N_REGION were created by
5501 duplication of basic blocks. Add phi node arguments for edges
5502 going from these blocks. If E_COPY is not NULL, also add
5503 phi node arguments for its destination. */
5504
5505 void
5506 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5507 edge e_copy)
5508 {
5509 unsigned i;
5510
5511 for (i = 0; i < n_region; i++)
5512 region_copy[i]->flags |= BB_DUPLICATED;
5513
5514 for (i = 0; i < n_region; i++)
5515 add_phi_args_after_copy_bb (region_copy[i]);
5516 if (e_copy)
5517 add_phi_args_after_copy_edge (e_copy);
5518
5519 for (i = 0; i < n_region; i++)
5520 region_copy[i]->flags &= ~BB_DUPLICATED;
5521 }
5522
5523 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5524 important exit edge EXIT. By important we mean that no SSA name defined
5525 inside the region is live over the other exit edges of the region. All entry
5526 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5527 to the duplicate of the region. SSA form, dominance and loop information
5528 is updated. The new basic blocks are stored to REGION_COPY in the same
5529 order as they had in REGION, provided that REGION_COPY is not NULL.
5530 The function returns false if it is unable to copy the region,
5531 true otherwise. */
5532
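/* For the primary use, loop header copying, the effect can be pictured
   roughly as transforming

     loop: if (cond) goto body; else goto out;

   into

     if (cond) goto body2; else goto out;   <-- copied header
     ...
     loop: if (cond) goto body; else goto out;

   turning the loop into do-while form. (A sketch; the pass in
   tree-ssa-loop-ch.c is the real driver.) */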
5533 bool
5534 gimple_duplicate_sese_region (edge entry, edge exit,
5535 basic_block *region, unsigned n_region,
5536 basic_block *region_copy)
5537 {
5538 unsigned i;
5539 bool free_region_copy = false, copying_header = false;
5540 struct loop *loop = entry->dest->loop_father;
5541 edge exit_copy;
5542 VEC (basic_block, heap) *doms;
5543 edge redirected;
5544 int total_freq = 0, entry_freq = 0;
5545 gcov_type total_count = 0, entry_count = 0;
5546
5547 if (!can_copy_bbs_p (region, n_region))
5548 return false;
5549
5550 /* Some sanity checking. Note that we do not check for all possible
5551 misuses of this function. That is, if you ask to copy something
5552 weird, it will work, but the state of the structures probably
5553 will not be correct. */
5554 for (i = 0; i < n_region; i++)
5555 {
5556 /* We do not handle subloops, i.e. all the blocks must belong to the
5557 same loop. */
5558 if (region[i]->loop_father != loop)
5559 return false;
5560
5561 if (region[i] != entry->dest
5562 && region[i] == loop->header)
5563 return false;
5564 }
5565
5566 set_loop_copy (loop, loop);
5567
5568 /* If the function is used for loop header copying (the primary use),
5569 ensure that EXIT and its copy will be the new latch and entry edges. */
5570 if (loop->header == entry->dest)
5571 {
5572 copying_header = true;
5573 set_loop_copy (loop, loop_outer (loop));
5574
5575 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5576 return false;
5577
5578 for (i = 0; i < n_region; i++)
5579 if (region[i] != exit->src
5580 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5581 return false;
5582 }
5583
5584 if (!region_copy)
5585 {
5586 region_copy = XNEWVEC (basic_block, n_region);
5587 free_region_copy = true;
5588 }
5589
5590 gcc_assert (!need_ssa_update_p (cfun));
5591
5592 /* Record blocks outside the region that are dominated by something
5593 inside. */
5594 doms = NULL;
5595 initialize_original_copy_tables ();
5596
5597 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5598
5599 if (entry->dest->count)
5600 {
5601 total_count = entry->dest->count;
5602 entry_count = entry->count;
5603 /* Fix up corner cases, to avoid division by zero or creation of negative
5604 frequencies. */
5605 if (entry_count > total_count)
5606 entry_count = total_count;
5607 }
5608 else
5609 {
5610 total_freq = entry->dest->frequency;
5611 entry_freq = EDGE_FREQUENCY (entry);
5612 /* Fix up corner cases, to avoid division by zero or creation of negative
5613 frequencies. */
5614 if (total_freq == 0)
5615 total_freq = 1;
5616 else if (entry_freq > total_freq)
5617 entry_freq = total_freq;
5618 }
5619
5620 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5621 split_edge_bb_loc (entry));
5622 if (total_count)
5623 {
5624 scale_bbs_frequencies_gcov_type (region, n_region,
5625 total_count - entry_count,
5626 total_count);
5627 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5628 total_count);
5629 }
5630 else
5631 {
5632 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5633 total_freq);
5634 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5635 }
5636
5637 if (copying_header)
5638 {
5639 loop->header = exit->dest;
5640 loop->latch = exit->src;
5641 }
5642
5643 /* Redirect the entry and add the phi node arguments. */
5644 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5645 gcc_assert (redirected != NULL);
5646 flush_pending_stmts (entry);
5647
5648 /* Concerning updating of dominators: We must recount dominators
5649 for the entry block and its copy. Anything outside the region
5650 that was dominated by something inside needs recounting as
5651 well. */
5652 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5653 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5654 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5655 VEC_free (basic_block, heap, doms);
5656
5657 /* Add the other PHI node arguments. */
5658 add_phi_args_after_copy (region_copy, n_region, NULL);
5659
5660 /* Update the SSA web. */
5661 update_ssa (TODO_update_ssa);
5662
5663 if (free_region_copy)
5664 free (region_copy);
5665
5666 free_original_copy_tables ();
5667 return true;
5668 }
5669
5670 /* Checks if BB is part of the region defined by N_REGION BBS. */
5671 static bool
5672 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5673 {
5674 unsigned int n;
5675
5676 for (n = 0; n < n_region; n++)
5677 {
5678 if (bb == bbs[n])
5679 return true;
5680 }
5681 return false;
5682 }
5683
5684 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5685 are stored to REGION_COPY in the same order as they appear
5686 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5687 the region, EXIT an exit from it. The condition guarding EXIT
5688 is moved to ENTRY. Returns true if duplication succeeds, false
5689 otherwise.
5690
5691 For example,
5692
5693 some_code;
5694 if (cond)
5695 A;
5696 else
5697 B;
5698
5699 is transformed to
5700
5701 if (cond)
5702 {
5703 some_code;
5704 A;
5705 }
5706 else
5707 {
5708 some_code;
5709 B;
5710 }
5711 */
5712
5713 bool
5714 gimple_duplicate_sese_tail (edge entry, edge exit,
5715 basic_block *region, unsigned n_region,
5716 basic_block *region_copy)
5717 {
5718 unsigned i;
5719 bool free_region_copy = false;
5720 struct loop *loop = exit->dest->loop_father;
5721 struct loop *orig_loop = entry->dest->loop_father;
5722 basic_block switch_bb, entry_bb, nentry_bb;
5723 VEC (basic_block, heap) *doms;
5724 int total_freq = 0, exit_freq = 0;
5725 gcov_type total_count = 0, exit_count = 0;
5726 edge exits[2], nexits[2], e;
5727 gimple_stmt_iterator gsi;
5728 gimple cond_stmt;
5729 edge sorig, snew;
5730 basic_block exit_bb;
5731 gimple_stmt_iterator psi;
5732 gimple phi;
5733 tree def;
5734 struct loop *target, *aloop, *cloop;
5735
5736 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5737 exits[0] = exit;
5738 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5739
5740 if (!can_copy_bbs_p (region, n_region))
5741 return false;
5742
5743 initialize_original_copy_tables ();
5744 set_loop_copy (orig_loop, loop);
5745
5746 target = loop;
5747 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5748 {
5749 if (bb_part_of_region_p (aloop->header, region, n_region))
5750 {
5751 cloop = duplicate_loop (aloop, target);
5752 duplicate_subloops (aloop, cloop);
5753 }
5754 }
5755
5756 if (!region_copy)
5757 {
5758 region_copy = XNEWVEC (basic_block, n_region);
5759 free_region_copy = true;
5760 }
5761
5762 gcc_assert (!need_ssa_update_p (cfun));
5763
5764 /* Record blocks outside the region that are dominated by something
5765 inside. */
5766 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5767
5768 if (exit->src->count)
5769 {
5770 total_count = exit->src->count;
5771 exit_count = exit->count;
5772 /* Fix up corner cases, to avoid division by zero or creation of negative
5773 frequencies. */
5774 if (exit_count > total_count)
5775 exit_count = total_count;
5776 }
5777 else
5778 {
5779 total_freq = exit->src->frequency;
5780 exit_freq = EDGE_FREQUENCY (exit);
5781 /* Fix up corner cases, to avoid division by zero or creation of negative
5782 frequencies. */
5783 if (total_freq == 0)
5784 total_freq = 1;
5785 if (exit_freq > total_freq)
5786 exit_freq = total_freq;
5787 }
5788
5789 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5790 split_edge_bb_loc (exit));
5791 if (total_count)
5792 {
5793 scale_bbs_frequencies_gcov_type (region, n_region,
5794 total_count - exit_count,
5795 total_count);
5796 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5797 total_count);
5798 }
5799 else
5800 {
5801 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5802 total_freq);
5803 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5804 }
5805
5806 /* Create the switch block, and put the exit condition in it. */
5807 entry_bb = entry->dest;
5808 nentry_bb = get_bb_copy (entry_bb);
5809 if (!last_stmt (entry->src)
5810 || !stmt_ends_bb_p (last_stmt (entry->src)))
5811 switch_bb = entry->src;
5812 else
5813 switch_bb = split_edge (entry);
5814 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5815
5816 gsi = gsi_last_bb (switch_bb);
5817 cond_stmt = last_stmt (exit->src);
5818 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5819 cond_stmt = gimple_copy (cond_stmt);
5820
5821 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5822
5823 sorig = single_succ_edge (switch_bb);
5824 sorig->flags = exits[1]->flags;
5825 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5826
5827 /* Register the new edge from SWITCH_BB in loop exit lists. */
5828 rescan_loop_exit (snew, true, false);
5829
5830 /* Add the PHI node arguments. */
5831 add_phi_args_after_copy (region_copy, n_region, snew);
5832
5833 /* Get rid of now superfluous conditions and associated edges (and phi node
5834 arguments). */
5835 exit_bb = exit->dest;
5836
5837 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5838 PENDING_STMT (e) = NULL;
5839
5840 /* The latch of ORIG_LOOP was copied, and so was the backedge
5841 to the original header. We redirect this backedge to EXIT_BB. */
5842 for (i = 0; i < n_region; i++)
5843 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5844 {
5845 gcc_assert (single_succ_edge (region_copy[i]));
5846 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5847 PENDING_STMT (e) = NULL;
5848 for (psi = gsi_start_phis (exit_bb);
5849 !gsi_end_p (psi);
5850 gsi_next (&psi))
5851 {
5852 phi = gsi_stmt (psi);
5853 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5854 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5855 }
5856 }
5857 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5858 PENDING_STMT (e) = NULL;
5859
5860 /* Anything outside the region that was dominated by something
5861 inside needs its dominance info updated. */
5862 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5863 VEC_free (basic_block, heap, doms);
5864 /* Update the SSA web. */
5865 update_ssa (TODO_update_ssa);
5866
5867 if (free_region_copy)
5868 free (region_copy);
5869
5870 free_original_copy_tables ();
5871 return true;
5872 }
5873
5874 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5875 adding blocks when the dominator traversal reaches EXIT. This
5876 function silently assumes that ENTRY strictly dominates EXIT. */
5877
5878 void
5879 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5880 VEC(basic_block,heap) **bbs_p)
5881 {
5882 basic_block son;
5883
5884 for (son = first_dom_son (CDI_DOMINATORS, entry);
5885 son;
5886 son = next_dom_son (CDI_DOMINATORS, son))
5887 {
5888 VEC_safe_push (basic_block, heap, *bbs_p, son);
5889 if (son != exit)
5890 gather_blocks_in_sese_region (son, exit, bbs_p);
5891 }
5892 }
5893
5894 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5895 The duplicates are recorded in VARS_MAP. */
5896
5897 static void
5898 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5899 tree to_context)
5900 {
5901 tree t = *tp, new_t;
5902 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5903 void **loc;
5904
5905 if (DECL_CONTEXT (t) == to_context)
5906 return;
5907
5908 loc = pointer_map_contains (vars_map, t);
5909
5910 if (!loc)
5911 {
5912 loc = pointer_map_insert (vars_map, t);
5913
5914 if (SSA_VAR_P (t))
5915 {
5916 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5917 add_local_decl (f, new_t);
5918 }
5919 else
5920 {
5921 gcc_assert (TREE_CODE (t) == CONST_DECL);
5922 new_t = copy_node (t);
5923 }
5924 DECL_CONTEXT (new_t) = to_context;
5925
5926 *loc = new_t;
5927 }
5928 else
5929 new_t = (tree) *loc;
5930
5931 *tp = new_t;
5932 }
5933
5934
5935 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5936 VARS_MAP maps old ssa names and var_decls to the new ones. */
5937
5938 static tree
5939 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5940 tree to_context)
5941 {
5942 void **loc;
5943 tree new_name, decl = SSA_NAME_VAR (name);
5944
5945 gcc_assert (is_gimple_reg (name));
5946
5947 loc = pointer_map_contains (vars_map, name);
5948
5949 if (!loc)
5950 {
5951 replace_by_duplicate_decl (&decl, vars_map, to_context);
5952
5953 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5954 if (gimple_in_ssa_p (cfun))
5955 add_referenced_var (decl);
5956
5957 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5958 if (SSA_NAME_IS_DEFAULT_DEF (name))
5959 set_default_def (decl, new_name);
5960 pop_cfun ();
5961
5962 loc = pointer_map_insert (vars_map, name);
5963 *loc = new_name;
5964 }
5965 else
5966 new_name = (tree) *loc;
5967
5968 return new_name;
5969 }
5970
5971 struct move_stmt_d
5972 {
5973 tree orig_block;
5974 tree new_block;
5975 tree from_context;
5976 tree to_context;
5977 struct pointer_map_t *vars_map;
5978 htab_t new_label_map;
5979 struct pointer_map_t *eh_map;
5980 bool remap_decls_p;
5981 };
5982
5983 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5984 contained in *TP if it was ORIG_BLOCK previously, and change the
5985 DECL_CONTEXT of every local variable referenced in *TP. */
5986
5987 static tree
5988 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5989 {
5990 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5991 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5992 tree t = *tp;
5993
5994 if (EXPR_P (t))
5995 /* We should never have TREE_BLOCK set on non-statements. */
5996 gcc_assert (!TREE_BLOCK (t));
5997
5998 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5999 {
6000 if (TREE_CODE (t) == SSA_NAME)
6001 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6002 else if (TREE_CODE (t) == LABEL_DECL)
6003 {
6004 if (p->new_label_map)
6005 {
6006 struct tree_map in, *out;
6007 in.base.from = t;
6008 out = (struct tree_map *)
6009 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6010 if (out)
6011 *tp = t = out->to;
6012 }
6013
6014 DECL_CONTEXT (t) = p->to_context;
6015 }
6016 else if (p->remap_decls_p)
6017 {
6018 /* Replace T with its duplicate. T should no longer appear in the
6019 parent function, so this looks wasteful; however, it may appear
6020 in referenced_vars, and more importantly, as virtual operands of
6021 statements, and in alias lists of other variables. It would be
6022 quite difficult to expunge it from all those places. ??? It might
6023 suffice to do this for addressable variables. */
6024 if ((TREE_CODE (t) == VAR_DECL
6025 && !is_global_var (t))
6026 || TREE_CODE (t) == CONST_DECL)
6027 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6028
6029 if (SSA_VAR_P (t)
6030 && gimple_in_ssa_p (cfun))
6031 {
6032 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
6033 add_referenced_var (*tp);
6034 pop_cfun ();
6035 }
6036 }
6037 *walk_subtrees = 0;
6038 }
6039 else if (TYPE_P (t))
6040 *walk_subtrees = 0;
6041
6042 return NULL_TREE;
6043 }
6044
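/* Editorial note (not in the original source): move_stmt_op above and
   move_stmt_r below are driven through walk_gimple_stmt with a
   move_stmt_d hanging off the walk_stmt_info, roughly

	struct walk_stmt_info wi;
	memset (&wi, 0, sizeof (wi));
	wi.info = &d;		/* A struct move_stmt_d.  */
	walk_gimple_stmt (&gsi, move_stmt_r, move_stmt_op, &wi);

   so move_stmt_r sees each statement once and move_stmt_op sees each
   operand tree (see move_block_to_fn below for the real call site).  */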
6045 /* Helper for move_stmt_r. Given an EH region number for the source
6046 function, map that to the duplicate EH region number in the dest. */
6047
6048 static int
6049 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6050 {
6051 eh_region old_r, new_r;
6052 void **slot;
6053
6054 old_r = get_eh_region_from_number (old_nr);
6055 slot = pointer_map_contains (p->eh_map, old_r);
6056 new_r = (eh_region) *slot;
6057
6058 return new_r->index;
6059 }
6060
6061 /* Similar, but operate on INTEGER_CSTs. */
6062
6063 static tree
6064 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6065 {
6066 int old_nr, new_nr;
6067
6068 old_nr = tree_low_cst (old_t_nr, 0);
6069 new_nr = move_stmt_eh_region_nr (old_nr, p);
6070
6071 return build_int_cst (integer_type_node, new_nr);
6072 }
6073
6074 /* Like move_stmt_op, but for gimple statements.
6075
6076 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6077 contained in the current statement in *GSI_P and change the
6078 DECL_CONTEXT of every local variable referenced in the current
6079 statement. */
6080
6081 static tree
6082 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6083 struct walk_stmt_info *wi)
6084 {
6085 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6086 gimple stmt = gsi_stmt (*gsi_p);
6087 tree block = gimple_block (stmt);
6088
6089 if (p->orig_block == NULL_TREE
6090 || block == p->orig_block
6091 || block == NULL_TREE)
6092 gimple_set_block (stmt, p->new_block);
6093 #ifdef ENABLE_CHECKING
6094 else if (block != p->new_block)
6095 {
6096 while (block && block != p->orig_block)
6097 block = BLOCK_SUPERCONTEXT (block);
6098 gcc_assert (block);
6099 }
6100 #endif
6101
6102 switch (gimple_code (stmt))
6103 {
6104 case GIMPLE_CALL:
6105 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6106 {
6107 tree r, fndecl = gimple_call_fndecl (stmt);
6108 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6109 switch (DECL_FUNCTION_CODE (fndecl))
6110 {
6111 case BUILT_IN_EH_COPY_VALUES:
6112 r = gimple_call_arg (stmt, 1);
6113 r = move_stmt_eh_region_tree_nr (r, p);
6114 gimple_call_set_arg (stmt, 1, r);
6115 /* FALLTHRU */
6116
6117 case BUILT_IN_EH_POINTER:
6118 case BUILT_IN_EH_FILTER:
6119 r = gimple_call_arg (stmt, 0);
6120 r = move_stmt_eh_region_tree_nr (r, p);
6121 gimple_call_set_arg (stmt, 0, r);
6122 break;
6123
6124 default:
6125 break;
6126 }
6127 }
6128 break;
6129
6130 case GIMPLE_RESX:
6131 {
6132 int r = gimple_resx_region (stmt);
6133 r = move_stmt_eh_region_nr (r, p);
6134 gimple_resx_set_region (stmt, r);
6135 }
6136 break;
6137
6138 case GIMPLE_EH_DISPATCH:
6139 {
6140 int r = gimple_eh_dispatch_region (stmt);
6141 r = move_stmt_eh_region_nr (r, p);
6142 gimple_eh_dispatch_set_region (stmt, r);
6143 }
6144 break;
6145
6146 case GIMPLE_OMP_RETURN:
6147 case GIMPLE_OMP_CONTINUE:
6148 break;
6149 default:
6150 if (is_gimple_omp (stmt))
6151 {
6152 /* Do not remap variables inside OMP directives. Variables
6153 referenced in clauses and in the directive header belong to the
6154 parent function and should not be moved into the child
6155 function. */
6156 bool save_remap_decls_p = p->remap_decls_p;
6157 p->remap_decls_p = false;
6158 *handled_ops_p = true;
6159
6160 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6161 move_stmt_op, wi);
6162
6163 p->remap_decls_p = save_remap_decls_p;
6164 }
6165 break;
6166 }
6167
6168 return NULL_TREE;
6169 }
6170
6171 /* Move basic block BB from function CFUN to function DEST_FN. The
6172 block is moved out of the original linked list and placed after
6173 block AFTER in the new list. Also, the block is removed from the
6174 original array of blocks and placed in DEST_FN's array of blocks.
6175 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6176 updated to reflect the moved edges.
6177
6178 The local variables are remapped to new instances; VARS_MAP is used
6179 to record the mapping.
6180
6181 static void
6182 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6183 basic_block after, bool update_edge_count_p,
6184 struct move_stmt_d *d)
6185 {
6186 struct control_flow_graph *cfg;
6187 edge_iterator ei;
6188 edge e;
6189 gimple_stmt_iterator si;
6190 unsigned old_len, new_len;
6191
6192 /* Remove BB from dominance structures. */
6193 delete_from_dominance_info (CDI_DOMINATORS, bb);
6194 if (current_loops)
6195 remove_bb_from_loops (bb);
6196
6197 /* Link BB to the new linked list. */
6198 move_block_after (bb, after);
6199
6200 /* Update the edge count in the corresponding flowgraphs. */
6201 if (update_edge_count_p)
6202 FOR_EACH_EDGE (e, ei, bb->succs)
6203 {
6204 cfun->cfg->x_n_edges--;
6205 dest_cfun->cfg->x_n_edges++;
6206 }
6207
6208 /* Remove BB from the original basic block array. */
6209 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
6210 cfun->cfg->x_n_basic_blocks--;
6211
6212 /* Grow DEST_CFUN's basic block array if needed. */
6213 cfg = dest_cfun->cfg;
6214 cfg->x_n_basic_blocks++;
6215 if (bb->index >= cfg->x_last_basic_block)
6216 cfg->x_last_basic_block = bb->index + 1;
6217
6218 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
6219 if ((unsigned) cfg->x_last_basic_block >= old_len)
6220 {
6221 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6222 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
6223 new_len);
6224 }
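  /* Editorial note (not in the original source): the policy above grows
     the array by roughly 25%; e.g. with x_last_basic_block == 20,
     new_len == 20 + (20 + 3) / 4 == 25.  */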
6225
6226 VEC_replace (basic_block, cfg->x_basic_block_info,
6227 bb->index, bb);
6228
6229 /* Remap the variables in phi nodes. */
6230 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6231 {
6232 gimple phi = gsi_stmt (si);
6233 use_operand_p use;
6234 tree op = PHI_RESULT (phi);
6235 ssa_op_iter oi;
6236
6237 if (!is_gimple_reg (op))
6238 {
6239 /* Remove the phi nodes for virtual operands (alias analysis will be
6240 run for the new function, anyway). */
6241 remove_phi_node (&si, true);
6242 continue;
6243 }
6244
6245 SET_PHI_RESULT (phi,
6246 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6247 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6248 {
6249 op = USE_FROM_PTR (use);
6250 if (TREE_CODE (op) == SSA_NAME)
6251 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6252 }
6253
6254 gsi_next (&si);
6255 }
6256
6257 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6258 {
6259 gimple stmt = gsi_stmt (si);
6260 struct walk_stmt_info wi;
6261
6262 memset (&wi, 0, sizeof (wi));
6263 wi.info = d;
6264 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6265
6266 if (gimple_code (stmt) == GIMPLE_LABEL)
6267 {
6268 tree label = gimple_label_label (stmt);
6269 int uid = LABEL_DECL_UID (label);
6270
6271 gcc_assert (uid > -1);
6272
6273 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
6274 if (old_len <= (unsigned) uid)
6275 {
6276 new_len = 3 * uid / 2 + 1;
6277 VEC_safe_grow_cleared (basic_block, gc,
6278 cfg->x_label_to_block_map, new_len);
6279 }
6280
6281 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
6282 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
6283
6284 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6285
6286 if (uid >= dest_cfun->cfg->last_label_uid)
6287 dest_cfun->cfg->last_label_uid = uid + 1;
6288 }
6289
6290 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6291 remove_stmt_from_eh_lp_fn (cfun, stmt);
6292
6293 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6294 gimple_remove_stmt_histograms (cfun, stmt);
6295
6296 /* We cannot leave any operands allocated from the operand caches of
6297 the current function. */
6298 free_stmt_operands (stmt);
6299 push_cfun (dest_cfun);
6300 update_stmt (stmt);
6301 pop_cfun ();
6302 }
6303
6304 FOR_EACH_EDGE (e, ei, bb->succs)
6305 if (e->goto_locus)
6306 {
6307 tree block = e->goto_block;
6308 if (d->orig_block == NULL_TREE
6309 || block == d->orig_block)
6310 e->goto_block = d->new_block;
6311 #ifdef ENABLE_CHECKING
6312 else if (block != d->new_block)
6313 {
6314 while (block && block != d->orig_block)
6315 block = BLOCK_SUPERCONTEXT (block);
6316 gcc_assert (block);
6317 }
6318 #endif
6319 }
6320 }
6321
6322 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6323 the outermost EH region. Use REGION as the incoming base EH region. */
6324
6325 static eh_region
6326 find_outermost_region_in_block (struct function *src_cfun,
6327 basic_block bb, eh_region region)
6328 {
6329 gimple_stmt_iterator si;
6330
6331 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6332 {
6333 gimple stmt = gsi_stmt (si);
6334 eh_region stmt_region;
6335 int lp_nr;
6336
6337 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6338 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6339 if (stmt_region)
6340 {
6341 if (region == NULL)
6342 region = stmt_region;
6343 else if (stmt_region != region)
6344 {
6345 region = eh_region_outermost (src_cfun, stmt_region, region);
6346 gcc_assert (region != NULL);
6347 }
6348 }
6349 }
6350
6351 return region;
6352 }
6353
6354 static tree
6355 new_label_mapper (tree decl, void *data)
6356 {
6357 htab_t hash = (htab_t) data;
6358 struct tree_map *m;
6359 void **slot;
6360
6361 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6362
6363 m = XNEW (struct tree_map);
6364 m->hash = DECL_UID (decl);
6365 m->base.from = decl;
6366 m->to = create_artificial_label (UNKNOWN_LOCATION);
6367 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6368 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6369 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6370
6371 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6372 gcc_assert (*slot == NULL);
6373
6374 *slot = m;
6375
6376 return m->to;
6377 }
6378
6379 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6380 subblocks. */
6381
6382 static void
6383 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6384 tree to_context)
6385 {
6386 tree *tp, t;
6387
6388 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6389 {
6390 t = *tp;
6391 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6392 continue;
6393 replace_by_duplicate_decl (&t, vars_map, to_context);
6394 if (t != *tp)
6395 {
6396 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6397 {
6398 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6399 DECL_HAS_VALUE_EXPR_P (t) = 1;
6400 }
6401 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6402 *tp = t;
6403 }
6404 }
6405
6406 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6407 replace_block_vars_by_duplicates (block, vars_map, to_context);
6408 }
6409
6410 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6411 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6412 single basic block in the original CFG and the new basic block is
6413 returned. DEST_CFUN must not have a CFG yet.
6414
6415 Note that the region need not be a pure SESE region. Blocks inside
6416 the region may contain calls to abort/exit. The only restriction
6417 is that ENTRY_BB should be the only entry point and it must
6418 dominate EXIT_BB.
6419
6420 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6421 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6422 to the new function.
6423
6424 All local variables referenced in the region are assumed to be in
6425 the corresponding BLOCK_VARS and unexpanded variable lists
6426 associated with DEST_CFUN. */
6427
6428 basic_block
6429 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6430 basic_block exit_bb, tree orig_block)
6431 {
6432 VEC(basic_block,heap) *bbs, *dom_bbs;
6433 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6434 basic_block after, bb, *entry_pred, *exit_succ, abb;
6435 struct function *saved_cfun = cfun;
6436 int *entry_flag, *exit_flag;
6437 unsigned *entry_prob, *exit_prob;
6438 unsigned i, num_entry_edges, num_exit_edges;
6439 edge e;
6440 edge_iterator ei;
6441 htab_t new_label_map;
6442 struct pointer_map_t *vars_map, *eh_map;
6443 struct loop *loop = entry_bb->loop_father;
6444 struct move_stmt_d d;
6445
6446 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6447 region. */
6448 gcc_assert (entry_bb != exit_bb
6449 && (!exit_bb
6450 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6451
6452 /* Collect all the blocks in the region. Manually add ENTRY_BB
6453 because it won't be added by dfs_enumerate_from. */
6454 bbs = NULL;
6455 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6456 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6457
6458 /* The blocks that used to be dominated by something in BBS will now be
6459 dominated by the new block. */
6460 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6461 VEC_address (basic_block, bbs),
6462 VEC_length (basic_block, bbs));
6463
6464 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6465 the predecessor edges to ENTRY_BB and the successor edges to
6466 EXIT_BB so that we can re-attach them to the new basic block that
6467 will replace the region. */
6468 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6469 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6470 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6471 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6472 i = 0;
6473 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6474 {
6475 entry_prob[i] = e->probability;
6476 entry_flag[i] = e->flags;
6477 entry_pred[i++] = e->src;
6478 remove_edge (e);
6479 }
6480
6481 if (exit_bb)
6482 {
6483 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6484 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6485 sizeof (basic_block));
6486 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6487 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6488 i = 0;
6489 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6490 {
6491 exit_prob[i] = e->probability;
6492 exit_flag[i] = e->flags;
6493 exit_succ[i++] = e->dest;
6494 remove_edge (e);
6495 }
6496 }
6497 else
6498 {
6499 num_exit_edges = 0;
6500 exit_succ = NULL;
6501 exit_flag = NULL;
6502 exit_prob = NULL;
6503 }
6504
6505 /* Switch context to the child function to initialize DEST_FN's CFG. */
6506 gcc_assert (dest_cfun->cfg == NULL);
6507 push_cfun (dest_cfun);
6508
6509 init_empty_tree_cfg ();
6510
6511 /* Initialize EH information for the new function. */
6512 eh_map = NULL;
6513 new_label_map = NULL;
6514 if (saved_cfun->eh)
6515 {
6516 eh_region region = NULL;
6517
6518 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6519 region = find_outermost_region_in_block (saved_cfun, bb, region);
6520
6521 init_eh_for_function ();
6522 if (region != NULL)
6523 {
6524 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6525 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6526 new_label_mapper, new_label_map);
6527 }
6528 }
6529
6530 pop_cfun ();
6531
6532 /* Move blocks from BBS into DEST_CFUN. */
6533 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6534 after = dest_cfun->cfg->x_entry_block_ptr;
6535 vars_map = pointer_map_create ();
6536
6537 memset (&d, 0, sizeof (d));
6538 d.orig_block = orig_block;
6539 d.new_block = DECL_INITIAL (dest_cfun->decl);
6540 d.from_context = cfun->decl;
6541 d.to_context = dest_cfun->decl;
6542 d.vars_map = vars_map;
6543 d.new_label_map = new_label_map;
6544 d.eh_map = eh_map;
6545 d.remap_decls_p = true;
6546
6547 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6548 {
6549 /* No need to update edge counts on the last block. They have
6550 already been updated earlier when we detached the region from
6551 the original CFG. */
6552 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6553 after = bb;
6554 }
6555
6556 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6557 if (orig_block)
6558 {
6559 tree block;
6560 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6561 == NULL_TREE);
6562 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6563 = BLOCK_SUBBLOCKS (orig_block);
6564 for (block = BLOCK_SUBBLOCKS (orig_block);
6565 block; block = BLOCK_CHAIN (block))
6566 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6567 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6568 }
6569
6570 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6571 vars_map, dest_cfun->decl);
6572
6573 if (new_label_map)
6574 htab_delete (new_label_map);
6575 if (eh_map)
6576 pointer_map_destroy (eh_map);
6577 pointer_map_destroy (vars_map);
6578
6579 /* Rewire the entry and exit blocks. The successor to the entry
6580 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6581 the child function. Similarly, the predecessor of DEST_FN's
6582 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6583 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6584 various CFG manipulation functions get to the right CFG.
6585
6586 FIXME, this is silly. The CFG ought to become a parameter to
6587 these helpers. */
6588 push_cfun (dest_cfun);
6589 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6590 if (exit_bb)
6591 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6592 pop_cfun ();
6593
6594 /* Back in the original function, the SESE region has disappeared;
6595 create a new basic block in its place. */
6596 bb = create_empty_bb (entry_pred[0]);
6597 if (current_loops)
6598 add_bb_to_loop (bb, loop);
6599 for (i = 0; i < num_entry_edges; i++)
6600 {
6601 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6602 e->probability = entry_prob[i];
6603 }
6604
6605 for (i = 0; i < num_exit_edges; i++)
6606 {
6607 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6608 e->probability = exit_prob[i];
6609 }
6610
6611 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6612 FOR_EACH_VEC_ELT (basic_block, dom_bbs, i, abb)
6613 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6614 VEC_free (basic_block, heap, dom_bbs);
6615
6616 if (exit_bb)
6617 {
6618 free (exit_prob);
6619 free (exit_flag);
6620 free (exit_succ);
6621 }
6622 free (entry_prob);
6623 free (entry_flag);
6624 free (entry_pred);
6625 VEC_free (basic_block, heap, bbs);
6626
6627 return bb;
6628 }
6629
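/* Editorial usage sketch (hedged; the OMP expansion in omp-low.c is
   the typical caller): outlining a region into a freshly created
   child function looks roughly like

	struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
	basic_block new_bb
	  = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   where ENTRY_BB dominates EXIT_BB and CHILD_CFUN does not yet have a
   CFG; NEW_BB then stands in for the whole region in the parent.  */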
6630
6631 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6632 tree-pass.h). */
6633
6634 void
6635 dump_function_to_file (tree fn, FILE *file, int flags)
6636 {
6637 tree arg, var;
6638 struct function *dsf;
6639 bool ignore_topmost_bind = false, any_var = false;
6640 basic_block bb;
6641 tree chain;
6642 bool tmclone = TREE_CODE (fn) == FUNCTION_DECL && decl_is_tm_clone (fn);
6643
6644 fprintf (file, "%s %s(", lang_hooks.decl_printable_name (fn, 2),
6645 tmclone ? "[tm-clone] " : "");
6646
6647 arg = DECL_ARGUMENTS (fn);
6648 while (arg)
6649 {
6650 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6651 fprintf (file, " ");
6652 print_generic_expr (file, arg, dump_flags);
6653 if (flags & TDF_VERBOSE)
6654 print_node (file, "", arg, 4);
6655 if (DECL_CHAIN (arg))
6656 fprintf (file, ", ");
6657 arg = DECL_CHAIN (arg);
6658 }
6659 fprintf (file, ")\n");
6660
6661 if (flags & TDF_VERBOSE)
6662 print_node (file, "", fn, 2);
6663
6664 dsf = DECL_STRUCT_FUNCTION (fn);
6665 if (dsf && (flags & TDF_EH))
6666 dump_eh_tree (file, dsf);
6667
6668 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6669 {
6670 dump_node (fn, TDF_SLIM | flags, file);
6671 return;
6672 }
6673
6674 /* Switch CFUN to point to FN. */
6675 push_cfun (DECL_STRUCT_FUNCTION (fn));
6676
6677 /* When GIMPLE is lowered, the variables are no longer available in
6678 BIND_EXPRs, so display them separately. */
6679 if (cfun && cfun->decl == fn && !VEC_empty (tree, cfun->local_decls))
6680 {
6681 unsigned ix;
6682 ignore_topmost_bind = true;
6683
6684 fprintf (file, "{\n");
6685 FOR_EACH_LOCAL_DECL (cfun, ix, var)
6686 {
6687 print_generic_decl (file, var, flags);
6688 if (flags & TDF_VERBOSE)
6689 print_node (file, "", var, 4);
6690 fprintf (file, "\n");
6691
6692 any_var = true;
6693 }
6694 }
6695
6696 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6697 {
6698 /* If the CFG has been built, emit a CFG-based dump. */
6699 check_bb_profile (ENTRY_BLOCK_PTR, file);
6700 if (!ignore_topmost_bind)
6701 fprintf (file, "{\n");
6702
6703 if (any_var && n_basic_blocks)
6704 fprintf (file, "\n");
6705
6706 FOR_EACH_BB (bb)
6707 gimple_dump_bb (bb, file, 2, flags);
6708
6709 fprintf (file, "}\n");
6710 check_bb_profile (EXIT_BLOCK_PTR, file);
6711 }
6712 else if (DECL_SAVED_TREE (fn) == NULL)
6713 {
6714 /* The function is now in GIMPLE form but the CFG has not been
6715 built yet. Emit the single sequence of GIMPLE statements
6716 that make up its body. */
6717 gimple_seq body = gimple_body (fn);
6718
6719 if (gimple_seq_first_stmt (body)
6720 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6721 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6722 print_gimple_seq (file, body, 0, flags);
6723 else
6724 {
6725 if (!ignore_topmost_bind)
6726 fprintf (file, "{\n");
6727
6728 if (any_var)
6729 fprintf (file, "\n");
6730
6731 print_gimple_seq (file, body, 2, flags);
6732 fprintf (file, "}\n");
6733 }
6734 }
6735 else
6736 {
6737 int indent;
6738
6739 /* Make a tree-based dump. */
6740 chain = DECL_SAVED_TREE (fn);
6741
6742 if (chain && TREE_CODE (chain) == BIND_EXPR)
6743 {
6744 if (ignore_topmost_bind)
6745 {
6746 chain = BIND_EXPR_BODY (chain);
6747 indent = 2;
6748 }
6749 else
6750 indent = 0;
6751 }
6752 else
6753 {
6754 if (!ignore_topmost_bind)
6755 fprintf (file, "{\n");
6756 indent = 2;
6757 }
6758
6759 if (any_var)
6760 fprintf (file, "\n");
6761
6762 print_generic_stmt_indented (file, chain, flags, indent);
6763 if (ignore_topmost_bind)
6764 fprintf (file, "}\n");
6765 }
6766
6767 if (flags & TDF_ENUMERATE_LOCALS)
6768 dump_enumerated_decls (file, flags);
6769 fprintf (file, "\n\n");
6770
6771 /* Restore CFUN. */
6772 pop_cfun ();
6773 }
6774
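/* Editorial note (not in the original source): this is handy from a
   debugger, e.g.

	(gdb) call dump_function_to_file (cfun->decl, stderr, TDF_VOPS)

   or, equivalently for the common case, debug_function below.  */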
6775
6776 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree-pass.h). */
6777
6778 DEBUG_FUNCTION void
6779 debug_function (tree fn, int flags)
6780 {
6781 dump_function_to_file (fn, stderr, flags);
6782 }
6783
6784
6785 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6786
6787 static void
6788 print_pred_bbs (FILE *file, basic_block bb)
6789 {
6790 edge e;
6791 edge_iterator ei;
6792
6793 FOR_EACH_EDGE (e, ei, bb->preds)
6794 fprintf (file, "bb_%d ", e->src->index);
6795 }
6796
6797
6798 /* Print on FILE the indexes for the successors of basic_block BB. */
6799
6800 static void
6801 print_succ_bbs (FILE *file, basic_block bb)
6802 {
6803 edge e;
6804 edge_iterator ei;
6805
6806 FOR_EACH_EDGE (e, ei, bb->succs)
6807 fprintf (file, "bb_%d ", e->dest->index);
6808 }
6809
6810 /* Print to FILE the basic block BB according to the VERBOSITY level. */
6811
6812 void
6813 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6814 {
6815 char *s_indent = (char *) alloca ((size_t) indent + 1);
6816 memset ((void *) s_indent, ' ', (size_t) indent);
6817 s_indent[indent] = '\0';
6818
6819 /* Print basic_block's header. */
6820 if (verbosity >= 2)
6821 {
6822 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6823 print_pred_bbs (file, bb);
6824 fprintf (file, "}, succs = {");
6825 print_succ_bbs (file, bb);
6826 fprintf (file, "})\n");
6827 }
6828
6829 /* Print basic_block's body. */
6830 if (verbosity >= 3)
6831 {
6832 fprintf (file, "%s {\n", s_indent);
6833 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6834 fprintf (file, "%s }\n", s_indent);
6835 }
6836 }
6837
6838 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6839
6840 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
6841 the VERBOSITY level, this outputs the contents of the loop, or
6842 just its structure. */
6843
6844 static void
6845 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6846 {
6847 char *s_indent;
6848 basic_block bb;
6849
6850 if (loop == NULL)
6851 return;
6852
6853 s_indent = (char *) alloca ((size_t) indent + 1);
6854 memset ((void *) s_indent, ' ', (size_t) indent);
6855 s_indent[indent] = '\0';
6856
6857 /* Print loop's header. */
6858 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6859 loop->num, loop->header->index, loop->latch->index);
6860 fprintf (file, ", niter = ");
6861 print_generic_expr (file, loop->nb_iterations, 0);
6862
6863 if (loop->any_upper_bound)
6864 {
6865 fprintf (file, ", upper_bound = ");
6866 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6867 }
6868
6869 if (loop->any_estimate)
6870 {
6871 fprintf (file, ", estimate = ");
6872 dump_double_int (file, loop->nb_iterations_estimate, true);
6873 }
6874 fprintf (file, ")\n");
6875
6876 /* Print loop's body. */
6877 if (verbosity >= 1)
6878 {
6879 fprintf (file, "%s{\n", s_indent);
6880 FOR_EACH_BB (bb)
6881 if (bb->loop_father == loop)
6882 print_loops_bb (file, bb, indent, verbosity);
6883
6884 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6885 fprintf (file, "%s}\n", s_indent);
6886 }
6887 }
6888
6889 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6890 spaces. Depending on the VERBOSITY level, this outputs the
6891 contents of the loop, or just its structure. */
6892
6893 static void
6894 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6895 {
6896 if (loop == NULL)
6897 return;
6898
6899 print_loop (file, loop, indent, verbosity);
6900 print_loop_and_siblings (file, loop->next, indent, verbosity);
6901 }
6902
6903 /* Follow a CFG edge from the entry point of the program, and on entry
6904 of a loop, pretty print the loop structure on FILE. */
6905
6906 void
6907 print_loops (FILE *file, int verbosity)
6908 {
6909 basic_block bb;
6910
6911 bb = ENTRY_BLOCK_PTR;
6912 if (bb && bb->loop_father)
6913 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6914 }
6915
6916
6917 /* Debug the loop structure at the tree level, at some VERBOSITY level. */
6918
6919 DEBUG_FUNCTION void
6920 debug_loops (int verbosity)
6921 {
6922 print_loops (stderr, verbosity);
6923 }
6924
6925 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6926
6927 DEBUG_FUNCTION void
6928 debug_loop (struct loop *loop, int verbosity)
6929 {
6930 print_loop (stderr, loop, 0, verbosity);
6931 }
6932
6933 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6934 level. */
6935
6936 DEBUG_FUNCTION void
6937 debug_loop_num (unsigned num, int verbosity)
6938 {
6939 debug_loop (get_loop (num), verbosity);
6940 }
6941
6942 /* Return true if BB ends with a call, possibly followed by some
6943 instructions that must stay with the call. Return false
6944 otherwise. */
6945
6946 static bool
6947 gimple_block_ends_with_call_p (basic_block bb)
6948 {
6949 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6950 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6951 }
6952
6953
6954 /* Return true if BB ends with a conditional branch. Return false
6955 otherwise. */
6956
6957 static bool
6958 gimple_block_ends_with_condjump_p (const_basic_block bb)
6959 {
6960 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6961 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6962 }
6963
6964
6965 /* Return true if we need to add fake edge to exit at statement T.
6966 Helper function for gimple_flow_call_edges_add. */
6967
6968 static bool
6969 need_fake_edge_p (gimple t)
6970 {
6971 tree fndecl = NULL_TREE;
6972 int call_flags = 0;
6973
6974 /* NORETURN and LONGJMP calls already have an edge to exit.
6975 CONST and PURE calls do not need one.
6976 We don't currently check for CONST and PURE here, although
6977 it would be a good idea, because those attributes are
6978 figured out from the RTL in mark_constant_function, and
6979 the counter incrementation code from -fprofile-arcs
6980 leads to different results from -fbranch-probabilities. */
6981 if (is_gimple_call (t))
6982 {
6983 fndecl = gimple_call_fndecl (t);
6984 call_flags = gimple_call_flags (t);
6985 }
6986
6987 if (is_gimple_call (t)
6988 && fndecl
6989 && DECL_BUILT_IN (fndecl)
6990 && (call_flags & ECF_NOTHROW)
6991 && !(call_flags & ECF_RETURNS_TWICE)
6992 /* fork() doesn't really return twice, but the effect of
6993 wrapping it in __gcov_fork() which calls __gcov_flush()
6994 and clears the counters before forking has the same
6995 effect as returning twice. Force a fake edge. */
6996 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6997 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6998 return false;
6999
7000 if (is_gimple_call (t))
7001 {
7002 edge_iterator ei;
7003 edge e;
7004 basic_block bb;
7005
7006 if (!(call_flags & ECF_NORETURN))
7007 return true;
7008
7009 bb = gimple_bb (t);
7010 FOR_EACH_EDGE (e, ei, bb->succs)
7011 if ((e->flags & EDGE_FAKE) == 0)
7012 return true;
7013 }
7014
7015 if (gimple_code (t) == GIMPLE_ASM
7016 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7017 return true;
7018
7019 return false;
7020 }
7021
7022
7023 /* Add fake edges to the function exit for any non-constant and
7024 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7025 for volatile inline assembly, in the bitmap of blocks specified by
7026 BLOCKS, or in the whole CFG if BLOCKS is zero. Return the number
7027 of blocks that were split.
7028
7029 The goal is to expose cases in which entering a basic block does
7030 not imply that all subsequent instructions must be executed. */
7031
7032 static int
7033 gimple_flow_call_edges_add (sbitmap blocks)
7034 {
7035 int i;
7036 int blocks_split = 0;
7037 int last_bb = last_basic_block;
7038 bool check_last_block = false;
7039
7040 if (n_basic_blocks == NUM_FIXED_BLOCKS)
7041 return 0;
7042
7043 if (! blocks)
7044 check_last_block = true;
7045 else
7046 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7047
7048 /* In the last basic block, before epilogue generation, there will be
7049 a fallthru edge to EXIT. Special care is required if the last insn
7050 of the last basic block is a call because make_edge folds duplicate
7051 edges, which would result in the fallthru edge also being marked
7052 fake, which would result in the fallthru edge being removed by
7053 remove_fake_edges, which would result in an invalid CFG.
7054
7055 Moreover, we can't elide the outgoing fake edge, since the block
7056 profiler needs to take this into account in order to solve the minimal
7057 spanning tree in the case that the call doesn't return.
7058
7059 Handle this by adding a dummy instruction in a new last basic block. */
7060 if (check_last_block)
7061 {
7062 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7063 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7064 gimple t = NULL;
7065
7066 if (!gsi_end_p (gsi))
7067 t = gsi_stmt (gsi);
7068
7069 if (t && need_fake_edge_p (t))
7070 {
7071 edge e;
7072
7073 e = find_edge (bb, EXIT_BLOCK_PTR);
7074 if (e)
7075 {
7076 gsi_insert_on_edge (e, gimple_build_nop ());
7077 gsi_commit_edge_inserts ();
7078 }
7079 }
7080 }
7081
7082 /* Now add fake edges to the function exit for any non-constant
7083 calls, since there is no way that we can determine whether they
7084 will return or not... */
7085 for (i = 0; i < last_bb; i++)
7086 {
7087 basic_block bb = BASIC_BLOCK (i);
7088 gimple_stmt_iterator gsi;
7089 gimple stmt, last_stmt;
7090
7091 if (!bb)
7092 continue;
7093
7094 if (blocks && !TEST_BIT (blocks, i))
7095 continue;
7096
7097 gsi = gsi_last_nondebug_bb (bb);
7098 if (!gsi_end_p (gsi))
7099 {
7100 last_stmt = gsi_stmt (gsi);
7101 do
7102 {
7103 stmt = gsi_stmt (gsi);
7104 if (need_fake_edge_p (stmt))
7105 {
7106 edge e;
7107
7108 /* The handling above of the final block before the
7109 epilogue should be enough to verify that there is
7110 no edge to the exit block in the CFG already.
7111 Calling make_edge in such a case would cause us to
7112 mark that edge as fake and remove it later. */
7113 #ifdef ENABLE_CHECKING
7114 if (stmt == last_stmt)
7115 {
7116 e = find_edge (bb, EXIT_BLOCK_PTR);
7117 gcc_assert (e == NULL);
7118 }
7119 #endif
7120
7121 /* Note that the following may create a new basic block
7122 and renumber the existing basic blocks. */
7123 if (stmt != last_stmt)
7124 {
7125 e = split_block (bb, stmt);
7126 if (e)
7127 blocks_split++;
7128 }
7129 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7130 }
7131 gsi_prev (&gsi);
7132 }
7133 while (!gsi_end_p (gsi));
7134 }
7135 }
7136
7137 if (blocks_split)
7138 verify_flow_info ();
7139
7140 return blocks_split;
7141 }
7142
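/* Editorial note (not in the original source): this function is not
   called directly; it is installed below in gimple_cfg_hooks as the
   flow_call_edges_add hook, so generic code reaches it through the
   cfghooks wrapper, roughly

	int blocks_split = flow_call_edges_add (NULL);

   where a NULL bitmap means "consider the whole CFG".  */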
7143 /* Removes edge E and all the blocks dominated by it, and updates dominance
7144 information. The IL in E->src needs to be updated separately.
7145 If dominance info is not available, only the edge E is removed. */
7146
7147 void
7148 remove_edge_and_dominated_blocks (edge e)
7149 {
7150 VEC (basic_block, heap) *bbs_to_remove = NULL;
7151 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
7152 bitmap df, df_idom;
7153 edge f;
7154 edge_iterator ei;
7155 bool none_removed = false;
7156 unsigned i;
7157 basic_block bb, dbb;
7158 bitmap_iterator bi;
7159
7160 if (!dom_info_available_p (CDI_DOMINATORS))
7161 {
7162 remove_edge (e);
7163 return;
7164 }
7165
7166 /* No updating is needed for edges to exit. */
7167 if (e->dest == EXIT_BLOCK_PTR)
7168 {
7169 if (cfgcleanup_altered_bbs)
7170 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7171 remove_edge (e);
7172 return;
7173 }
7174
7175 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7176 that is not dominated by E->dest, then this set is empty. Otherwise,
7177 all the basic blocks dominated by E->dest are removed.
7178
7179 Also, to DF_IDOM we store the immediate dominators of the blocks in
7180 the dominance frontier of E (i.e., of the successors of the
7181 removed blocks, if there are any, and of E->dest otherwise). */
7182 FOR_EACH_EDGE (f, ei, e->dest->preds)
7183 {
7184 if (f == e)
7185 continue;
7186
7187 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7188 {
7189 none_removed = true;
7190 break;
7191 }
7192 }
7193
7194 df = BITMAP_ALLOC (NULL);
7195 df_idom = BITMAP_ALLOC (NULL);
7196
7197 if (none_removed)
7198 bitmap_set_bit (df_idom,
7199 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7200 else
7201 {
7202 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7203 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7204 {
7205 FOR_EACH_EDGE (f, ei, bb->succs)
7206 {
7207 if (f->dest != EXIT_BLOCK_PTR)
7208 bitmap_set_bit (df, f->dest->index);
7209 }
7210 }
7211 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7212 bitmap_clear_bit (df, bb->index);
7213
7214 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7215 {
7216 bb = BASIC_BLOCK (i);
7217 bitmap_set_bit (df_idom,
7218 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7219 }
7220 }
7221
7222 if (cfgcleanup_altered_bbs)
7223 {
7224 /* Record the set of the altered basic blocks. */
7225 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7226 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7227 }
7228
7229 /* Remove E and the cancelled blocks. */
7230 if (none_removed)
7231 remove_edge (e);
7232 else
7233 {
7234 /* Walk backwards so as to get a chance to substitute all
7235 released DEFs into debug stmts. See
7236 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7237 details. */
7238 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
7239 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
7240 }
7241
7242 /* Update the dominance information. The immediate dominator may change only
7243 for blocks whose immediate dominator belongs to DF_IDOM:
7244
7245 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7246 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7247 Z dominates X after the removal. Before removal, there exists a path P
7248 from Y to X that avoids Z. Let F be the last edge on P that is
7249 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7250 dominates W, and because of P, Z does not dominate W), and W belongs to
7251 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7252 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7253 {
7254 bb = BASIC_BLOCK (i);
7255 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7256 dbb;
7257 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7258 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
7259 }
7260
7261 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7262
7263 BITMAP_FREE (df);
7264 BITMAP_FREE (df_idom);
7265 VEC_free (basic_block, heap, bbs_to_remove);
7266 VEC_free (basic_block, heap, bbs_to_fix_dom);
7267 }
7268
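/* Editorial usage sketch (not in the original source): the purge
   helpers below drive this routine while scanning a block's successor
   edges, e.g.

	for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	  if (e->flags & EDGE_EH)
	    remove_edge_and_dominated_blocks (e);
	  else
	    ei_next (&ei);

   The iterator is advanced only when the edge is kept, because the
   removal shrinks the successor vector in place.  */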
7269 /* Purge dead EH edges from basic block BB. */
7270
7271 bool
7272 gimple_purge_dead_eh_edges (basic_block bb)
7273 {
7274 bool changed = false;
7275 edge e;
7276 edge_iterator ei;
7277 gimple stmt = last_stmt (bb);
7278
7279 if (stmt && stmt_can_throw_internal (stmt))
7280 return false;
7281
7282 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7283 {
7284 if (e->flags & EDGE_EH)
7285 {
7286 remove_edge_and_dominated_blocks (e);
7287 changed = true;
7288 }
7289 else
7290 ei_next (&ei);
7291 }
7292
7293 return changed;
7294 }
7295
7296 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7297
7298 bool
7299 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7300 {
7301 bool changed = false;
7302 unsigned i;
7303 bitmap_iterator bi;
7304
7305 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7306 {
7307 basic_block bb = BASIC_BLOCK (i);
7308
7309 /* Earlier gimple_purge_dead_eh_edges could have removed
7310 this basic block already. */
7311 gcc_assert (bb || changed);
7312 if (bb != NULL)
7313 changed |= gimple_purge_dead_eh_edges (bb);
7314 }
7315
7316 return changed;
7317 }
7318
7319 /* Purge dead abnormal call edges from basic block BB. */
7320
7321 bool
7322 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7323 {
7324 bool changed = false;
7325 edge e;
7326 edge_iterator ei;
7327 gimple stmt = last_stmt (bb);
7328
7329 if (!cfun->has_nonlocal_label)
7330 return false;
7331
7332 if (stmt && stmt_can_make_abnormal_goto (stmt))
7333 return false;
7334
7335 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7336 {
7337 if (e->flags & EDGE_ABNORMAL)
7338 {
7339 remove_edge_and_dominated_blocks (e);
7340 changed = true;
7341 }
7342 else
7343 ei_next (&ei);
7344 }
7345
7346 return changed;
7347 }
7348
7349 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7350
7351 bool
7352 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7353 {
7354 bool changed = false;
7355 unsigned i;
7356 bitmap_iterator bi;
7357
7358 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7359 {
7360 basic_block bb = BASIC_BLOCK (i);
7361
7362 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7363 this basic block already. */
7364 gcc_assert (bb || changed);
7365 if (bb != NULL)
7366 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7367 }
7368
7369 return changed;
7370 }
7371
7372 /* This function is called whenever a new edge is created or
7373 redirected. */
7374
7375 static void
7376 gimple_execute_on_growing_pred (edge e)
7377 {
7378 basic_block bb = e->dest;
7379
7380 if (!gimple_seq_empty_p (phi_nodes (bb)))
7381 reserve_phi_args_for_new_edge (bb);
7382 }
7383
7384 /* This function is called immediately before edge E is removed from
7385 the edge vector E->dest->preds. */
7386
7387 static void
7388 gimple_execute_on_shrinking_pred (edge e)
7389 {
7390 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7391 remove_phi_args (e);
7392 }
7393
7394 /*---------------------------------------------------------------------------
7395 Helper functions for Loop versioning
7396 ---------------------------------------------------------------------------*/
7397
7398 /* Adjust phi nodes for the 'first' basic block. The 'second' basic
7399 block is a copy of 'first'; both are dominated by the 'new_head'
7400 basic block. 'new_head' was created by splitting 'second's incoming
7401 edge, so it received phi arguments on that edge from split_edge().
7402 Later, an additional edge 'e' was created to connect 'new_head' and
7403 'first'. This routine adds, on edge 'e', the phi args that the
7404 'new_head' to 'second' edge received as part of the edge splitting. */
7405
7406 static void
7407 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7408 basic_block new_head, edge e)
7409 {
7410 gimple phi1, phi2;
7411 gimple_stmt_iterator psi1, psi2;
7412 tree def;
7413 edge e2 = find_edge (new_head, second);
7414
7415 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7416 edge, we should always have an edge from NEW_HEAD to SECOND. */
7417 gcc_assert (e2 != NULL);
7418
7419 /* Walk the phi nodes of 'second' and add phi args to edge 'e' for
7420 'first'. PHI args are always in the correct order. */
7421
7422 for (psi2 = gsi_start_phis (second),
7423 psi1 = gsi_start_phis (first);
7424 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7425 gsi_next (&psi2), gsi_next (&psi1))
7426 {
7427 phi1 = gsi_stmt (psi1);
7428 phi2 = gsi_stmt (psi2);
7429 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7430 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7431 }
7432 }
7433
7434
7435 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7436 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7437 the destination of the ELSE part. */
7438
7439 static void
7440 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7441 basic_block second_head ATTRIBUTE_UNUSED,
7442 basic_block cond_bb, void *cond_e)
7443 {
7444 gimple_stmt_iterator gsi;
7445 gimple new_cond_expr;
7446 tree cond_expr = (tree) cond_e;
7447 edge e0;
7448
7449 /* Build the new conditional expr. */
7450 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7451 NULL_TREE, NULL_TREE);
7452
7453 /* Add new cond in cond_bb. */
7454 gsi = gsi_last_bb (cond_bb);
7455 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7456
7457 /* Adjust edges appropriately to connect new head with first head
7458 as well as second head. */
7459 e0 = single_succ_edge (cond_bb);
7460 e0->flags &= ~EDGE_FALLTHRU;
7461 e0->flags |= EDGE_FALSE_VALUE;
7462 }
7463
7464 struct cfg_hooks gimple_cfg_hooks = {
7465 "gimple",
7466 gimple_verify_flow_info,
7467 gimple_dump_bb, /* dump_bb */
7468 create_bb, /* create_basic_block */
7469 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7470 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7471 gimple_can_remove_branch_p, /* can_remove_branch_p */
7472 remove_bb, /* delete_basic_block */
7473 gimple_split_block, /* split_block */
7474 gimple_move_block_after, /* move_block_after */
7475 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7476 gimple_merge_blocks, /* merge_blocks */
7477 gimple_predict_edge, /* predict_edge */
7478 gimple_predicted_by_p, /* predicted_by_p */
7479 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7480 gimple_duplicate_bb, /* duplicate_block */
7481 gimple_split_edge, /* split_edge */
7482 gimple_make_forwarder_block, /* make_forward_block */
7483 NULL, /* tidy_fallthru_edge */
7484 NULL, /* force_nonfallthru */
7485 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7486 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7487 gimple_flow_call_edges_add, /* flow_call_edges_add */
7488 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7489 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7490 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7491 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7492 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7493 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7494 flush_pending_stmts /* flush_pending_stmts */
7495 };
7496
7497
7498 /* Split all critical edges. */
7499
7500 static unsigned int
7501 split_critical_edges (void)
7502 {
7503 basic_block bb;
7504 edge e;
7505 edge_iterator ei;
7506
7507 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7508 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7509 mappings around the calls to split_edge. */
7510 start_recording_case_labels ();
7511 FOR_ALL_BB (bb)
7512 {
7513 FOR_EACH_EDGE (e, ei, bb->succs)
7514 {
7515 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7516 split_edge (e);
7517 /* PRE inserts statements on edges and expects that,
7518 since split_critical_edges was done beforehand, committing edge
7519 insertions will not split more edges. In addition to critical
7520 edges we must split edges that have multiple successors and
7521 end by control flow statements, such as RESX.
7522 Go ahead and split them too. This matches the logic in
7523 gimple_find_edge_insert_loc. */
7524 else if ((!single_pred_p (e->dest)
7525 || !gimple_seq_empty_p (phi_nodes (e->dest))
7526 || e->dest == EXIT_BLOCK_PTR)
7527 && e->src != ENTRY_BLOCK_PTR
7528 && !(e->flags & EDGE_ABNORMAL))
7529 {
7530 gimple_stmt_iterator gsi;
7531
7532 gsi = gsi_last_bb (e->src);
7533 if (!gsi_end_p (gsi)
7534 && stmt_ends_bb_p (gsi_stmt (gsi))
7535 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7536 && !gimple_call_builtin_p (gsi_stmt (gsi),
7537 BUILT_IN_RETURN)))
7538 split_edge (e);
7539 }
7540 }
7541 }
7542 end_recording_case_labels ();
7543 return 0;
7544 }
7545
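/* Editorial note (not in the original source): an edge is critical
   when its source has more than one successor and its destination has
   more than one predecessor, e.g.

	A   B
	 \ / \
	  C   D

   Here B->C is critical: code inserted on it has nowhere to live
   without affecting either the A->C or the B->D path, so split_edge
   interposes a fresh empty block on B->C.  */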
7546 struct gimple_opt_pass pass_split_crit_edges =
7547 {
7548 {
7549 GIMPLE_PASS,
7550 "crited", /* name */
7551 NULL, /* gate */
7552 split_critical_edges, /* execute */
7553 NULL, /* sub */
7554 NULL, /* next */
7555 0, /* static_pass_number */
7556 TV_TREE_SPLIT_EDGES, /* tv_id */
7557 PROP_cfg, /* properties_required */
7558 PROP_no_crit_edges, /* properties_provided */
7559 0, /* properties_destroyed */
7560 0, /* todo_flags_start */
7561 TODO_verify_flow /* todo_flags_finish */
7562 }
7563 };
7564
7565
7566 /* Build a ternary operation and gimplify it. Emit code before GSI.
7567 Return the gimple_val holding the result. */
7568
7569 tree
7570 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7571 tree type, tree a, tree b, tree c)
7572 {
7573 tree ret;
7574 location_t loc = gimple_location (gsi_stmt (*gsi));
7575
7576 ret = fold_build3_loc (loc, code, type, a, b, c);
7577 STRIP_NOPS (ret);
7578
7579 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7580 GSI_SAME_STMT);
7581 }
7582
7583 /* Build a binary operation and gimplify it. Emit code before GSI.
7584 Return the gimple_val holding the result. */
7585
7586 tree
7587 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7588 tree type, tree a, tree b)
7589 {
7590 tree ret;
7591
7592 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7593 STRIP_NOPS (ret);
7594
7595 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7596 GSI_SAME_STMT);
7597 }
7598
7599 /* Build a unary operation and gimplify it. Emit code before GSI.
7600 Return the gimple_val holding the result. */
7601
7602 tree
7603 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7604 tree a)
7605 {
7606 tree ret;
7607
7608 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7609 STRIP_NOPS (ret);
7610
7611 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7612 GSI_SAME_STMT);
7613 }
7614
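/* Editorial usage sketch for the helpers above (not in the original
   source): to emit T = (A + B) * C before the statement at *GSI,
   assuming A, B and C are gimple values of type TYPE:

	tree sum = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
	tree t = gimplify_build2 (gsi, MULT_EXPR, type, sum, c);

   Each call folds its expression and forces the result into a
   gimple_val, inserting any auxiliary statements before *GSI.  */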
7615
7616 \f
7617 /* Emit return warnings. */
7618
7619 static unsigned int
7620 execute_warn_function_return (void)
7621 {
7622 source_location location;
7623 gimple last;
7624 edge e;
7625 edge_iterator ei;
7626
7627 /* If we have a path to EXIT, then we do return. */
7628 if (TREE_THIS_VOLATILE (cfun->decl)
7629 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7630 {
7631 location = UNKNOWN_LOCATION;
7632 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7633 {
7634 last = last_stmt (e->src);
7635 if ((gimple_code (last) == GIMPLE_RETURN
7636 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7637 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7638 break;
7639 }
7640 if (location == UNKNOWN_LOCATION)
7641 location = cfun->function_end_locus;
7642 warning_at (location, 0, "%<noreturn%> function does return");
7643 }
7644
7645 /* If we see "return;" in some basic block, then we do reach the end
7646 without returning a value. */
7647 else if (warn_return_type
7648 && !TREE_NO_WARNING (cfun->decl)
7649 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7650 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7651 {
7652 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7653 {
7654 gimple last = last_stmt (e->src);
7655 if (gimple_code (last) == GIMPLE_RETURN
7656 && gimple_return_retval (last) == NULL
7657 && !gimple_no_warning_p (last))
7658 {
7659 location = gimple_location (last);
7660 if (location == UNKNOWN_LOCATION)
7661 location = cfun->function_end_locus;
7662 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7663 TREE_NO_WARNING (cfun->decl) = 1;
7664 break;
7665 }
7666 }
7667 }
7668 return 0;
7669 }
7670
7671
7672 /* Given a basic block B which ends with a conditional and has
7673 precisely two successors, determine which of the edges is taken if
7674 the conditional is true and which is taken if the conditional is
7675 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7676
7677 void
7678 extract_true_false_edges_from_block (basic_block b,
7679 edge *true_edge,
7680 edge *false_edge)
7681 {
7682 edge e = EDGE_SUCC (b, 0);
7683
7684 if (e->flags & EDGE_TRUE_VALUE)
7685 {
7686 *true_edge = e;
7687 *false_edge = EDGE_SUCC (b, 1);
7688 }
7689 else
7690 {
7691 *false_edge = e;
7692 *true_edge = EDGE_SUCC (b, 1);
7693 }
7694 }
7695
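/* Editorial usage sketch (not in the original source): given a block
   B that ends in a GIMPLE_COND,

	edge true_edge, false_edge;
	extract_true_false_edges_from_block (b, &true_edge, &false_edge);

   leaves true_edge->dest as the block reached when the condition
   holds, and false_edge->dest as the block reached otherwise.  */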
7696 struct gimple_opt_pass pass_warn_function_return =
7697 {
7698 {
7699 GIMPLE_PASS,
7700 "*warn_function_return", /* name */
7701 NULL, /* gate */
7702 execute_warn_function_return, /* execute */
7703 NULL, /* sub */
7704 NULL, /* next */
7705 0, /* static_pass_number */
7706 TV_NONE, /* tv_id */
7707 PROP_cfg, /* properties_required */
7708 0, /* properties_provided */
7709 0, /* properties_destroyed */
7710 0, /* todo_flags_start */
7711 0 /* todo_flags_finish */
7712 }
7713 };
7714
7715 /* Emit noreturn warnings. */
7716
7717 static unsigned int
7718 execute_warn_function_noreturn (void)
7719 {
7720 if (!TREE_THIS_VOLATILE (current_function_decl)
7721 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7722 warn_function_noreturn (current_function_decl);
7723 return 0;
7724 }
7725
7726 static bool
7727 gate_warn_function_noreturn (void)
7728 {
7729 return warn_suggest_attribute_noreturn;
7730 }
7731
7732 struct gimple_opt_pass pass_warn_function_noreturn =
7733 {
7734 {
7735 GIMPLE_PASS,
7736 "*warn_function_noreturn", /* name */
7737 gate_warn_function_noreturn, /* gate */
7738 execute_warn_function_noreturn, /* execute */
7739 NULL, /* sub */
7740 NULL, /* next */
7741 0, /* static_pass_number */
7742 TV_NONE, /* tv_id */
7743 PROP_cfg, /* properties_required */
7744 0, /* properties_provided */
7745 0, /* properties_destroyed */
7746 0, /* todo_flags_start */
7747 0 /* todo_flags_finish */
7748 }
7749 };
7750
7751
7752 /* Walk a gimplified function and warn about calls whose return value
7753 is ignored and whose callee has attribute((warn_unused_result)) set.
7754 This is done before inlining, so we don't have to worry about that. */
7755
7756 static void
7757 do_warn_unused_result (gimple_seq seq)
7758 {
7759 tree fdecl, ftype;
7760 gimple_stmt_iterator i;
7761
7762 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7763 {
7764 gimple g = gsi_stmt (i);
7765
7766 switch (gimple_code (g))
7767 {
7768 case GIMPLE_BIND:
7769 do_warn_unused_result (gimple_bind_body (g));
7770 break;
7771 case GIMPLE_TRY:
7772 do_warn_unused_result (gimple_try_eval (g));
7773 do_warn_unused_result (gimple_try_cleanup (g));
7774 break;
7775 case GIMPLE_CATCH:
7776 do_warn_unused_result (gimple_catch_handler (g));
7777 break;
7778 case GIMPLE_EH_FILTER:
7779 do_warn_unused_result (gimple_eh_filter_failure (g));
7780 break;
7781
7782 case GIMPLE_CALL:
7783 if (gimple_call_lhs (g))
7784 break;
7785 if (gimple_call_internal_p (g))
7786 break;
7787
7788 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7789 LHS. All calls whose value is ignored should be
7790 represented like this. Look for the attribute. */
7791 fdecl = gimple_call_fndecl (g);
7792 ftype = gimple_call_fntype (g);
7793
7794 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7795 {
7796 location_t loc = gimple_location (g);
7797
7798 if (fdecl)
7799 warning_at (loc, OPT_Wunused_result,
7800 "ignoring return value of %qD, "
7801 "declared with attribute warn_unused_result",
7802 fdecl);
7803 else
7804 warning_at (loc, OPT_Wunused_result,
7805 "ignoring return value of function "
7806 "declared with attribute warn_unused_result");
7807 }
7808 break;
7809
7810 default:
7811 /* Not a container, not a call, or a call whose value is used. */
7812 break;
7813 }
7814 }
7815 }
7816
7817 static unsigned int
7818 run_warn_unused_result (void)
7819 {
7820 do_warn_unused_result (gimple_body (current_function_decl));
7821 return 0;
7822 }
7823
7824 static bool
7825 gate_warn_unused_result (void)
7826 {
7827 return flag_warn_unused_result;
7828 }
7829
7830 struct gimple_opt_pass pass_warn_unused_result =
7831 {
7832 {
7833 GIMPLE_PASS,
7834 "*warn_unused_result", /* name */
7835 gate_warn_unused_result, /* gate */
7836 run_warn_unused_result, /* execute */
7837 NULL, /* sub */
7838 NULL, /* next */
7839 0, /* static_pass_number */
7840 TV_NONE, /* tv_id */
7841 PROP_gimple_any, /* properties_required */
7842 0, /* properties_provided */
7843 0, /* properties_destroyed */
7844 0, /* todo_flags_start */
7845 0, /* todo_flags_finish */
7846 }
7847 };