gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"
#include "target.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
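
/* For illustration only (a sketch, not authoritative): given

     switch (x) { case 1: case 2: goto L; ... }

   both CASE_LABEL_EXPRs reference the edge leading to L's block, so the
   table maps that edge to the chain "case 2 -> case 1", linked through
   CASE_CHAIN with the most recently recorded case first (see
   get_cases_for_edge below).  */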

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* True if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store the last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

/* Build an empty CFG for function FN: allocate the basic block and
   label-to-block arrays and wire up the ENTRY and EXIT blocks.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

/* Likewise, but for the current function CFUN.  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                             Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = false;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }
}

/* Execute function for the pass that builds the CFG from the gimple
   body of the current function.  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg	/* todo_flags_finish */
 }
};

/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */
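
/* A sketch of the transformation in source-level terms (illustrative
   pseudo-code, not actual GIMPLE syntax):

     goto *p1;                  gotovar = p1; goto factored;
     ...              becomes   ...
     goto *p2;                  gotovar = p2; goto factored;
                                factored: goto *gotovar;

   so abnormal edges to every potential target are needed only from the
   single factored block, not from every computed goto.  */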

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for its goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */
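
/* For illustration (a simplified sketch): a statement that must start a
   new block (e.g. a GIMPLE_LABEL) or any statement following one that
   must end a block (e.g. a GIMPLE_COND, or a call that can throw) causes
   the sequence to be split at that point:

     a = b + c;
     L1:              <- label: starts a new block
     if (a > 0) ...   <- control statement: ends its block

   The precise rules live in stmt_starts_bb_p and stmt_ends_bb_p.  */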

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
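  /* For illustration: the vector grows by roughly a quarter each time,
     e.g. a full array of 20 entries is regrown to
     20 + (20 + 3) / 4 == 25 entries.  */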
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILT_IN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), 0);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
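
/* For illustration: if three distinct basic blocks all begin at line 42
   of the same file, successive calls for that locus return 1, 2 and 3,
   so a sample-based profiler can attribute counts to each block rather
   than lumping them together on one source line.  */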

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                              (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table as we delete the
   edge-to-cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */

void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  location_t stmt_loc = gimple_location (stmt);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
      assign_discriminator (stmt_loc, label_bb);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */
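
/* For illustration (a hypothetical input): if a block starts with

     L1: L2: L3: x = 1;

   and only L2 is user-defined, L2 is chosen as the leading label, every
   reference to L1 or L3 is redirected to L2, and L1 and L3 are then
   deleted as unused artificial labels.  */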

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user-defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels, labels that
     can be the target of non-local gotos, and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;
  bool has_default;

  /* The default label is always the first case in a switch
     statement after gimplification if it was not optimized
     away.  */
  if (!CASE_LOW (gimple_switch_default_label (stmt))
      && !CASE_HIGH (gimple_switch_default_label (stmt)))
    {
      tree default_case = gimple_switch_default_label (stmt);
      default_bb = label_to_block (CASE_LABEL (default_case));
      has_default = true;
    }
  else
    has_default = false;

  /* Look for possible opportunities to merge cases.  */
  if (has_default)
    i = 1;
  else
    i = 0;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = double_int_add (tree_to_double_int (base_high),
                                            double_int_one);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && double_int_equal_p (tree_to_double_int (CASE_LOW (merge_case)),
                                     bhp1))
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user-forced labels or, when not optimizing,
         any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Return true if the var whose chain of uses starts at HEAD has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}

/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
                  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
        if (single_use)
          {
            single_use = NULL;
            break;
          }
        single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return !!single_use;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_checking_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change something from non-invariant to invariant, and
             only when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (!is_gimple_reg (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Of the two successors of BB, return the one that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a GIMPLE_CALL.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (dump_file, bb, 0, dump_flags);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */
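
/* For illustration: if BB ends in the GIMPLE_COND "if (x > 0)" and VAL
   is the constant 0, the false edge is returned; any nonzero constant
   yields the true edge.  A non-constant VAL returns NULL, since either
   edge may be taken.  */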

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label; if the argument is
         not a label then we cannot construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}
1963
1964 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1965 statement, determine which of the outgoing edges will be taken out of the
1966 block. Return NULL if the taken edge cannot be determined. */
1967
1968 static edge
1969 find_taken_edge_computed_goto (basic_block bb, tree val)
1970 {
1971 basic_block dest;
1972 edge e = NULL;
1973
1974 dest = label_to_block (val);
1975 if (dest)
1976 {
1977 e = find_edge (bb, dest);
1978 gcc_assert (e != NULL);
1979 }
1980
1981 return e;
1982 }
1983
1984 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1985 statement, determine which of the two edges will be taken out of the
1986 block. VAL must be an INTEGER_CST. */
1987
1988 static edge
1989 find_taken_edge_cond_expr (basic_block bb, tree val)
1990 {
1991 edge true_edge, false_edge;
1992
1993 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1994
1995 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1996 return (integer_zerop (val) ? false_edge : true_edge);
1997 }
1998
1999 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2000 statement, determine which edge will be taken out of the block. An
2001 edge is always found, falling back to the default label. */
2002
2003 static edge
2004 find_taken_edge_switch_expr (basic_block bb, tree val)
2005 {
2006 basic_block dest_bb;
2007 edge e;
2008 gimple switch_stmt;
2009 tree taken_case;
2010
2011 switch_stmt = last_stmt (bb);
2012 taken_case = find_case_label_for_value (switch_stmt, val);
2013 dest_bb = label_to_block (CASE_LABEL (taken_case));
2014
2015 e = find_edge (bb, dest_bb);
2016 gcc_assert (e);
2017 return e;
2018 }
2019
2020
2021 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2022 We can make optimal use here of the fact that the case labels are
2023 sorted: We can do a binary search for a case matching VAL. */
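/* For example, given the sorted label vector

     [ default, case 1, case 3 ... 5, case 9 ]

   and VAL == 4, the first probe at index (0 + 4) / 2 == 2 lands on the
   range label 3 ... 5; its CASE_LOW compares below VAL and its CASE_HIGH
   above, so that label is returned.  A VAL matching no label makes the
   loop terminate without returning and yields the default case.  */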
2024
2025 static tree
2026 find_case_label_for_value (gimple switch_stmt, tree val)
2027 {
2028 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2029 tree default_case = gimple_switch_default_label (switch_stmt);
2030
2031 for (low = 0, high = n; high - low > 1; )
2032 {
2033 size_t i = (high + low) / 2;
2034 tree t = gimple_switch_label (switch_stmt, i);
2035 int cmp;
2036
2037 /* Cache the result of comparing CASE_LOW and val. */
2038 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2039
2040 if (cmp > 0)
2041 high = i;
2042 else
2043 low = i;
2044
2045 if (CASE_HIGH (t) == NULL)
2046 {
2047 /* A single-valued case label. */
2048 if (cmp == 0)
2049 return t;
2050 }
2051 else
2052 {
2053 /* A case range. We can only handle integer ranges. */
2054 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2055 return t;
2056 }
2057 }
2058
2059 return default_case;
2060 }
2061
2062
2063 /* Dump a basic block on stderr. */
2064
2065 void
2066 gimple_debug_bb (basic_block bb)
2067 {
2068 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2069 }
2070
2071
2072 /* Dump basic block with index N on stderr. */
2073
2074 basic_block
2075 gimple_debug_bb_n (int n)
2076 {
2077 gimple_debug_bb (BASIC_BLOCK (n));
2078 return BASIC_BLOCK (n);
2079 }
2080
2081
2082 /* Dump the CFG on stderr.
2083
2084 FLAGS are the same as those used by the tree dumping functions
2085 (see TDF_* in tree-pass.h). */
2086
2087 void
2088 gimple_debug_cfg (int flags)
2089 {
2090 gimple_dump_cfg (stderr, flags);
2091 }
2092
2093
2094 /* Dump the program showing basic block boundaries on the given FILE.
2095
2096 FLAGS are the same as those used by the tree dumping functions (see
2097 TDF_* in tree-pass.h). */
2098
2099 void
2100 gimple_dump_cfg (FILE *file, int flags)
2101 {
2102 if (flags & TDF_DETAILS)
2103 {
2104 dump_function_header (file, current_function_decl, flags);
2105 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2106 n_basic_blocks, n_edges, last_basic_block);
2107
2108 brief_dump_cfg (file, flags | TDF_COMMENT);
2109 fprintf (file, "\n");
2110 }
2111
2112 if (flags & TDF_STATS)
2113 dump_cfg_stats (file);
2114
2115 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2116 }
2117
2118
2119 /* Dump CFG statistics on FILE. */
2120
2121 void
2122 dump_cfg_stats (FILE *file)
2123 {
2124 static long max_num_merged_labels = 0;
2125 unsigned long size, total = 0;
2126 long num_edges;
2127 basic_block bb;
2128 const char * const fmt_str = "%-30s%-13s%12s\n";
2129 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2130 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2131 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2132 const char *funcname = current_function_name ();
2133
2134 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2135
2136 fprintf (file, "---------------------------------------------------------\n");
2137 fprintf (file, fmt_str, "", " Number of ", "Memory");
2138 fprintf (file, fmt_str, "", " instances ", "used ");
2139 fprintf (file, "---------------------------------------------------------\n");
2140
2141 size = n_basic_blocks * sizeof (struct basic_block_def);
2142 total += size;
2143 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2144 SCALE (size), LABEL (size));
2145
2146 num_edges = 0;
2147 FOR_EACH_BB (bb)
2148 num_edges += EDGE_COUNT (bb->succs);
2149 size = num_edges * sizeof (struct edge_def);
2150 total += size;
2151 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2152
2153 fprintf (file, "---------------------------------------------------------\n");
2154 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2155 LABEL (total));
2156 fprintf (file, "---------------------------------------------------------\n");
2157 fprintf (file, "\n");
2158
2159 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2160 max_num_merged_labels = cfg_stats.num_merged_labels;
2161
2162 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2163 cfg_stats.num_merged_labels, max_num_merged_labels);
2164
2165 fprintf (file, "\n");
2166 }
2167
2168
2169 /* Dump CFG statistics on stderr. Keep extern so that it's always
2170 linked in the final executable. */
2171
2172 DEBUG_FUNCTION void
2173 debug_cfg_stats (void)
2174 {
2175 dump_cfg_stats (stderr);
2176 }
2177
2178
2179 /* Dump the flowgraph to a .vcg FILE. */
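/* For a function "foo" the emitted text looks roughly like this (a
   sketch following the fprintf formats below; node labels show the
   first/last statement code and line number of each block):

     graph: { title: "foo"
     node: { title: "ENTRY" label: "ENTRY" }
     node: { title: "EXIT" label: "EXIT" }
     edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }
     node: { title: "2" label: "#2\ngimple_label (42)\ngimple_cond (47)"}
     edge: { sourcename: "2" targetname: "EXIT" priority: 100 linestyle: solid }
     }  */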
2180
2181 static void
2182 gimple_cfg2vcg (FILE *file)
2183 {
2184 edge e;
2185 edge_iterator ei;
2186 basic_block bb;
2187 const char *funcname = current_function_name ();
2188
2189 /* Write the file header. */
2190 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2191 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2192 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2193
2194 /* Write blocks and edges. */
2195 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2196 {
2197 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2198 e->dest->index);
2199
2200 if (e->flags & EDGE_FAKE)
2201 fprintf (file, " linestyle: dotted priority: 10");
2202 else
2203 fprintf (file, " linestyle: solid priority: 100");
2204
2205 fprintf (file, " }\n");
2206 }
2207 fputc ('\n', file);
2208
2209 FOR_EACH_BB (bb)
2210 {
2211 enum gimple_code head_code, end_code;
2212 const char *head_name, *end_name;
2213 int head_line = 0;
2214 int end_line = 0;
2215 gimple first = first_stmt (bb);
2216 gimple last = last_stmt (bb);
2217
2218 if (first)
2219 {
2220 head_code = gimple_code (first);
2221 head_name = gimple_code_name[head_code];
2222 head_line = get_lineno (first);
2223 }
2224 else
2225 head_name = "no-statement";
2226
2227 if (last)
2228 {
2229 end_code = gimple_code (last);
2230 end_name = gimple_code_name[end_code];
2231 end_line = get_lineno (last);
2232 }
2233 else
2234 end_name = "no-statement";
2235
2236 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2237 bb->index, bb->index, head_name, head_line, end_name,
2238 end_line);
2239
2240 FOR_EACH_EDGE (e, ei, bb->succs)
2241 {
2242 if (e->dest == EXIT_BLOCK_PTR)
2243 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2244 else
2245 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2246
2247 if (e->flags & EDGE_FAKE)
2248 fprintf (file, " priority: 10 linestyle: dotted");
2249 else
2250 fprintf (file, " priority: 100 linestyle: solid");
2251
2252 fprintf (file, " }\n");
2253 }
2254
2255 if (bb->next_bb != EXIT_BLOCK_PTR)
2256 fputc ('\n', file);
2257 }
2258
2259 fputs ("}\n\n", file);
2260 }
2261
2262
2263
2264 /*---------------------------------------------------------------------------
2265 Miscellaneous helpers
2266 ---------------------------------------------------------------------------*/
2267
2268 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2269 flow. Transfers of control flow associated with EH are excluded. */
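/* For example, in a function that contains a nonlocal label, a call to
   an arbitrary external function may return through that label and is
   answered with true; a call with no side effects, or to a function
   declared leaf, cannot and is answered with false.  */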
2270
2271 static bool
2272 call_can_make_abnormal_goto (gimple t)
2273 {
2274 /* If the function has no non-local labels, then a call cannot make an
2275 abnormal transfer of control. */
2276 if (!cfun->has_nonlocal_label)
2277 return false;
2278
2279 /* Likewise if the call has no side effects. */
2280 if (!gimple_has_side_effects (t))
2281 return false;
2282
2283 /* Likewise if the called function is leaf. */
2284 if (gimple_call_flags (t) & ECF_LEAF)
2285 return false;
2286
2287 return true;
2288 }
2289
2290
2291 /* Return true if T can make an abnormal transfer of control flow.
2292 Transfers of control flow associated with EH are excluded. */
2293
2294 bool
2295 stmt_can_make_abnormal_goto (gimple t)
2296 {
2297 if (computed_goto_p (t))
2298 return true;
2299 if (is_gimple_call (t))
2300 return call_can_make_abnormal_goto (t);
2301 return false;
2302 }
2303
2304
2305 /* Return true if T represents a stmt that always transfers control. */
2306
2307 bool
2308 is_ctrl_stmt (gimple t)
2309 {
2310 switch (gimple_code (t))
2311 {
2312 case GIMPLE_COND:
2313 case GIMPLE_SWITCH:
2314 case GIMPLE_GOTO:
2315 case GIMPLE_RETURN:
2316 case GIMPLE_RESX:
2317 return true;
2318 default:
2319 return false;
2320 }
2321 }
2322
2323
2324 /* Return true if T is a statement that may alter the flow of control
2325 (e.g., a call to a non-returning function). */
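/* For example, a GIMPLE_CALL to a noreturn function such as abort, an
   asm with goto labels, an OpenMP directive, or any statement that can
   throw internally all answer true here; a plain arithmetic assignment
   answers false.  */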
2326
2327 bool
2328 is_ctrl_altering_stmt (gimple t)
2329 {
2330 gcc_assert (t);
2331
2332 switch (gimple_code (t))
2333 {
2334 case GIMPLE_CALL:
2335 {
2336 int flags = gimple_call_flags (t);
2337
2338 /* A call alters control flow if it can make an abnormal goto. */
2339 if (call_can_make_abnormal_goto (t))
2340 return true;
2341
2342 /* A call also alters control flow if it does not return. */
2343 if (flags & ECF_NORETURN)
2344 return true;
2345
2346 /* TM ending statements have backedges out of the transaction.
2347 Return true so we split the basic block containing them.
2348 Note that the TM_BUILTIN test is merely an optimization. */
2349 if ((flags & ECF_TM_BUILTIN)
2350 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2351 return true;
2352
2353 /* A BUILT_IN_RETURN call is the same as a return statement. */
2354 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2355 return true;
2356 }
2357 break;
2358
2359 case GIMPLE_EH_DISPATCH:
2360 /* EH_DISPATCH branches to the individual catch handlers at
2361 this level of a try or allowed-exceptions region. It can
2362 fallthru to the next statement as well. */
2363 return true;
2364
2365 case GIMPLE_ASM:
2366 if (gimple_asm_nlabels (t) > 0)
2367 return true;
2368 break;
2369
2370 CASE_GIMPLE_OMP:
2371 /* OpenMP directives alter control flow. */
2372 return true;
2373
2374 case GIMPLE_TRANSACTION:
2375 /* A transaction start alters control flow. */
2376 return true;
2377
2378 default:
2379 break;
2380 }
2381
2382 /* If a statement can throw, it alters control flow. */
2383 return stmt_can_throw_internal (t);
2384 }
2385
2386
2387 /* Return true if T is a simple local goto. */
2388
2389 bool
2390 simple_goto_p (gimple t)
2391 {
2392 return (gimple_code (t) == GIMPLE_GOTO
2393 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2394 }
2395
2396
2397 /* Return true if STMT should start a new basic block. PREV_STMT is
2398 the statement preceding STMT. It is used when STMT is a label or a
2399 case label. Labels should only start a new basic block if their
2400 previous statement wasn't a label. Otherwise, a sequence of labels
2401 would generate unnecessary basic blocks that each contain only a
2402 single label. */
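/* For example, in the sequence

     L1: L2: x_1 = a_2 + 1;

   only L1 opens a new basic block; L2 is merged into it and counted in
   cfg_stats.num_merged_labels.  A nonlocal or forced label always opens
   a new block, since it can be reached by an abnormal edge.  */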
2403
2404 static inline bool
2405 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2406 {
2407 if (stmt == NULL)
2408 return false;
2409
2410 /* Labels start a new basic block only if the preceding statement
2411 wasn't a label of the same type. This prevents the creation of
2412 consecutive blocks that have nothing but a single label. */
2413 if (gimple_code (stmt) == GIMPLE_LABEL)
2414 {
2415 /* Nonlocal and computed GOTO targets always start a new block. */
2416 if (DECL_NONLOCAL (gimple_label_label (stmt))
2417 || FORCED_LABEL (gimple_label_label (stmt)))
2418 return true;
2419
2420 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2421 {
2422 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2423 return true;
2424
2425 cfg_stats.num_merged_labels++;
2426 return false;
2427 }
2428 else
2429 return true;
2430 }
2431
2432 return false;
2433 }
2434
2435
2436 /* Return true if T should end a basic block. */
2437
2438 bool
2439 stmt_ends_bb_p (gimple t)
2440 {
2441 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2442 }
2443
2444 /* Remove block annotations and other data structures. */
2445
2446 void
2447 delete_tree_cfg_annotations (void)
2448 {
2449 label_to_block_map = NULL;
2450 }
2451
2452
2453 /* Return the first non-debug statement in basic block BB. */
2454
2455 gimple
2456 first_stmt (basic_block bb)
2457 {
2458 gimple_stmt_iterator i = gsi_start_bb (bb);
2459 gimple stmt = NULL;
2460
2461 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2462 {
2463 gsi_next (&i);
2464 stmt = NULL;
2465 }
2466 return stmt;
2467 }
2468
2469 /* Return the first non-label statement in basic block BB. */
2470
2471 static gimple
2472 first_non_label_stmt (basic_block bb)
2473 {
2474 gimple_stmt_iterator i = gsi_start_bb (bb);
2475 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2476 gsi_next (&i);
2477 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2478 }
2479
2480 /* Return the last non-debug statement in basic block BB. */
2481
2482 gimple
2483 last_stmt (basic_block bb)
2484 {
2485 gimple_stmt_iterator i = gsi_last_bb (bb);
2486 gimple stmt = NULL;
2487
2488 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2489 {
2490 gsi_prev (&i);
2491 stmt = NULL;
2492 }
2493 return stmt;
2494 }
2495
2496 /* Return the last statement of an otherwise empty block. Return NULL
2497 if the block is totally empty, or if it contains more than one
2498 statement. */
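/* For example, for a block containing just

     L1: return x_1;

   the GIMPLE_RETURN is returned (the leading label and any debug
   statements are disregarded); an empty block, or one with two
   executable statements, yields NULL.  */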
2499
2500 gimple
2501 last_and_only_stmt (basic_block bb)
2502 {
2503 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2504 gimple last, prev;
2505
2506 if (gsi_end_p (i))
2507 return NULL;
2508
2509 last = gsi_stmt (i);
2510 gsi_prev_nondebug (&i);
2511 if (gsi_end_p (i))
2512 return last;
2513
2514 /* Empty statements should no longer appear in the instruction stream.
2515 Everything that might have appeared before should be deleted by
2516 remove_useless_stmts, and the optimizers should just gsi_remove
2517 instead of smashing with build_empty_stmt.
2518
2519 Thus the only thing that should appear here in a block containing
2520 one executable statement is a label. */
2521 prev = gsi_stmt (i);
2522 if (gimple_code (prev) == GIMPLE_LABEL)
2523 return last;
2524 else
2525 return NULL;
2526 }
2527
2528 /* Reinstall the PHI arguments queued for OLD_EDGE onto NEW_EDGE. */
2529
2530 static void
2531 reinstall_phi_args (edge new_edge, edge old_edge)
2532 {
2533 edge_var_map_vector v;
2534 edge_var_map *vm;
2535 int i;
2536 gimple_stmt_iterator phis;
2537
2538 v = redirect_edge_var_map_vector (old_edge);
2539 if (!v)
2540 return;
2541
2542 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2543 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2544 i++, gsi_next (&phis))
2545 {
2546 gimple phi = gsi_stmt (phis);
2547 tree result = redirect_edge_var_map_result (vm);
2548 tree arg = redirect_edge_var_map_def (vm);
2549
2550 gcc_assert (result == gimple_phi_result (phi));
2551
2552 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2553 }
2554
2555 redirect_edge_var_map_clear (old_edge);
2556 }
2557
2558 /* Returns the basic block after which the new basic block created
2559 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2560 near its "logical" location. This is of most help to humans looking
2561 at debugging dumps. */
2562
2563 static basic_block
2564 split_edge_bb_loc (edge edge_in)
2565 {
2566 basic_block dest = edge_in->dest;
2567 basic_block dest_prev = dest->prev_bb;
2568
2569 if (dest_prev)
2570 {
2571 edge e = find_edge (dest_prev, dest);
2572 if (e && !(e->flags & EDGE_COMPLEX))
2573 return edge_in->src;
2574 }
2575 return dest_prev;
2576 }
2577
2578 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2579 Abort on abnormal edges. */
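/* Schematically, splitting the edge E from block A to block B

     A --E--> B    becomes    A --E--> NEW --fallthru--> B

   where NEW inherits E's count and frequency, and B's PHI arguments that
   were associated with E are reinstalled on the new fallthru edge.  */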
2580
2581 static basic_block
2582 gimple_split_edge (edge edge_in)
2583 {
2584 basic_block new_bb, after_bb, dest;
2585 edge new_edge, e;
2586
2587 /* Abnormal edges cannot be split. */
2588 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2589
2590 dest = edge_in->dest;
2591
2592 after_bb = split_edge_bb_loc (edge_in);
2593
2594 new_bb = create_empty_bb (after_bb);
2595 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2596 new_bb->count = edge_in->count;
2597 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2598 new_edge->probability = REG_BR_PROB_BASE;
2599 new_edge->count = edge_in->count;
2600
2601 e = redirect_edge_and_branch (edge_in, new_bb);
2602 gcc_assert (e == edge_in);
2603 reinstall_phi_args (new_edge, e);
2604
2605 return new_bb;
2606 }
2607
2608
2609 /* Verify flags and base object BASE of the address expression T. Return the offending tree, or NULL_TREE on success. */
2610
2611 static tree
2612 verify_address (tree t, tree base)
2613 {
2614 bool old_constant;
2615 bool old_side_effects;
2616 bool new_constant;
2617 bool new_side_effects;
2618
2619 old_constant = TREE_CONSTANT (t);
2620 old_side_effects = TREE_SIDE_EFFECTS (t);
2621
2622 recompute_tree_invariant_for_addr_expr (t);
2623 new_side_effects = TREE_SIDE_EFFECTS (t);
2624 new_constant = TREE_CONSTANT (t);
2625
2626 if (old_constant != new_constant)
2627 {
2628 error ("constant not recomputed when ADDR_EXPR changed");
2629 return t;
2630 }
2631 if (old_side_effects != new_side_effects)
2632 {
2633 error ("side effects not recomputed when ADDR_EXPR changed");
2634 return t;
2635 }
2636
2637 if (!(TREE_CODE (base) == VAR_DECL
2638 || TREE_CODE (base) == PARM_DECL
2639 || TREE_CODE (base) == RESULT_DECL))
2640 return NULL_TREE;
2641
2642 if (DECL_GIMPLE_REG_P (base))
2643 {
2644 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2645 return base;
2646 }
2647
2648 return NULL_TREE;
2649 }
2650
2651 /* Callback for walk_tree; check that all elements with address taken
2652 are properly noticed as such. DATA is unused. */
2654
2655 static tree
2656 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2657 {
2658 tree t = *tp, x;
2659
2660 if (TYPE_P (t))
2661 *walk_subtrees = 0;
2662
2663 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2664 #define CHECK_OP(N, MSG) \
2665 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2666 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2667
2668 switch (TREE_CODE (t))
2669 {
2670 case SSA_NAME:
2671 if (SSA_NAME_IN_FREE_LIST (t))
2672 {
2673 error ("SSA name in freelist but still referenced");
2674 return *tp;
2675 }
2676 break;
2677
2678 case INDIRECT_REF:
2679 error ("INDIRECT_REF in gimple IL");
2680 return t;
2681
2682 case MEM_REF:
2683 x = TREE_OPERAND (t, 0);
2684 if (!POINTER_TYPE_P (TREE_TYPE (x))
2685 || !is_gimple_mem_ref_addr (x))
2686 {
2687 error ("invalid first operand of MEM_REF");
2688 return x;
2689 }
2690 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2691 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2692 {
2693 error ("invalid offset operand of MEM_REF");
2694 return TREE_OPERAND (t, 1);
2695 }
2696 if (TREE_CODE (x) == ADDR_EXPR
2697 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2698 return x;
2699 *walk_subtrees = 0;
2700 break;
2701
2702 case ASSERT_EXPR:
2703 x = fold (ASSERT_EXPR_COND (t));
2704 if (x == boolean_false_node)
2705 {
2706 error ("ASSERT_EXPR with an always-false condition");
2707 return *tp;
2708 }
2709 break;
2710
2711 case MODIFY_EXPR:
2712 error ("MODIFY_EXPR not expected while having tuples");
2713 return *tp;
2714
2715 case ADDR_EXPR:
2716 {
2717 tree tem;
2718
2719 gcc_assert (is_gimple_address (t));
2720
2721 /* Skip any references (they will be checked when we recurse down the
2722 tree) and ensure that any variable used as a prefix is marked
2723 addressable. */
2724 for (x = TREE_OPERAND (t, 0);
2725 handled_component_p (x);
2726 x = TREE_OPERAND (x, 0))
2727 ;
2728
2729 if ((tem = verify_address (t, x)))
2730 return tem;
2731
2732 if (!(TREE_CODE (x) == VAR_DECL
2733 || TREE_CODE (x) == PARM_DECL
2734 || TREE_CODE (x) == RESULT_DECL))
2735 return NULL;
2736
2737 if (!TREE_ADDRESSABLE (x))
2738 {
2739 error ("address taken, but ADDRESSABLE bit not set");
2740 return x;
2741 }
2742
2743 break;
2744 }
2745
2746 case COND_EXPR:
2747 x = COND_EXPR_COND (t);
2748 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2749 {
2750 error ("non-integral used in condition");
2751 return x;
2752 }
2753 if (!is_gimple_condexpr (x))
2754 {
2755 error ("invalid conditional operand");
2756 return x;
2757 }
2758 break;
2759
2760 case NON_LVALUE_EXPR:
2761 case TRUTH_NOT_EXPR:
2762 gcc_unreachable ();
2763
2764 CASE_CONVERT:
2765 case FIX_TRUNC_EXPR:
2766 case FLOAT_EXPR:
2767 case NEGATE_EXPR:
2768 case ABS_EXPR:
2769 case BIT_NOT_EXPR:
2770 CHECK_OP (0, "invalid operand to unary operator");
2771 break;
2772
2773 case REALPART_EXPR:
2774 case IMAGPART_EXPR:
2775 case COMPONENT_REF:
2776 case ARRAY_REF:
2777 case ARRAY_RANGE_REF:
2778 case BIT_FIELD_REF:
2779 case VIEW_CONVERT_EXPR:
2780 /* We have a nest of references. Verify that each of the operands
2781 that determine where to reference is either a constant or a variable,
2782 verify that the base is valid, and then mark the subtrees as
2783 already checked. */
2784 while (handled_component_p (t))
2785 {
2786 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2787 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2788 else if (TREE_CODE (t) == ARRAY_REF
2789 || TREE_CODE (t) == ARRAY_RANGE_REF)
2790 {
2791 CHECK_OP (1, "invalid array index");
2792 if (TREE_OPERAND (t, 2))
2793 CHECK_OP (2, "invalid array lower bound");
2794 if (TREE_OPERAND (t, 3))
2795 CHECK_OP (3, "invalid array stride");
2796 }
2797 else if (TREE_CODE (t) == BIT_FIELD_REF)
2798 {
2799 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2800 || !host_integerp (TREE_OPERAND (t, 2), 1))
2801 {
2802 error ("invalid position or size operand to BIT_FIELD_REF");
2803 return t;
2804 }
2805 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2806 && (TYPE_PRECISION (TREE_TYPE (t))
2807 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2808 {
2809 error ("integral result type precision does not match "
2810 "field size of BIT_FIELD_REF");
2811 return t;
2812 }
2813 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2814 && !AGGREGATE_TYPE_P (TREE_TYPE (t))
2815 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2816 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2817 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2818 {
2819 error ("mode precision of non-integral result does not "
2820 "match field size of BIT_FIELD_REF");
2821 return t;
2822 }
2823 }
2824
2825 t = TREE_OPERAND (t, 0);
2826 }
2827
2828 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2829 {
2830 error ("invalid reference prefix");
2831 return t;
2832 }
2833 *walk_subtrees = 0;
2834 break;
2835 case PLUS_EXPR:
2836 case MINUS_EXPR:
2837 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2838 should be done using POINTER_PLUS_EXPR. */
2839 if (POINTER_TYPE_P (TREE_TYPE (t)))
2840 {
2841 error ("invalid operand to plus/minus, type is a pointer");
2842 return t;
2843 }
2844 CHECK_OP (0, "invalid operand to binary operator");
2845 CHECK_OP (1, "invalid operand to binary operator");
2846 break;
2847
2848 case POINTER_PLUS_EXPR:
2849 /* Check to make sure the first operand is a pointer or reference type. */
2850 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2851 {
2852 error ("invalid operand to pointer plus, first operand is not a pointer");
2853 return t;
2854 }
2855 /* Check to make sure the second operand is a ptrofftype. */
2856 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2857 {
2858 error ("invalid operand to pointer plus, second operand is not an "
2859 "integer type of appropriate width");
2860 return t;
2861 }
2862 /* FALLTHROUGH */
2863 case LT_EXPR:
2864 case LE_EXPR:
2865 case GT_EXPR:
2866 case GE_EXPR:
2867 case EQ_EXPR:
2868 case NE_EXPR:
2869 case UNORDERED_EXPR:
2870 case ORDERED_EXPR:
2871 case UNLT_EXPR:
2872 case UNLE_EXPR:
2873 case UNGT_EXPR:
2874 case UNGE_EXPR:
2875 case UNEQ_EXPR:
2876 case LTGT_EXPR:
2877 case MULT_EXPR:
2878 case TRUNC_DIV_EXPR:
2879 case CEIL_DIV_EXPR:
2880 case FLOOR_DIV_EXPR:
2881 case ROUND_DIV_EXPR:
2882 case TRUNC_MOD_EXPR:
2883 case CEIL_MOD_EXPR:
2884 case FLOOR_MOD_EXPR:
2885 case ROUND_MOD_EXPR:
2886 case RDIV_EXPR:
2887 case EXACT_DIV_EXPR:
2888 case MIN_EXPR:
2889 case MAX_EXPR:
2890 case LSHIFT_EXPR:
2891 case RSHIFT_EXPR:
2892 case LROTATE_EXPR:
2893 case RROTATE_EXPR:
2894 case BIT_IOR_EXPR:
2895 case BIT_XOR_EXPR:
2896 case BIT_AND_EXPR:
2897 CHECK_OP (0, "invalid operand to binary operator");
2898 CHECK_OP (1, "invalid operand to binary operator");
2899 break;
2900
2901 case CONSTRUCTOR:
2902 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2903 *walk_subtrees = 0;
2904 break;
2905
2906 case CASE_LABEL_EXPR:
2907 if (CASE_CHAIN (t))
2908 {
2909 error ("invalid CASE_CHAIN");
2910 return t;
2911 }
2912 break;
2913
2914 default:
2915 break;
2916 }
2917 return NULL;
2918
2919 #undef CHECK_OP
2920 }
2921
2922
2923 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2924 Returns true if there is an error, otherwise false. */
2925
2926 static bool
2927 verify_types_in_gimple_min_lval (tree expr)
2928 {
2929 tree op;
2930
2931 if (is_gimple_id (expr))
2932 return false;
2933
2934 if (TREE_CODE (expr) != TARGET_MEM_REF
2935 && TREE_CODE (expr) != MEM_REF)
2936 {
2937 error ("invalid expression for min lvalue");
2938 return true;
2939 }
2940
2941 /* TARGET_MEM_REFs are strange beasts. */
2942 if (TREE_CODE (expr) == TARGET_MEM_REF)
2943 return false;
2944
2945 op = TREE_OPERAND (expr, 0);
2946 if (!is_gimple_val (op))
2947 {
2948 error ("invalid operand in indirect reference");
2949 debug_generic_stmt (op);
2950 return true;
2951 }
2952 /* Memory references can now generally involve a value conversion. */
2953
2954 return false;
2955 }
2956
2957 /* Verify if EXPR is a valid GIMPLE reference expression. If
2958 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2959 if there is an error, otherwise false. */
2960
2961 static bool
2962 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2963 {
2964 while (handled_component_p (expr))
2965 {
2966 tree op = TREE_OPERAND (expr, 0);
2967
2968 if (TREE_CODE (expr) == ARRAY_REF
2969 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2970 {
2971 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2972 || (TREE_OPERAND (expr, 2)
2973 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2974 || (TREE_OPERAND (expr, 3)
2975 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2976 {
2977 error ("invalid operands to array reference");
2978 debug_generic_stmt (expr);
2979 return true;
2980 }
2981 }
2982
2983 /* Verify if the reference array element types are compatible. */
2984 if (TREE_CODE (expr) == ARRAY_REF
2985 && !useless_type_conversion_p (TREE_TYPE (expr),
2986 TREE_TYPE (TREE_TYPE (op))))
2987 {
2988 error ("type mismatch in array reference");
2989 debug_generic_stmt (TREE_TYPE (expr));
2990 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2991 return true;
2992 }
2993 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2994 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2995 TREE_TYPE (TREE_TYPE (op))))
2996 {
2997 error ("type mismatch in array range reference");
2998 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2999 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3000 return true;
3001 }
3002
3003 if ((TREE_CODE (expr) == REALPART_EXPR
3004 || TREE_CODE (expr) == IMAGPART_EXPR)
3005 && !useless_type_conversion_p (TREE_TYPE (expr),
3006 TREE_TYPE (TREE_TYPE (op))))
3007 {
3008 error ("type mismatch in real/imagpart reference");
3009 debug_generic_stmt (TREE_TYPE (expr));
3010 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3011 return true;
3012 }
3013
3014 if (TREE_CODE (expr) == COMPONENT_REF
3015 && !useless_type_conversion_p (TREE_TYPE (expr),
3016 TREE_TYPE (TREE_OPERAND (expr, 1))))
3017 {
3018 error ("type mismatch in component reference");
3019 debug_generic_stmt (TREE_TYPE (expr));
3020 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3021 return true;
3022 }
3023
3024 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3025 {
3026 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3027 that their operand is not an SSA name or an invariant when
3028 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3029 bug). Otherwise there is nothing to verify; gross mismatches at
3030 most invoke undefined behavior. */
3031 if (require_lvalue
3032 && (TREE_CODE (op) == SSA_NAME
3033 || is_gimple_min_invariant (op)))
3034 {
3035 error ("conversion of an SSA_NAME on the left hand side");
3036 debug_generic_stmt (expr);
3037 return true;
3038 }
3039 else if (TREE_CODE (op) == SSA_NAME
3040 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3041 {
3042 error ("conversion of register to a different size");
3043 debug_generic_stmt (expr);
3044 return true;
3045 }
3046 else if (!handled_component_p (op))
3047 return false;
3048 }
3049
3050 expr = op;
3051 }
3052
3053 if (TREE_CODE (expr) == MEM_REF)
3054 {
3055 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3056 {
3057 error ("invalid address operand in MEM_REF");
3058 debug_generic_stmt (expr);
3059 return true;
3060 }
3061 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3062 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3063 {
3064 error ("invalid offset operand in MEM_REF");
3065 debug_generic_stmt (expr);
3066 return true;
3067 }
3068 }
3069 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3070 {
3071 if (!TMR_BASE (expr)
3072 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3073 {
3074 error ("invalid address operand in TARGET_MEM_REF");
3075 return true;
3076 }
3077 if (!TMR_OFFSET (expr)
3078 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3079 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3080 {
3081 error ("invalid offset operand in TARGET_MEM_REF");
3082 debug_generic_stmt (expr);
3083 return true;
3084 }
3085 }
3086
3087 return ((require_lvalue || !is_gimple_min_invariant (expr))
3088 && verify_types_in_gimple_min_lval (expr));
3089 }
3090
3091 /* Return true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ) list
3092 is trivially convertible to DEST, or if no pointer-to types are recorded. */
3093
3094 static bool
3095 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3096 {
3097 tree src;
3098
3099 if (!TYPE_POINTER_TO (src_obj))
3100 return true;
3101
3102 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3103 if (useless_type_conversion_p (dest, src))
3104 return true;
3105
3106 return false;
3107 }
3108
3109 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3110 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3111
3112 static bool
3113 valid_fixed_convert_types_p (tree type1, tree type2)
3114 {
3115 return (FIXED_POINT_TYPE_P (type1)
3116 && (INTEGRAL_TYPE_P (type2)
3117 || SCALAR_FLOAT_TYPE_P (type2)
3118 || FIXED_POINT_TYPE_P (type2)));
3119 }
3120
3121 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3122 is a problem, otherwise false. */
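/* For example, for "x_1 = foo (a_2)" this checks, among other things,
   that the callee is a valid gimple call address of function pointer
   type, that the LHS is an lvalue compatible with the call's return
   type, that a static chain only appears on direct calls to functions
   using one, and that each argument is a gimple value or lvalue as its
   type requires.  */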
3123
3124 static bool
3125 verify_gimple_call (gimple stmt)
3126 {
3127 tree fn = gimple_call_fn (stmt);
3128 tree fntype, fndecl;
3129 unsigned i;
3130
3131 if (gimple_call_internal_p (stmt))
3132 {
3133 if (fn)
3134 {
3135 error ("gimple call has two targets");
3136 debug_generic_stmt (fn);
3137 return true;
3138 }
3139 }
3140 else
3141 {
3142 if (!fn)
3143 {
3144 error ("gimple call has no target");
3145 return true;
3146 }
3147 }
3148
3149 if (fn && !is_gimple_call_addr (fn))
3150 {
3151 error ("invalid function in gimple call");
3152 debug_generic_stmt (fn);
3153 return true;
3154 }
3155
3156 if (fn
3157 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3158 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3159 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3160 {
3161 error ("non-function in gimple call");
3162 return true;
3163 }
3164
3165 fndecl = gimple_call_fndecl (stmt);
3166 if (fndecl
3167 && TREE_CODE (fndecl) == FUNCTION_DECL
3168 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3169 && !DECL_PURE_P (fndecl)
3170 && !TREE_READONLY (fndecl))
3171 {
3172 error ("invalid pure const state for function");
3173 return true;
3174 }
3175
3176 if (gimple_call_lhs (stmt)
3177 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3178 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3179 {
3180 error ("invalid LHS in gimple call");
3181 return true;
3182 }
3183
3184 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3185 {
3186 error ("LHS in noreturn call");
3187 return true;
3188 }
3189
3190 fntype = gimple_call_fntype (stmt);
3191 if (fntype
3192 && gimple_call_lhs (stmt)
3193 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3194 TREE_TYPE (fntype))
3195 /* ??? At least C++ misses conversions at assignments from
3196 void * call results.
3197 ??? Java is completely off. Especially with functions
3198 returning java.lang.Object.
3199 For now simply allow arbitrary pointer type conversions. */
3200 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3201 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3202 {
3203 error ("invalid conversion in gimple call");
3204 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3205 debug_generic_stmt (TREE_TYPE (fntype));
3206 return true;
3207 }
3208
3209 if (gimple_call_chain (stmt)
3210 && !is_gimple_val (gimple_call_chain (stmt)))
3211 {
3212 error ("invalid static chain in gimple call");
3213 debug_generic_stmt (gimple_call_chain (stmt));
3214 return true;
3215 }
3216
3217 /* If there is a static chain argument, this should not be an indirect
3218 call, and the decl should have DECL_STATIC_CHAIN set. */
3219 if (gimple_call_chain (stmt))
3220 {
3221 if (!gimple_call_fndecl (stmt))
3222 {
3223 error ("static chain in indirect gimple call");
3224 return true;
3225 }
3226 fn = TREE_OPERAND (fn, 0);
3227
3228 if (!DECL_STATIC_CHAIN (fn))
3229 {
3230 error ("static chain with function that doesn%'t use one");
3231 return true;
3232 }
3233 }
3234
3235 /* ??? The C frontend passes unpromoted arguments in case it
3236 didn't see a function declaration before the call. So for now
3237 leave the call arguments mostly unverified. Once we gimplify
3238 unit-at-a-time we have a chance to fix this. */
3239
3240 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3241 {
3242 tree arg = gimple_call_arg (stmt, i);
3243 if ((is_gimple_reg_type (TREE_TYPE (arg))
3244 && !is_gimple_val (arg))
3245 || (!is_gimple_reg_type (TREE_TYPE (arg))
3246 && !is_gimple_lvalue (arg)))
3247 {
3248 error ("invalid argument to gimple call");
3249 debug_generic_expr (arg);
3250 return true;
3251 }
3252 }
3253
3254 return false;
3255 }
3256
3257 /* Verifies the gimple comparison with the result type TYPE and
3258 the operands OP0 and OP1. */
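/* For example, "_1 = a_2 < b_3" is accepted when _1 has boolean type
   (or an integral type of precision 1) and the operand types are
   trivially inter-convertible; a vector comparison must instead produce
   an integer vector with the same element count and element size as the
   operands.  */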
3259
3260 static bool
3261 verify_gimple_comparison (tree type, tree op0, tree op1)
3262 {
3263 tree op0_type = TREE_TYPE (op0);
3264 tree op1_type = TREE_TYPE (op1);
3265
3266 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3267 {
3268 error ("invalid operands in gimple comparison");
3269 return true;
3270 }
3271
3272 /* For comparisons there is no operation type recording the
3273 effective type the comparison is carried out in. Instead
3274 we require that either the first operand is trivially
3275 convertible into the second, or the other way around.
3276 Because we special-case pointers to void we allow
3277 comparisons of pointers with the same mode as well. */
3278 if (!useless_type_conversion_p (op0_type, op1_type)
3279 && !useless_type_conversion_p (op1_type, op0_type)
3280 && (!POINTER_TYPE_P (op0_type)
3281 || !POINTER_TYPE_P (op1_type)
3282 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3283 {
3284 error ("mismatching comparison operand types");
3285 debug_generic_expr (op0_type);
3286 debug_generic_expr (op1_type);
3287 return true;
3288 }
3289
3290 /* The resulting type of a comparison may be an effective boolean type. */
3291 if (INTEGRAL_TYPE_P (type)
3292 && (TREE_CODE (type) == BOOLEAN_TYPE
3293 || TYPE_PRECISION (type) == 1))
3294 ;
3295 /* Or an integer vector type with the same size and element count
3296 as the comparison operand types. */
3297 else if (TREE_CODE (type) == VECTOR_TYPE
3298 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3299 {
3300 if (TREE_CODE (op0_type) != VECTOR_TYPE
3301 || TREE_CODE (op1_type) != VECTOR_TYPE)
3302 {
3303 error ("non-vector operands in vector comparison");
3304 debug_generic_expr (op0_type);
3305 debug_generic_expr (op1_type);
3306 return true;
3307 }
3308
3309 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3310 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3311 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type)))))
3312 {
3313 error ("invalid vector comparison resulting type");
3314 debug_generic_expr (type);
3315 return true;
3316 }
3317 }
3318 else
3319 {
3320 error ("bogus comparison result type");
3321 debug_generic_expr (type);
3322 return true;
3323 }
3324
3325 return false;
3326 }
3327
3328 /* Verify a gimple assignment statement STMT with a unary rhs.
3329 Returns true if anything is wrong. */
3330
3331 static bool
3332 verify_gimple_assign_unary (gimple stmt)
3333 {
3334 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3335 tree lhs = gimple_assign_lhs (stmt);
3336 tree lhs_type = TREE_TYPE (lhs);
3337 tree rhs1 = gimple_assign_rhs1 (stmt);
3338 tree rhs1_type = TREE_TYPE (rhs1);
3339
3340 if (!is_gimple_reg (lhs))
3341 {
3342 error ("non-register as LHS of unary operation");
3343 return true;
3344 }
3345
3346 if (!is_gimple_val (rhs1))
3347 {
3348 error ("invalid operand in unary operation");
3349 return true;
3350 }
3351
3352 /* First handle conversions. */
3353 switch (rhs_code)
3354 {
3355 CASE_CONVERT:
3356 {
3357 /* Allow conversions from pointer type to integral type only if
3358 there is no sign or zero extension involved.
3359 For targets where the precision of ptrofftype doesn't match that
3360 of pointers we need to allow arbitrary conversions to ptrofftype. */
3361 if ((POINTER_TYPE_P (lhs_type)
3362 && INTEGRAL_TYPE_P (rhs1_type))
3363 || (POINTER_TYPE_P (rhs1_type)
3364 && INTEGRAL_TYPE_P (lhs_type)
3365 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3366 || ptrofftype_p (sizetype))))
3367 return false;
3368
3369 /* Allow conversion from integral to offset type and vice versa. */
3370 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3371 && INTEGRAL_TYPE_P (rhs1_type))
3372 || (INTEGRAL_TYPE_P (lhs_type)
3373 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3374 return false;
3375
3376 /* Otherwise assert we are converting between types of the
3377 same kind. */
3378 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3379 {
3380 error ("invalid types in nop conversion");
3381 debug_generic_expr (lhs_type);
3382 debug_generic_expr (rhs1_type);
3383 return true;
3384 }
3385
3386 return false;
3387 }
3388
3389 case ADDR_SPACE_CONVERT_EXPR:
3390 {
3391 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3392 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3393 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3394 {
3395 error ("invalid types in address space conversion");
3396 debug_generic_expr (lhs_type);
3397 debug_generic_expr (rhs1_type);
3398 return true;
3399 }
3400
3401 return false;
3402 }
3403
3404 case FIXED_CONVERT_EXPR:
3405 {
3406 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3407 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3408 {
3409 error ("invalid types in fixed-point conversion");
3410 debug_generic_expr (lhs_type);
3411 debug_generic_expr (rhs1_type);
3412 return true;
3413 }
3414
3415 return false;
3416 }
3417
3418 case FLOAT_EXPR:
3419 {
3420 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3421 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3422 || !VECTOR_FLOAT_TYPE_P(lhs_type)))
3423 {
3424 error ("invalid types in conversion to floating point");
3425 debug_generic_expr (lhs_type);
3426 debug_generic_expr (rhs1_type);
3427 return true;
3428 }
3429
3430 return false;
3431 }
3432
3433 case FIX_TRUNC_EXPR:
3434 {
3435 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3436 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3437 || !VECTOR_FLOAT_TYPE_P(rhs1_type)))
3438 {
3439 error ("invalid types in conversion to integer");
3440 debug_generic_expr (lhs_type);
3441 debug_generic_expr (rhs1_type);
3442 return true;
3443 }
3444
3445 return false;
3446 }
3447
3448 case VEC_UNPACK_HI_EXPR:
3449 case VEC_UNPACK_LO_EXPR:
3450 case REDUC_MAX_EXPR:
3451 case REDUC_MIN_EXPR:
3452 case REDUC_PLUS_EXPR:
3453 case VEC_UNPACK_FLOAT_HI_EXPR:
3454 case VEC_UNPACK_FLOAT_LO_EXPR:
3455 /* FIXME. */
3456 return false;
3457
3458 case NEGATE_EXPR:
3459 case ABS_EXPR:
3460 case BIT_NOT_EXPR:
3461 case PAREN_EXPR:
3462 case NON_LVALUE_EXPR:
3463 case CONJ_EXPR:
3464 break;
3465
3466 default:
3467 gcc_unreachable ();
3468 }
3469
3470 /* For the remaining codes assert there is no conversion involved. */
3471 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3472 {
3473 error ("non-trivial conversion in unary operation");
3474 debug_generic_expr (lhs_type);
3475 debug_generic_expr (rhs1_type);
3476 return true;
3477 }
3478
3479 return false;
3480 }
3481
3482 /* Verify a gimple assignment statement STMT with a binary rhs.
3483 Returns true if anything is wrong. */
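/* For example, "x_1 = a_2 + b_3" requires a register LHS, gimple-value
   operands, and operand types trivially convertible to the LHS type;
   adding to a pointer must be expressed as POINTER_PLUS_EXPR instead,
   and comparison codes are delegated to verify_gimple_comparison.  */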
3484
3485 static bool
3486 verify_gimple_assign_binary (gimple stmt)
3487 {
3488 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3489 tree lhs = gimple_assign_lhs (stmt);
3490 tree lhs_type = TREE_TYPE (lhs);
3491 tree rhs1 = gimple_assign_rhs1 (stmt);
3492 tree rhs1_type = TREE_TYPE (rhs1);
3493 tree rhs2 = gimple_assign_rhs2 (stmt);
3494 tree rhs2_type = TREE_TYPE (rhs2);
3495
3496 if (!is_gimple_reg (lhs))
3497 {
3498 error ("non-register as LHS of binary operation");
3499 return true;
3500 }
3501
3502 if (!is_gimple_val (rhs1)
3503 || !is_gimple_val (rhs2))
3504 {
3505 error ("invalid operands in binary operation");
3506 return true;
3507 }
3508
3509 /* First handle operations that involve different types. */
3510 switch (rhs_code)
3511 {
3512 case COMPLEX_EXPR:
3513 {
3514 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3515 || !(INTEGRAL_TYPE_P (rhs1_type)
3516 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3517 || !(INTEGRAL_TYPE_P (rhs2_type)
3518 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3519 {
3520 error ("type mismatch in complex expression");
3521 debug_generic_expr (lhs_type);
3522 debug_generic_expr (rhs1_type);
3523 debug_generic_expr (rhs2_type);
3524 return true;
3525 }
3526
3527 return false;
3528 }
3529
3530 case LSHIFT_EXPR:
3531 case RSHIFT_EXPR:
3532 case LROTATE_EXPR:
3533 case RROTATE_EXPR:
3534 {
3535 /* Shifts and rotates are ok on integral types, fixed point
3536 types and integer vector types. */
3537 if ((!INTEGRAL_TYPE_P (rhs1_type)
3538 && !FIXED_POINT_TYPE_P (rhs1_type)
3539 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3540 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3541 || (!INTEGRAL_TYPE_P (rhs2_type)
3542 /* Vector shifts of vectors are also ok. */
3543 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3544 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3545 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3546 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3547 || !useless_type_conversion_p (lhs_type, rhs1_type))
3548 {
3549 error ("type mismatch in shift expression");
3550 debug_generic_expr (lhs_type);
3551 debug_generic_expr (rhs1_type);
3552 debug_generic_expr (rhs2_type);
3553 return true;
3554 }
3555
3556 return false;
3557 }
3558
3559 case VEC_LSHIFT_EXPR:
3560 case VEC_RSHIFT_EXPR:
3561 {
3562 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3563 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3564 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3565 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3566 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3567 || (!INTEGRAL_TYPE_P (rhs2_type)
3568 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3569 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3570 || !useless_type_conversion_p (lhs_type, rhs1_type))
3571 {
3572 error ("type mismatch in vector shift expression");
3573 debug_generic_expr (lhs_type);
3574 debug_generic_expr (rhs1_type);
3575 debug_generic_expr (rhs2_type);
3576 return true;
3577 }
3578 /* For shifting a vector of non-integral components we
3579 only allow shifting by a constant multiple of the element size. */
3580 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3581 && (TREE_CODE (rhs2) != INTEGER_CST
3582 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3583 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3584 {
3585 error ("non-element sized vector shift of floating point vector");
3586 return true;
3587 }
3588
3589 return false;
3590 }
3591
3592 case WIDEN_LSHIFT_EXPR:
3593 {
3594 if (!INTEGRAL_TYPE_P (lhs_type)
3595 || !INTEGRAL_TYPE_P (rhs1_type)
3596 || TREE_CODE (rhs2) != INTEGER_CST
3597 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3598 {
3599 error ("type mismatch in widening vector shift expression");
3600 debug_generic_expr (lhs_type);
3601 debug_generic_expr (rhs1_type);
3602 debug_generic_expr (rhs2_type);
3603 return true;
3604 }
3605
3606 return false;
3607 }
3608
3609 case VEC_WIDEN_LSHIFT_HI_EXPR:
3610 case VEC_WIDEN_LSHIFT_LO_EXPR:
3611 {
3612 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3613 || TREE_CODE (lhs_type) != VECTOR_TYPE
3614 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3615 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3616 || TREE_CODE (rhs2) != INTEGER_CST
3617 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3618 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3619 {
3620 error ("type mismatch in widening vector shift expression");
3621 debug_generic_expr (lhs_type);
3622 debug_generic_expr (rhs1_type);
3623 debug_generic_expr (rhs2_type);
3624 return true;
3625 }
3626
3627 return false;
3628 }
3629
3630 case PLUS_EXPR:
3631 case MINUS_EXPR:
3632 {
3633 /* We use regular PLUS_EXPR and MINUS_EXPR for vectors.
3634 ??? This just makes the checker happy and may not be what is
3635 intended. */
3636 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3637 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3638 {
3639 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3640 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3641 {
3642 error ("invalid non-vector operands to vector valued plus");
3643 return true;
3644 }
3645 lhs_type = TREE_TYPE (lhs_type);
3646 rhs1_type = TREE_TYPE (rhs1_type);
3647 rhs2_type = TREE_TYPE (rhs2_type);
3648 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3649 the pointer into the second operand. */
3650 if (POINTER_TYPE_P (rhs2_type))
3651 {
3652 tree tem = rhs1_type;
3653 rhs1_type = rhs2_type;
3654 rhs2_type = tem;
3655 }
3656 goto do_pointer_plus_expr_check;
3657 }
3658 if (POINTER_TYPE_P (lhs_type)
3659 || POINTER_TYPE_P (rhs1_type)
3660 || POINTER_TYPE_P (rhs2_type))
3661 {
3662 error ("invalid (pointer) operands to plus/minus");
3663 return true;
3664 }
3665
3666 /* Continue with generic binary expression handling. */
3667 break;
3668 }
3669
3670 case POINTER_PLUS_EXPR:
3671 {
3672 do_pointer_plus_expr_check:
3673 if (!POINTER_TYPE_P (rhs1_type)
3674 || !useless_type_conversion_p (lhs_type, rhs1_type)
3675 || !ptrofftype_p (rhs2_type))
3676 {
3677 error ("type mismatch in pointer plus expression");
3678 debug_generic_stmt (lhs_type);
3679 debug_generic_stmt (rhs1_type);
3680 debug_generic_stmt (rhs2_type);
3681 return true;
3682 }
3683
3684 return false;
3685 }
3686
3687 case TRUTH_ANDIF_EXPR:
3688 case TRUTH_ORIF_EXPR:
3689 case TRUTH_AND_EXPR:
3690 case TRUTH_OR_EXPR:
3691 case TRUTH_XOR_EXPR:
3692
3693 gcc_unreachable ();
3694
3695 case LT_EXPR:
3696 case LE_EXPR:
3697 case GT_EXPR:
3698 case GE_EXPR:
3699 case EQ_EXPR:
3700 case NE_EXPR:
3701 case UNORDERED_EXPR:
3702 case ORDERED_EXPR:
3703 case UNLT_EXPR:
3704 case UNLE_EXPR:
3705 case UNGT_EXPR:
3706 case UNGE_EXPR:
3707 case UNEQ_EXPR:
3708 case LTGT_EXPR:
3709 /* Comparisons are also binary, but the result type is not
3710 connected to the operand types. */
3711 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3712
3713 case WIDEN_MULT_EXPR:
3714 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3715 return true;
3716 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3717 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3718
3719 case WIDEN_SUM_EXPR:
3720 case VEC_WIDEN_MULT_HI_EXPR:
3721 case VEC_WIDEN_MULT_LO_EXPR:
3722 case VEC_WIDEN_MULT_EVEN_EXPR:
3723 case VEC_WIDEN_MULT_ODD_EXPR:
3724 case VEC_PACK_TRUNC_EXPR:
3725 case VEC_PACK_SAT_EXPR:
3726 case VEC_PACK_FIX_TRUNC_EXPR:
3727 /* FIXME. */
3728 return false;
3729
3730 case MULT_EXPR:
3731 case MULT_HIGHPART_EXPR:
3732 case TRUNC_DIV_EXPR:
3733 case CEIL_DIV_EXPR:
3734 case FLOOR_DIV_EXPR:
3735 case ROUND_DIV_EXPR:
3736 case TRUNC_MOD_EXPR:
3737 case CEIL_MOD_EXPR:
3738 case FLOOR_MOD_EXPR:
3739 case ROUND_MOD_EXPR:
3740 case RDIV_EXPR:
3741 case EXACT_DIV_EXPR:
3742 case MIN_EXPR:
3743 case MAX_EXPR:
3744 case BIT_IOR_EXPR:
3745 case BIT_XOR_EXPR:
3746 case BIT_AND_EXPR:
3747 /* Continue with generic binary expression handling. */
3748 break;
3749
3750 default:
3751 gcc_unreachable ();
3752 }
3753
3754 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3755 || !useless_type_conversion_p (lhs_type, rhs2_type))
3756 {
3757 error ("type mismatch in binary expression");
3758 debug_generic_stmt (lhs_type);
3759 debug_generic_stmt (rhs1_type);
3760 debug_generic_stmt (rhs2_type);
3761 return true;
3762 }
3763
3764 return false;
3765 }
3766
3767 /* Verify a gimple assignment statement STMT with a ternary rhs.
3768 Returns true if anything is wrong. */
3769
3770 static bool
3771 verify_gimple_assign_ternary (gimple stmt)
3772 {
3773 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3774 tree lhs = gimple_assign_lhs (stmt);
3775 tree lhs_type = TREE_TYPE (lhs);
3776 tree rhs1 = gimple_assign_rhs1 (stmt);
3777 tree rhs1_type = TREE_TYPE (rhs1);
3778 tree rhs2 = gimple_assign_rhs2 (stmt);
3779 tree rhs2_type = TREE_TYPE (rhs2);
3780 tree rhs3 = gimple_assign_rhs3 (stmt);
3781 tree rhs3_type = TREE_TYPE (rhs3);
3782
3783 if (!is_gimple_reg (lhs))
3784 {
3785 error ("non-register as LHS of ternary operation");
3786 return true;
3787 }
3788
3789 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3790 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3791 || !is_gimple_val (rhs2)
3792 || !is_gimple_val (rhs3))
3793 {
3794 error ("invalid operands in ternary operation");
3795 return true;
3796 }
3797
3798 /* First handle operations that involve different types. */
3799 switch (rhs_code)
3800 {
3801 case WIDEN_MULT_PLUS_EXPR:
3802 case WIDEN_MULT_MINUS_EXPR:
3803 if ((!INTEGRAL_TYPE_P (rhs1_type)
3804 && !FIXED_POINT_TYPE_P (rhs1_type))
3805 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3806 || !useless_type_conversion_p (lhs_type, rhs3_type)
3807 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3808 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3809 {
3810 error ("type mismatch in widening multiply-accumulate expression");
3811 debug_generic_expr (lhs_type);
3812 debug_generic_expr (rhs1_type);
3813 debug_generic_expr (rhs2_type);
3814 debug_generic_expr (rhs3_type);
3815 return true;
3816 }
3817 break;
3818
3819 case FMA_EXPR:
3820 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3821 || !useless_type_conversion_p (lhs_type, rhs2_type)
3822 || !useless_type_conversion_p (lhs_type, rhs3_type))
3823 {
3824 error ("type mismatch in fused multiply-add expression");
3825 debug_generic_expr (lhs_type);
3826 debug_generic_expr (rhs1_type);
3827 debug_generic_expr (rhs2_type);
3828 debug_generic_expr (rhs3_type);
3829 return true;
3830 }
3831 break;
3832
3833 case COND_EXPR:
3834 case VEC_COND_EXPR:
3835 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3836 || !useless_type_conversion_p (lhs_type, rhs3_type))
3837 {
3838 error ("type mismatch in conditional expression");
3839 debug_generic_expr (lhs_type);
3840 debug_generic_expr (rhs2_type);
3841 debug_generic_expr (rhs3_type);
3842 return true;
3843 }
3844 break;
3845
3846 case VEC_PERM_EXPR:
3847 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3848 || !useless_type_conversion_p (lhs_type, rhs2_type))
3849 {
3850 error ("type mismatch in vector permute expression");
3851 debug_generic_expr (lhs_type);
3852 debug_generic_expr (rhs1_type);
3853 debug_generic_expr (rhs2_type);
3854 debug_generic_expr (rhs3_type);
3855 return true;
3856 }
3857
3858 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3859 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3860 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3861 {
3862 error ("vector types expected in vector permute expression");
3863 debug_generic_expr (lhs_type);
3864 debug_generic_expr (rhs1_type);
3865 debug_generic_expr (rhs2_type);
3866 debug_generic_expr (rhs3_type);
3867 return true;
3868 }
3869
3870 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3871 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3872 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3873 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3874 != TYPE_VECTOR_SUBPARTS (lhs_type))
3875 {
3876 error ("vectors with different element number found "
3877 "in vector permute expression");
3878 debug_generic_expr (lhs_type);
3879 debug_generic_expr (rhs1_type);
3880 debug_generic_expr (rhs2_type);
3881 debug_generic_expr (rhs3_type);
3882 return true;
3883 }
3884
3885 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3886 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3887 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3888 {
3889 error ("invalid mask type in vector permute expression");
3890 debug_generic_expr (lhs_type);
3891 debug_generic_expr (rhs1_type);
3892 debug_generic_expr (rhs2_type);
3893 debug_generic_expr (rhs3_type);
3894 return true;
3895 }
3896
3897 return false;
3898
3899 case DOT_PROD_EXPR:
3900 case REALIGN_LOAD_EXPR:
3901 /* FIXME. */
3902 return false;
3903
3904 default:
3905 gcc_unreachable ();
3906 }
3907 return false;
3908 }
3909
3910 /* Verify a gimple assignment statement STMT with a single rhs.
3911 Returns true if anything is wrong. */
3912
3913 static bool
3914 verify_gimple_assign_single (gimple stmt)
3915 {
3916 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3917 tree lhs = gimple_assign_lhs (stmt);
3918 tree lhs_type = TREE_TYPE (lhs);
3919 tree rhs1 = gimple_assign_rhs1 (stmt);
3920 tree rhs1_type = TREE_TYPE (rhs1);
3921 bool res = false;
3922
3923 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3924 {
3925 error ("non-trivial conversion at assignment");
3926 debug_generic_expr (lhs_type);
3927 debug_generic_expr (rhs1_type);
3928 return true;
3929 }
3930
3931 if (handled_component_p (lhs))
3932 res |= verify_types_in_gimple_reference (lhs, true);
3933
3934 /* Special codes we cannot handle via their class. */
3935 switch (rhs_code)
3936 {
3937 case ADDR_EXPR:
3938 {
3939 tree op = TREE_OPERAND (rhs1, 0);
3940 if (!is_gimple_addressable (op))
3941 {
3942 error ("invalid operand in unary expression");
3943 return true;
3944 }
3945
3946 /* Technically there is no longer a need for matching types, but
3947 gimple hygiene asks for this check. In LTO we can end up
3948 combining incompatible units and thus with addresses of
3949 globals that change their type to a common one. */
3950 if (!in_lto_p
3951 && !types_compatible_p (TREE_TYPE (op),
3952 TREE_TYPE (TREE_TYPE (rhs1)))
3953 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3954 TREE_TYPE (op)))
3955 {
3956 error ("type mismatch in address expression");
3957 debug_generic_stmt (TREE_TYPE (rhs1));
3958 debug_generic_stmt (TREE_TYPE (op));
3959 return true;
3960 }
3961
3962 return verify_types_in_gimple_reference (op, true);
3963 }
3964
3965 /* tcc_reference */
3966 case INDIRECT_REF:
3967 error ("INDIRECT_REF in gimple IL");
3968 return true;
3969
3970 case COMPONENT_REF:
3971 case BIT_FIELD_REF:
3972 case ARRAY_REF:
3973 case ARRAY_RANGE_REF:
3974 case VIEW_CONVERT_EXPR:
3975 case REALPART_EXPR:
3976 case IMAGPART_EXPR:
3977 case TARGET_MEM_REF:
3978 case MEM_REF:
3979 if (!is_gimple_reg (lhs)
3980 && is_gimple_reg_type (TREE_TYPE (lhs)))
3981 {
3982 error ("invalid rhs for gimple memory store");
3983 debug_generic_stmt (lhs);
3984 debug_generic_stmt (rhs1);
3985 return true;
3986 }
3987 return res || verify_types_in_gimple_reference (rhs1, false);
3988
3989 /* tcc_constant */
3990 case SSA_NAME:
3991 case INTEGER_CST:
3992 case REAL_CST:
3993 case FIXED_CST:
3994 case COMPLEX_CST:
3995 case VECTOR_CST:
3996 case STRING_CST:
3997 return res;
3998
3999 /* tcc_declaration */
4000 case CONST_DECL:
4001 return res;
4002 case VAR_DECL:
4003 case PARM_DECL:
4004 if (!is_gimple_reg (lhs)
4005 && !is_gimple_reg (rhs1)
4006 && is_gimple_reg_type (TREE_TYPE (lhs)))
4007 {
4008 error ("invalid rhs for gimple memory store");
4009 debug_generic_stmt (lhs);
4010 debug_generic_stmt (rhs1);
4011 return true;
4012 }
4013 return res;
4014
4015 case CONSTRUCTOR:
4016 case OBJ_TYPE_REF:
4017 case ASSERT_EXPR:
4018 case WITH_SIZE_EXPR:
4019 /* FIXME. */
4020 return res;
4021
4022 default:;
4023 }
4024
4025 return res;
4026 }
4027
4028 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4029 is a problem, otherwise false. */
4030
4031 static bool
4032 verify_gimple_assign (gimple stmt)
4033 {
4034 switch (gimple_assign_rhs_class (stmt))
4035 {
4036 case GIMPLE_SINGLE_RHS:
4037 return verify_gimple_assign_single (stmt);
4038
4039 case GIMPLE_UNARY_RHS:
4040 return verify_gimple_assign_unary (stmt);
4041
4042 case GIMPLE_BINARY_RHS:
4043 return verify_gimple_assign_binary (stmt);
4044
4045 case GIMPLE_TERNARY_RHS:
4046 return verify_gimple_assign_ternary (stmt);
4047
4048 default:
4049 gcc_unreachable ();
4050 }
4051 }
4052
4053 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4054 is a problem, otherwise false. */
4055
4056 static bool
4057 verify_gimple_return (gimple stmt)
4058 {
4059 tree op = gimple_return_retval (stmt);
4060 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4061
4062 /* We cannot test for the presence of return values, as we do not
4063 fix up missing return values from the original source. */
4064 if (op == NULL)
4065 return false;
4066
4067 if (!is_gimple_val (op)
4068 && TREE_CODE (op) != RESULT_DECL)
4069 {
4070 error ("invalid operand in return statement");
4071 debug_generic_stmt (op);
4072 return true;
4073 }
4074
4075 if ((TREE_CODE (op) == RESULT_DECL
4076 && DECL_BY_REFERENCE (op))
4077 || (TREE_CODE (op) == SSA_NAME
4078 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4079 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4080 op = TREE_TYPE (op);
4081
4082 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4083 {
4084 error ("invalid conversion in return statement");
4085 debug_generic_stmt (restype);
4086 debug_generic_stmt (TREE_TYPE (op));
4087 return true;
4088 }
4089
4090 return false;
4091 }
4092
4093
4094 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4095 is a problem, otherwise false. */
4096
4097 static bool
4098 verify_gimple_goto (gimple stmt)
4099 {
4100 tree dest = gimple_goto_dest (stmt);
4101
4102 /* ??? We have two canonical forms of direct goto destinations, a
4103 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4104 if (TREE_CODE (dest) != LABEL_DECL
4105 && (!is_gimple_val (dest)
4106 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4107 {
4108 error ("goto destination is neither a label nor a pointer");
4109 return true;
4110 }
4111
4112 return false;
4113 }
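
/* Illustration (a sketch, not from a real dump): both destination
   forms are accepted by the check above:

     goto lab;      bare LABEL_DECL
     goto ptr_2;    is_gimple_val of pointer type, e.g. the result of
                    taking &&lab for a GNU computed goto.  */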
4114
4115 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4116 is a problem, otherwise false. */
4117
4118 static bool
4119 verify_gimple_switch (gimple stmt)
4120 {
4121 unsigned int i, n;
4122 tree elt, prev_upper_bound = NULL_TREE;
4123 tree index_type, elt_type = NULL_TREE;
4124
4125 if (!is_gimple_val (gimple_switch_index (stmt)))
4126 {
4127 error ("invalid operand to switch statement");
4128 debug_generic_stmt (gimple_switch_index (stmt));
4129 return true;
4130 }
4131
4132 index_type = TREE_TYPE (gimple_switch_index (stmt));
4133 if (! INTEGRAL_TYPE_P (index_type))
4134 {
4135 error ("non-integral type in switch statement");
4136 debug_generic_expr (index_type);
4137 return true;
4138 }
4139
4140 elt = gimple_switch_default_label (stmt);
4141 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4142 {
4143 error ("invalid default case label in switch statement");
4144 debug_generic_expr (elt);
4145 return true;
4146 }
4147
4148 n = gimple_switch_num_labels (stmt);
4149 for (i = 1; i < n; i++)
4150 {
4151 elt = gimple_switch_label (stmt, i);
4152
4153 if (! CASE_LOW (elt))
4154 {
4155 error ("invalid case label in switch statement");
4156 debug_generic_expr (elt);
4157 return true;
4158 }
4159 if (CASE_HIGH (elt)
4160 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4161 {
4162 error ("invalid case range in switch statement");
4163 debug_generic_expr (elt);
4164 return true;
4165 }
4166
4167 if (elt_type)
4168 {
4169 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4170 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4171 {
4172 error ("type mismatch for case label in switch statement");
4173 debug_generic_expr (elt);
4174 return true;
4175 }
4176 }
4177 else
4178 {
4179 elt_type = TREE_TYPE (CASE_LOW (elt));
4180 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4181 {
4182 error ("type precision mismatch in switch statement");
4183 return true;
4184 }
4185 }
4186
4187 if (prev_upper_bound)
4188 {
4189 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4190 {
4191 error ("case labels not sorted in switch statement");
4192 return true;
4193 }
4194 }
4195
4196 prev_upper_bound = CASE_HIGH (elt);
4197 if (! prev_upper_bound)
4198 prev_upper_bound = CASE_LOW (elt);
4199 }
4200
4201 return false;
4202 }
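
/* As an illustrative sketch, a GIMPLE_SWITCH accepted by the checks
   above could look like this in a dump (names and numbers invented):

     switch (i_1) <default: <L4>, case 1: <L0>, case 4 ... 6: <L1>,
                   case 9: <L2>>

   The default label carries neither CASE_LOW nor CASE_HIGH, every
   other label has a CASE_LOW, ranges satisfy low < high, all labels
   share one type, and the low bounds are strictly increasing.  */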
4203
4204 /* Verify a gimple debug statement STMT.
4205 Returns true if anything is wrong. */
4206
4207 static bool
4208 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4209 {
4210 /* There isn't much that could be wrong in a gimple debug stmt. A
4211 gimple debug bind stmt, for example, maps a tree (usually a
4212 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4213 or member of an aggregate type) to another tree that can be an
4214 arbitrary expression. These stmts expand into debug insns, and
4215 are converted to debug notes by var-tracking.c. */
4216 return false;
4217 }
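
/* For reference, a typical debug bind stmt as printed in dumps looks
   like (illustrative names):

     # DEBUG x => y_3 + 1  */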
4218
4219 /* Verify a gimple label statement STMT.
4220 Returns true if anything is wrong. */
4221
4222 static bool
4223 verify_gimple_label (gimple stmt)
4224 {
4225 tree decl = gimple_label_label (stmt);
4226 int uid;
4227 bool err = false;
4228
4229 if (TREE_CODE (decl) != LABEL_DECL)
4230 return true;
4231
4232 uid = LABEL_DECL_UID (decl);
4233 if (cfun->cfg
4234 && (uid == -1
4235 || VEC_index (basic_block,
4236 label_to_block_map, uid) != gimple_bb (stmt)))
4237 {
4238 error ("incorrect entry in label_to_block_map");
4239 err |= true;
4240 }
4241
4242 uid = EH_LANDING_PAD_NR (decl);
4243 if (uid)
4244 {
4245 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4246 if (decl != lp->post_landing_pad)
4247 {
4248 error ("incorrect setting of landing pad number");
4249 err |= true;
4250 }
4251 }
4252
4253 return err;
4254 }
4255
4256 /* Verify the GIMPLE statement STMT. Returns true if there is an
4257 error, otherwise false. */
4258
4259 static bool
4260 verify_gimple_stmt (gimple stmt)
4261 {
4262 switch (gimple_code (stmt))
4263 {
4264 case GIMPLE_ASSIGN:
4265 return verify_gimple_assign (stmt);
4266
4267 case GIMPLE_LABEL:
4268 return verify_gimple_label (stmt);
4269
4270 case GIMPLE_CALL:
4271 return verify_gimple_call (stmt);
4272
4273 case GIMPLE_COND:
4274 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4275 {
4276 error ("invalid comparison code in gimple cond");
4277 return true;
4278 }
4279 if (!(!gimple_cond_true_label (stmt)
4280 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4281 || !(!gimple_cond_false_label (stmt)
4282 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4283 {
4284 error ("invalid labels in gimple cond");
4285 return true;
4286 }
4287
4288 return verify_gimple_comparison (boolean_type_node,
4289 gimple_cond_lhs (stmt),
4290 gimple_cond_rhs (stmt));
4291
4292 case GIMPLE_GOTO:
4293 return verify_gimple_goto (stmt);
4294
4295 case GIMPLE_SWITCH:
4296 return verify_gimple_switch (stmt);
4297
4298 case GIMPLE_RETURN:
4299 return verify_gimple_return (stmt);
4300
4301 case GIMPLE_ASM:
4302 return false;
4303
4304 case GIMPLE_TRANSACTION:
4305 return verify_gimple_transaction (stmt);
4306
4307 /* Tuples that do not have tree operands. */
4308 case GIMPLE_NOP:
4309 case GIMPLE_PREDICT:
4310 case GIMPLE_RESX:
4311 case GIMPLE_EH_DISPATCH:
4312 case GIMPLE_EH_MUST_NOT_THROW:
4313 return false;
4314
4315 CASE_GIMPLE_OMP:
4316 /* OpenMP directives are validated by the FE and never operated
4317 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4318 non-gimple expressions when the main index variable has had
4319 its address taken. This does not affect the loop itself
4320 because the header of a GIMPLE_OMP_FOR is merely used to determine
4321 how to set up the parallel iteration. */
4322 return false;
4323
4324 case GIMPLE_DEBUG:
4325 return verify_gimple_debug (stmt);
4326
4327 default:
4328 gcc_unreachable ();
4329 }
4330 }
4331
4332 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4333 and false otherwise. */
4334
4335 static bool
4336 verify_gimple_phi (gimple phi)
4337 {
4338 bool err = false;
4339 unsigned i;
4340 tree phi_result = gimple_phi_result (phi);
4341 bool virtual_p;
4342
4343 if (!phi_result)
4344 {
4345 error ("invalid PHI result");
4346 return true;
4347 }
4348
4349 virtual_p = !is_gimple_reg (phi_result);
4350 if (TREE_CODE (phi_result) != SSA_NAME
4351 || (virtual_p
4352 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4353 {
4354 error ("invalid PHI result");
4355 err = true;
4356 }
4357
4358 for (i = 0; i < gimple_phi_num_args (phi); i++)
4359 {
4360 tree t = gimple_phi_arg_def (phi, i);
4361
4362 if (!t)
4363 {
4364 error ("missing PHI def");
4365 err |= true;
4366 continue;
4367 }
4368 /* Addressable variables do have SSA_NAMEs but they
4369 are not considered gimple values. */
4370 else if ((TREE_CODE (t) == SSA_NAME
4371 && virtual_p != !is_gimple_reg (t))
4372 || (virtual_p
4373 && (TREE_CODE (t) != SSA_NAME
4374 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4375 || (!virtual_p
4376 && !is_gimple_val (t)))
4377 {
4378 error ("invalid PHI argument");
4379 debug_generic_expr (t);
4380 err |= true;
4381 }
4382 #ifdef ENABLE_TYPES_CHECKING
4383 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4384 {
4385 error ("incompatible types in PHI argument %u", i);
4386 debug_generic_stmt (TREE_TYPE (phi_result));
4387 debug_generic_stmt (TREE_TYPE (t));
4388 err |= true;
4389 }
4390 #endif
4391 }
4392
4393 return err;
4394 }
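
/* Sketch of what the checks above accept (invented names): a
   non-virtual PHI such as

     x_4 = PHI <x_2(3), 0(4)>

   where every argument is a gimple value, and a virtual PHI such as

     .MEM_5 = PHI <.MEM_2(3), .MEM_3(4)>

   where the result and all arguments are names of the single virtual
   operand.  */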
4395
4396 /* Verify the GIMPLE statements inside the sequence STMTS. */
4397
4398 static bool
4399 verify_gimple_in_seq_2 (gimple_seq stmts)
4400 {
4401 gimple_stmt_iterator ittr;
4402 bool err = false;
4403
4404 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4405 {
4406 gimple stmt = gsi_stmt (ittr);
4407
4408 switch (gimple_code (stmt))
4409 {
4410 case GIMPLE_BIND:
4411 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4412 break;
4413
4414 case GIMPLE_TRY:
4415 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4416 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4417 break;
4418
4419 case GIMPLE_EH_FILTER:
4420 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4421 break;
4422
4423 case GIMPLE_EH_ELSE:
4424 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4425 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4426 break;
4427
4428 case GIMPLE_CATCH:
4429 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4430 break;
4431
4432 case GIMPLE_TRANSACTION:
4433 err |= verify_gimple_transaction (stmt);
4434 break;
4435
4436 default:
4437 {
4438 bool err2 = verify_gimple_stmt (stmt);
4439 if (err2)
4440 debug_gimple_stmt (stmt);
4441 err |= err2;
4442 }
4443 }
4444 }
4445
4446 return err;
4447 }
4448
4449 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4450 is a problem, otherwise false. */
4451
4452 static bool
4453 verify_gimple_transaction (gimple stmt)
4454 {
4455 tree lab = gimple_transaction_label (stmt);
4456 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4457 return true;
4458 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4459 }
4460
4461
4462 /* Verify the GIMPLE statements inside the statement list STMTS. */
4463
4464 DEBUG_FUNCTION void
4465 verify_gimple_in_seq (gimple_seq stmts)
4466 {
4467 timevar_push (TV_TREE_STMT_VERIFY);
4468 if (verify_gimple_in_seq_2 (stmts))
4469 internal_error ("verify_gimple failed");
4470 timevar_pop (TV_TREE_STMT_VERIFY);
4471 }
4472
4473 /* Return true when the tree node T can be shared. */
4474
4475 bool
4476 tree_node_can_be_shared (tree t)
4477 {
4478 if (IS_TYPE_OR_DECL_P (t)
4479 || is_gimple_min_invariant (t)
4480 || TREE_CODE (t) == SSA_NAME
4481 || t == error_mark_node
4482 || TREE_CODE (t) == IDENTIFIER_NODE)
4483 return true;
4484
4485 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4486 return true;
4487
4488 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4489 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4490 || TREE_CODE (t) == COMPONENT_REF
4491 || TREE_CODE (t) == REALPART_EXPR
4492 || TREE_CODE (t) == IMAGPART_EXPR)
4493 t = TREE_OPERAND (t, 0);
4494
4495 if (DECL_P (t))
4496 return true;
4497
4498 return false;
4499 }
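
/* For example (invented names): the constant 42, an SSA name i_3 and
   the bare declaration 'a' may be shared between statements, and so
   may a.f or a[2], whereas a[i_3] (non-invariant index) must be
   unshared.  */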
4500
4501 /* Called via walk_gimple_stmt. Verify tree sharing. */
4502
4503 static tree
4504 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4505 {
4506 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4507 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4508
4509 if (tree_node_can_be_shared (*tp))
4510 {
4511 *walk_subtrees = false;
4512 return NULL;
4513 }
4514
4515 if (pointer_set_insert (visited, *tp))
4516 return *tp;
4517
4518 return NULL;
4519 }
4520
4521 static bool eh_error_found;
4522 static int
4523 verify_eh_throw_stmt_node (void **slot, void *data)
4524 {
4525 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4526 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4527
4528 if (!pointer_set_contains (visited, node->stmt))
4529 {
4530 error ("dead STMT in EH table");
4531 debug_gimple_stmt (node->stmt);
4532 eh_error_found = true;
4533 }
4534 return 1;
4535 }
4536
4537 /* Verify the GIMPLE statements in the CFG of FN. */
4538
4539 DEBUG_FUNCTION void
4540 verify_gimple_in_cfg (struct function *fn)
4541 {
4542 basic_block bb;
4543 bool err = false;
4544 struct pointer_set_t *visited, *visited_stmts;
4545
4546 timevar_push (TV_TREE_STMT_VERIFY);
4547 visited = pointer_set_create ();
4548 visited_stmts = pointer_set_create ();
4549
4550 FOR_EACH_BB_FN (bb, fn)
4551 {
4552 gimple_stmt_iterator gsi;
4553
4554 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4555 {
4556 gimple phi = gsi_stmt (gsi);
4557 bool err2 = false;
4558 unsigned i;
4559
4560 pointer_set_insert (visited_stmts, phi);
4561
4562 if (gimple_bb (phi) != bb)
4563 {
4564 error ("gimple_bb (phi) is set to a wrong basic block");
4565 err2 = true;
4566 }
4567
4568 err2 |= verify_gimple_phi (phi);
4569
4570 for (i = 0; i < gimple_phi_num_args (phi); i++)
4571 {
4572 tree arg = gimple_phi_arg_def (phi, i);
4573 tree addr = walk_tree (&arg, verify_node_sharing, visited, NULL);
4574 if (addr)
4575 {
4576 error ("incorrect sharing of tree nodes");
4577 debug_generic_expr (addr);
4578 err2 |= true;
4579 }
4580 }
4581
4582 if (err2)
4583 debug_gimple_stmt (phi);
4584 err |= err2;
4585 }
4586
4587 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4588 {
4589 gimple stmt = gsi_stmt (gsi);
4590 bool err2 = false;
4591 struct walk_stmt_info wi;
4592 tree addr;
4593 int lp_nr;
4594
4595 pointer_set_insert (visited_stmts, stmt);
4596
4597 if (gimple_bb (stmt) != bb)
4598 {
4599 error ("gimple_bb (stmt) is set to a wrong basic block");
4600 err2 = true;
4601 }
4602
4603 err2 |= verify_gimple_stmt (stmt);
4604
4605 memset (&wi, 0, sizeof (wi));
4606 wi.info = (void *) visited;
4607 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4608 if (addr)
4609 {
4610 error ("incorrect sharing of tree nodes");
4611 debug_generic_expr (addr);
4612 err2 |= true;
4613 }
4614
4615 /* ??? Instead of not checking these stmts at all, the walker
4616 should know its context via WI. */
4617 if (!is_gimple_debug (stmt)
4618 && !is_gimple_omp (stmt))
4619 {
4620 memset (&wi, 0, sizeof (wi));
4621 addr = walk_gimple_op (stmt, verify_expr, &wi);
4622 if (addr)
4623 {
4624 debug_generic_expr (addr);
4625 inform (gimple_location (stmt), "in statement");
4626 err2 |= true;
4627 }
4628 }
4629
4630 /* If the statement is marked as part of an EH region, then it is
4631 expected that the statement could throw. Verify that when we
4632 have optimizations that simplify statements such that we prove
4633 that they cannot throw, that we update other data structures
4634 to match. */
4635 lp_nr = lookup_stmt_eh_lp (stmt);
4636 if (lp_nr != 0)
4637 {
4638 if (!stmt_could_throw_p (stmt))
4639 {
4640 error ("statement marked for throw, but doesn%'t");
4641 err2 |= true;
4642 }
4643 else if (lp_nr > 0
4644 && !gsi_one_before_end_p (gsi)
4645 && stmt_can_throw_internal (stmt))
4646 {
4647 error ("statement marked for throw in middle of block");
4648 err2 |= true;
4649 }
4650 }
4651
4652 if (err2)
4653 debug_gimple_stmt (stmt);
4654 err |= err2;
4655 }
4656 }
4657
4658 eh_error_found = false;
4659 if (get_eh_throw_stmt_table (cfun))
4660 htab_traverse (get_eh_throw_stmt_table (cfun),
4661 verify_eh_throw_stmt_node,
4662 visited_stmts);
4663
4664 if (err || eh_error_found)
4665 internal_error ("verify_gimple failed");
4666
4667 pointer_set_destroy (visited);
4668 pointer_set_destroy (visited_stmts);
4669 verify_histograms ();
4670 timevar_pop (TV_TREE_STMT_VERIFY);
4671 }
4672
4673
4674 /* Verifies that the flow information is OK. */
4675
4676 static int
4677 gimple_verify_flow_info (void)
4678 {
4679 int err = 0;
4680 basic_block bb;
4681 gimple_stmt_iterator gsi;
4682 gimple stmt;
4683 edge e;
4684 edge_iterator ei;
4685
4686 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4687 {
4688 error ("ENTRY_BLOCK has IL associated with it");
4689 err = 1;
4690 }
4691
4692 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4693 {
4694 error ("EXIT_BLOCK has IL associated with it");
4695 err = 1;
4696 }
4697
4698 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4699 if (e->flags & EDGE_FALLTHRU)
4700 {
4701 error ("fallthru to exit from bb %d", e->src->index);
4702 err = 1;
4703 }
4704
4705 FOR_EACH_BB (bb)
4706 {
4707 bool found_ctrl_stmt = false;
4708
4709 stmt = NULL;
4710
4711 /* Skip labels on the start of basic block. */
4712 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4713 {
4714 tree label;
4715 gimple prev_stmt = stmt;
4716
4717 stmt = gsi_stmt (gsi);
4718
4719 if (gimple_code (stmt) != GIMPLE_LABEL)
4720 break;
4721
4722 label = gimple_label_label (stmt);
4723 if (prev_stmt && DECL_NONLOCAL (label))
4724 {
4725 error ("nonlocal label ");
4726 print_generic_expr (stderr, label, 0);
4727 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4728 bb->index);
4729 err = 1;
4730 }
4731
4732 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4733 {
4734 error ("EH landing pad label ");
4735 print_generic_expr (stderr, label, 0);
4736 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4737 bb->index);
4738 err = 1;
4739 }
4740
4741 if (label_to_block (label) != bb)
4742 {
4743 error ("label ");
4744 print_generic_expr (stderr, label, 0);
4745 fprintf (stderr, " to block does not match in bb %d",
4746 bb->index);
4747 err = 1;
4748 }
4749
4750 if (decl_function_context (label) != current_function_decl)
4751 {
4752 error ("label ");
4753 print_generic_expr (stderr, label, 0);
4754 fprintf (stderr, " has incorrect context in bb %d",
4755 bb->index);
4756 err = 1;
4757 }
4758 }
4759
4760 /* Verify that body of basic block BB is free of control flow. */
4761 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4762 {
4763 gimple stmt = gsi_stmt (gsi);
4764
4765 if (found_ctrl_stmt)
4766 {
4767 error ("control flow in the middle of basic block %d",
4768 bb->index);
4769 err = 1;
4770 }
4771
4772 if (stmt_ends_bb_p (stmt))
4773 found_ctrl_stmt = true;
4774
4775 if (gimple_code (stmt) == GIMPLE_LABEL)
4776 {
4777 error ("label ");
4778 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4779 fprintf (stderr, " in the middle of basic block %d", bb->index);
4780 err = 1;
4781 }
4782 }
4783
4784 gsi = gsi_last_bb (bb);
4785 if (gsi_end_p (gsi))
4786 continue;
4787
4788 stmt = gsi_stmt (gsi);
4789
4790 if (gimple_code (stmt) == GIMPLE_LABEL)
4791 continue;
4792
4793 err |= verify_eh_edges (stmt);
4794
4795 if (is_ctrl_stmt (stmt))
4796 {
4797 FOR_EACH_EDGE (e, ei, bb->succs)
4798 if (e->flags & EDGE_FALLTHRU)
4799 {
4800 error ("fallthru edge after a control statement in bb %d",
4801 bb->index);
4802 err = 1;
4803 }
4804 }
4805
4806 if (gimple_code (stmt) != GIMPLE_COND)
4807 {
4808 /* Verify that no edges have EDGE_TRUE_VALUE or EDGE_FALSE_VALUE
4809 set after anything other than a GIMPLE_COND statement. */
4810 FOR_EACH_EDGE (e, ei, bb->succs)
4811 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4812 {
4813 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4814 bb->index);
4815 err = 1;
4816 }
4817 }
4818
4819 switch (gimple_code (stmt))
4820 {
4821 case GIMPLE_COND:
4822 {
4823 edge true_edge;
4824 edge false_edge;
4825
4826 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4827
4828 if (!true_edge
4829 || !false_edge
4830 || !(true_edge->flags & EDGE_TRUE_VALUE)
4831 || !(false_edge->flags & EDGE_FALSE_VALUE)
4832 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4833 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4834 || EDGE_COUNT (bb->succs) >= 3)
4835 {
4836 error ("wrong outgoing edge flags at end of bb %d",
4837 bb->index);
4838 err = 1;
4839 }
4840 }
4841 break;
4842
4843 case GIMPLE_GOTO:
4844 if (simple_goto_p (stmt))
4845 {
4846 error ("explicit goto at end of bb %d", bb->index);
4847 err = 1;
4848 }
4849 else
4850 {
4851 /* FIXME. We should double check that the labels in the
4852 destination blocks have their address taken. */
4853 FOR_EACH_EDGE (e, ei, bb->succs)
4854 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4855 | EDGE_FALSE_VALUE))
4856 || !(e->flags & EDGE_ABNORMAL))
4857 {
4858 error ("wrong outgoing edge flags at end of bb %d",
4859 bb->index);
4860 err = 1;
4861 }
4862 }
4863 break;
4864
4865 case GIMPLE_CALL:
4866 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4867 break;
4868 /* ... fallthru ... */
4869 case GIMPLE_RETURN:
4870 if (!single_succ_p (bb)
4871 || (single_succ_edge (bb)->flags
4872 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4873 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4874 {
4875 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4876 err = 1;
4877 }
4878 if (single_succ (bb) != EXIT_BLOCK_PTR)
4879 {
4880 error ("return edge does not point to exit in bb %d",
4881 bb->index);
4882 err = 1;
4883 }
4884 break;
4885
4886 case GIMPLE_SWITCH:
4887 {
4888 tree prev;
4889 edge e;
4890 size_t i, n;
4891
4892 n = gimple_switch_num_labels (stmt);
4893
4894 /* Mark all the destination basic blocks. */
4895 for (i = 0; i < n; ++i)
4896 {
4897 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4898 basic_block label_bb = label_to_block (lab);
4899 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4900 label_bb->aux = (void *)1;
4901 }
4902
4903 /* Verify that the case labels are sorted. */
4904 prev = gimple_switch_label (stmt, 0);
4905 for (i = 1; i < n; ++i)
4906 {
4907 tree c = gimple_switch_label (stmt, i);
4908 if (!CASE_LOW (c))
4909 {
4910 error ("found default case not at the start of "
4911 "case vector");
4912 err = 1;
4913 continue;
4914 }
4915 if (CASE_LOW (prev)
4916 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4917 {
4918 error ("case labels not sorted: ");
4919 print_generic_expr (stderr, prev, 0);
4920 fprintf (stderr, " is greater than ");
4921 print_generic_expr (stderr, c, 0);
4922 fprintf (stderr, " but comes before it.\n");
4923 err = 1;
4924 }
4925 prev = c;
4926 }
4927 /* VRP will remove the default case if it can prove it will
4928 never be executed. So do not verify there always exists
4929 a default case here. */
4930
4931 FOR_EACH_EDGE (e, ei, bb->succs)
4932 {
4933 if (!e->dest->aux)
4934 {
4935 error ("extra outgoing edge %d->%d",
4936 bb->index, e->dest->index);
4937 err = 1;
4938 }
4939
4940 e->dest->aux = (void *)2;
4941 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4942 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4943 {
4944 error ("wrong outgoing edge flags at end of bb %d",
4945 bb->index);
4946 err = 1;
4947 }
4948 }
4949
4950 /* Check that we have all of them. */
4951 for (i = 0; i < n; ++i)
4952 {
4953 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4954 basic_block label_bb = label_to_block (lab);
4955
4956 if (label_bb->aux != (void *)2)
4957 {
4958 error ("missing edge %i->%i", bb->index, label_bb->index);
4959 err = 1;
4960 }
4961 }
4962
4963 FOR_EACH_EDGE (e, ei, bb->succs)
4964 e->dest->aux = (void *)0;
4965 }
4966 break;
4967
4968 case GIMPLE_EH_DISPATCH:
4969 err |= verify_eh_dispatch_edge (stmt);
4970 break;
4971
4972 default:
4973 break;
4974 }
4975 }
4976
4977 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4978 verify_dominators (CDI_DOMINATORS);
4979
4980 return err;
4981 }
4982
4983
4984 /* Updates phi nodes after creating a forwarder block joined
4985 by edge FALLTHRU. */
4986
4987 static void
4988 gimple_make_forwarder_block (edge fallthru)
4989 {
4990 edge e;
4991 edge_iterator ei;
4992 basic_block dummy, bb;
4993 tree var;
4994 gimple_stmt_iterator gsi;
4995
4996 dummy = fallthru->src;
4997 bb = fallthru->dest;
4998
4999 if (single_pred_p (bb))
5000 return;
5001
5002 /* If we redirected a branch we must create new PHI nodes at the
5003 start of BB. */
5004 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5005 {
5006 gimple phi, new_phi;
5007
5008 phi = gsi_stmt (gsi);
5009 var = gimple_phi_result (phi);
5010 new_phi = create_phi_node (var, bb);
5011 SSA_NAME_DEF_STMT (var) = new_phi;
5012 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
5013 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5014 UNKNOWN_LOCATION);
5015 }
5016
5017 /* Add the arguments we have stored on edges. */
5018 FOR_EACH_EDGE (e, ei, bb->preds)
5019 {
5020 if (e == fallthru)
5021 continue;
5022
5023 flush_pending_stmts (e);
5024 }
5025 }
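
/* Sketch (invented names): if BB originally had

     x_1 = PHI <a_2(E1), b_3(E2)>

   and E1 is kept on the new forwarder DUMMY while E2 is redirected
   to BB, the result is

     DUMMY:  x_4 = PHI <a_2(E1)>
     BB:     x_1 = PHI <x_4(FALLTHRU), b_3(E2)>  */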
5026
5027
5028 /* Return a non-special label at the head of basic block BB.
5029 Create one if it doesn't exist. */
5030
5031 tree
5032 gimple_block_label (basic_block bb)
5033 {
5034 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5035 bool first = true;
5036 tree label;
5037 gimple stmt;
5038
5039 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5040 {
5041 stmt = gsi_stmt (i);
5042 if (gimple_code (stmt) != GIMPLE_LABEL)
5043 break;
5044 label = gimple_label_label (stmt);
5045 if (!DECL_NONLOCAL (label))
5046 {
5047 if (!first)
5048 gsi_move_before (&i, &s);
5049 return label;
5050 }
5051 }
5052
5053 label = create_artificial_label (UNKNOWN_LOCATION);
5054 stmt = gimple_build_label (label);
5055 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5056 return label;
5057 }
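
/* Typical use (a sketch): when redirecting a jump to DEST, obtain a
   label that can serve as the rewritten jump target, e.g.

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   as done for GIMPLE_SWITCH in gimple_redirect_edge_and_branch
   below.  */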
5058
5059
5060 /* Attempt to perform edge redirection by replacing a possibly complex
5061 jump instruction by a goto or by removing the jump completely.
5062 This can apply only if all edges now point to the same block. The
5063 parameters and return values are equivalent to
5064 redirect_edge_and_branch. */
5065
5066 static edge
5067 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5068 {
5069 basic_block src = e->src;
5070 gimple_stmt_iterator i;
5071 gimple stmt;
5072
5073 /* We can replace or remove a complex jump only when we have exactly
5074 two edges. */
5075 if (EDGE_COUNT (src->succs) != 2
5076 /* Verify that all targets will be TARGET. Specifically, the
5077 edge that is not E must also go to TARGET. */
5078 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5079 return NULL;
5080
5081 i = gsi_last_bb (src);
5082 if (gsi_end_p (i))
5083 return NULL;
5084
5085 stmt = gsi_stmt (i);
5086
5087 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5088 {
5089 gsi_remove (&i, true);
5090 e = ssa_redirect_edge (e, target);
5091 e->flags = EDGE_FALLTHRU;
5092 return e;
5093 }
5094
5095 return NULL;
5096 }
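
/* Illustration (a sketch): if SRC ends in

     if (cond_1) goto <L1>; else goto <L1>;

   and TARGET is the block labeled L1, the conditional is removed and
   E becomes a plain fallthru edge to TARGET.  The same applies to a
   GIMPLE_SWITCH whose two outgoing edges both reach TARGET.  */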
5097
5098
5099 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5100 edge representing the redirected branch. */
5101
5102 static edge
5103 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5104 {
5105 basic_block bb = e->src;
5106 gimple_stmt_iterator gsi;
5107 edge ret;
5108 gimple stmt;
5109
5110 if (e->flags & EDGE_ABNORMAL)
5111 return NULL;
5112
5113 if (e->dest == dest)
5114 return NULL;
5115
5116 if (e->flags & EDGE_EH)
5117 return redirect_eh_edge (e, dest);
5118
5119 if (e->src != ENTRY_BLOCK_PTR)
5120 {
5121 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5122 if (ret)
5123 return ret;
5124 }
5125
5126 gsi = gsi_last_bb (bb);
5127 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5128
5129 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5130 {
5131 case GIMPLE_COND:
5132 /* For COND_EXPR, we only need to redirect the edge. */
5133 break;
5134
5135 case GIMPLE_GOTO:
5136 /* No non-abnormal edges should lead from a non-simple goto, and
5137 simple ones should be represented implicitly. */
5138 gcc_unreachable ();
5139
5140 case GIMPLE_SWITCH:
5141 {
5142 tree label = gimple_block_label (dest);
5143 tree cases = get_cases_for_edge (e, stmt);
5144
5145 /* If we have a list of cases associated with E, then use it
5146 as it's a lot faster than walking the entire case vector. */
5147 if (cases)
5148 {
5149 edge e2 = find_edge (e->src, dest);
5150 tree last, first;
5151
5152 first = cases;
5153 while (cases)
5154 {
5155 last = cases;
5156 CASE_LABEL (cases) = label;
5157 cases = CASE_CHAIN (cases);
5158 }
5159
5160 /* If there was already an edge in the CFG, then we need
5161 to move all the cases associated with E to E2. */
5162 if (e2)
5163 {
5164 tree cases2 = get_cases_for_edge (e2, stmt);
5165
5166 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5167 CASE_CHAIN (cases2) = first;
5168 }
5169 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5170 }
5171 else
5172 {
5173 size_t i, n = gimple_switch_num_labels (stmt);
5174
5175 for (i = 0; i < n; i++)
5176 {
5177 tree elt = gimple_switch_label (stmt, i);
5178 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5179 CASE_LABEL (elt) = label;
5180 }
5181 }
5182 }
5183 break;
5184
5185 case GIMPLE_ASM:
5186 {
5187 int i, n = gimple_asm_nlabels (stmt);
5188 tree label = NULL;
5189
5190 for (i = 0; i < n; ++i)
5191 {
5192 tree cons = gimple_asm_label_op (stmt, i);
5193 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5194 {
5195 if (!label)
5196 label = gimple_block_label (dest);
5197 TREE_VALUE (cons) = label;
5198 }
5199 }
5200
5201 /* If we didn't find any label matching the former edge in the
5202 asm labels, we must be redirecting the fallthrough
5203 edge. */
5204 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5205 }
5206 break;
5207
5208 case GIMPLE_RETURN:
5209 gsi_remove (&gsi, true);
5210 e->flags |= EDGE_FALLTHRU;
5211 break;
5212
5213 case GIMPLE_OMP_RETURN:
5214 case GIMPLE_OMP_CONTINUE:
5215 case GIMPLE_OMP_SECTIONS_SWITCH:
5216 case GIMPLE_OMP_FOR:
5217 /* The edges from OMP constructs can be simply redirected. */
5218 break;
5219
5220 case GIMPLE_EH_DISPATCH:
5221 if (!(e->flags & EDGE_FALLTHRU))
5222 redirect_eh_dispatch_edge (stmt, e, dest);
5223 break;
5224
5225 case GIMPLE_TRANSACTION:
5226 /* The ABORT edge has a stored label associated with it; otherwise
5227 the edges are simply redirectable. */
5228 if (e->flags == 0)
5229 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5230 break;
5231
5232 default:
5233 /* Otherwise it must be a fallthru edge, and we don't need to
5234 do anything besides redirecting it. */
5235 gcc_assert (e->flags & EDGE_FALLTHRU);
5236 break;
5237 }
5238
5239 /* Update/insert PHI nodes as necessary. */
5240
5241 /* Now update the edges in the CFG. */
5242 e = ssa_redirect_edge (e, dest);
5243
5244 return e;
5245 }
5246
5247 /* Returns true if it is possible to remove edge E by redirecting
5248 it to the destination of the other edge from E->src. */
5249
5250 static bool
5251 gimple_can_remove_branch_p (const_edge e)
5252 {
5253 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5254 return false;
5255
5256 return true;
5257 }
5258
5259 /* Simple wrapper, as we can always redirect fallthru edges. */
5260
5261 static basic_block
5262 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5263 {
5264 e = gimple_redirect_edge_and_branch (e, dest);
5265 gcc_assert (e);
5266
5267 return NULL;
5268 }
5269
5270
5271 /* Splits basic block BB after statement STMT (but at least after the
5272 labels). If STMT is NULL, BB is split just after the labels. */
5273
5274 static basic_block
5275 gimple_split_block (basic_block bb, void *stmt)
5276 {
5277 gimple_stmt_iterator gsi;
5278 gimple_stmt_iterator gsi_tgt;
5279 gimple act;
5280 gimple_seq list;
5281 basic_block new_bb;
5282 edge e;
5283 edge_iterator ei;
5284
5285 new_bb = create_empty_bb (bb);
5286
5287 /* Redirect the outgoing edges. */
5288 new_bb->succs = bb->succs;
5289 bb->succs = NULL;
5290 FOR_EACH_EDGE (e, ei, new_bb->succs)
5291 e->src = new_bb;
5292
5293 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5294 stmt = NULL;
5295
5296 /* Move everything from GSI to the new basic block. */
5297 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5298 {
5299 act = gsi_stmt (gsi);
5300 if (gimple_code (act) == GIMPLE_LABEL)
5301 continue;
5302
5303 if (!stmt)
5304 break;
5305
5306 if (stmt == act)
5307 {
5308 gsi_next (&gsi);
5309 break;
5310 }
5311 }
5312
5313 if (gsi_end_p (gsi))
5314 return new_bb;
5315
5316 /* Split the statement list - avoid re-creating new containers as this
5317 brings ugly quadratic memory consumption in the inliner.
5318 (We are still quadratic since we need to update stmt BB pointers,
5319 sadly.) */
5320 gsi_split_seq_before (&gsi, &list);
5321 set_bb_seq (new_bb, list);
5322 for (gsi_tgt = gsi_start (list);
5323 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5324 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5325
5326 return new_bb;
5327 }
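
/* Sketch (invented statements): splitting BB { s1; s2; s3; } after S2
   leaves BB { s1; s2; } and returns NEW_BB { s3; }, with all of BB's
   former outgoing edges now leaving NEW_BB; the generic split_block
   wrapper in cfghooks.c is then expected to add the BB -> NEW_BB
   fallthru edge.  */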
5328
5329
5330 /* Moves basic block BB after block AFTER. */
5331
5332 static bool
5333 gimple_move_block_after (basic_block bb, basic_block after)
5334 {
5335 if (bb->prev_bb == after)
5336 return true;
5337
5338 unlink_block (bb);
5339 link_block (bb, after);
5340
5341 return true;
5342 }
5343
5344
5345 /* Return true if basic_block can be duplicated. */
5346
5347 static bool
5348 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5349 {
5350 return true;
5351 }
5352
5353 /* Create a duplicate of the basic block BB. NOTE: This does not
5354 preserve SSA form. */
5355
5356 static basic_block
5357 gimple_duplicate_bb (basic_block bb)
5358 {
5359 basic_block new_bb;
5360 gimple_stmt_iterator gsi, gsi_tgt;
5361 gimple_seq phis = phi_nodes (bb);
5362 gimple phi, stmt, copy;
5363
5364 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5365
5366 /* Copy the PHI nodes. We ignore PHI node arguments here because
5367 the incoming edges have not been set up yet. */
5368 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5369 {
5370 phi = gsi_stmt (gsi);
5371 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5372 create_new_def_for (gimple_phi_result (copy), copy,
5373 gimple_phi_result_ptr (copy));
5374 }
5375
5376 gsi_tgt = gsi_start_bb (new_bb);
5377 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5378 {
5379 def_operand_p def_p;
5380 ssa_op_iter op_iter;
5381 tree lhs;
5382
5383 stmt = gsi_stmt (gsi);
5384 if (gimple_code (stmt) == GIMPLE_LABEL)
5385 continue;
5386
5387 /* Don't duplicate label debug stmts. */
5388 if (gimple_debug_bind_p (stmt)
5389 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5390 == LABEL_DECL)
5391 continue;
5392
5393 /* Create a new copy of STMT and duplicate STMT's virtual
5394 operands. */
5395 copy = gimple_copy (stmt);
5396 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5397
5398 maybe_duplicate_eh_stmt (copy, stmt);
5399 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5400
5401 /* When copying around a stmt writing into a local non-user
5402 aggregate, make sure it won't share stack slot with other
5403 vars. */
5404 lhs = gimple_get_lhs (stmt);
5405 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5406 {
5407 tree base = get_base_address (lhs);
5408 if (base
5409 && (TREE_CODE (base) == VAR_DECL
5410 || TREE_CODE (base) == RESULT_DECL)
5411 && DECL_IGNORED_P (base)
5412 && !TREE_STATIC (base)
5413 && !DECL_EXTERNAL (base)
5414 && (TREE_CODE (base) != VAR_DECL
5415 || !DECL_HAS_VALUE_EXPR_P (base)))
5416 DECL_NONSHAREABLE (base) = 1;
5417 }
5418
5419 /* Create new names for all the definitions created by COPY and
5420 add replacement mappings for each new name. */
5421 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5422 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5423 }
5424
5425 return new_bb;
5426 }
5427
5428 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5429
5430 static void
5431 add_phi_args_after_copy_edge (edge e_copy)
5432 {
5433 basic_block bb, bb_copy = e_copy->src, dest;
5434 edge e;
5435 edge_iterator ei;
5436 gimple phi, phi_copy;
5437 tree def;
5438 gimple_stmt_iterator psi, psi_copy;
5439
5440 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5441 return;
5442
5443 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5444
5445 if (e_copy->dest->flags & BB_DUPLICATED)
5446 dest = get_bb_original (e_copy->dest);
5447 else
5448 dest = e_copy->dest;
5449
5450 e = find_edge (bb, dest);
5451 if (!e)
5452 {
5453 /* During loop unrolling the target of the latch edge is copied.
5454 In this case we are not looking for the edge to DEST, but for
5455 the edge to the duplicated block whose original was DEST. */
5456 FOR_EACH_EDGE (e, ei, bb->succs)
5457 {
5458 if ((e->dest->flags & BB_DUPLICATED)
5459 && get_bb_original (e->dest) == dest)
5460 break;
5461 }
5462
5463 gcc_assert (e != NULL);
5464 }
5465
5466 for (psi = gsi_start_phis (e->dest),
5467 psi_copy = gsi_start_phis (e_copy->dest);
5468 !gsi_end_p (psi);
5469 gsi_next (&psi), gsi_next (&psi_copy))
5470 {
5471 phi = gsi_stmt (psi);
5472 phi_copy = gsi_stmt (psi_copy);
5473 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5474 add_phi_arg (phi_copy, def, e_copy,
5475 gimple_phi_arg_location_from_edge (phi, e));
5476 }
5477 }
5478
5479
5480 /* Basic block BB_COPY was created by code duplication. Add phi node
5481 arguments for edges going out of BB_COPY. The blocks that were
5482 duplicated have BB_DUPLICATED set. */
5483
5484 void
5485 add_phi_args_after_copy_bb (basic_block bb_copy)
5486 {
5487 edge e_copy;
5488 edge_iterator ei;
5489
5490 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5491 {
5492 add_phi_args_after_copy_edge (e_copy);
5493 }
5494 }
5495
5496 /* Blocks in the REGION_COPY array of length N_REGION were created
5497 by duplication of basic blocks. Add phi node arguments for edges
5498 going from these blocks. If E_COPY is not NULL, also add
5499 phi node arguments for its destination. */
5500
5501 void
5502 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5503 edge e_copy)
5504 {
5505 unsigned i;
5506
5507 for (i = 0; i < n_region; i++)
5508 region_copy[i]->flags |= BB_DUPLICATED;
5509
5510 for (i = 0; i < n_region; i++)
5511 add_phi_args_after_copy_bb (region_copy[i]);
5512 if (e_copy)
5513 add_phi_args_after_copy_edge (e_copy);
5514
5515 for (i = 0; i < n_region; i++)
5516 region_copy[i]->flags &= ~BB_DUPLICATED;
5517 }
5518
5519 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5520 important exit edge EXIT. By important we mean that no SSA name defined
5521 inside the region is live over the other exit edges of the region. All
5522 entry edges to the region must go to ENTRY->dest. The edge ENTRY is
5523 redirected to the duplicate of the region. SSA form, dominance and loop
5524 information are updated. The new basic blocks are stored to REGION_COPY
5525 in the same order as they appear in REGION, provided that REGION_COPY is
5526 not NULL. The function returns false if it is unable to copy the
5527 region, true otherwise. */
5528
5529 bool
5530 gimple_duplicate_sese_region (edge entry, edge exit,
5531 basic_block *region, unsigned n_region,
5532 basic_block *region_copy)
5533 {
5534 unsigned i;
5535 bool free_region_copy = false, copying_header = false;
5536 struct loop *loop = entry->dest->loop_father;
5537 edge exit_copy;
5538 VEC (basic_block, heap) *doms;
5539 edge redirected;
5540 int total_freq = 0, entry_freq = 0;
5541 gcov_type total_count = 0, entry_count = 0;
5542
5543 if (!can_copy_bbs_p (region, n_region))
5544 return false;
5545
5546 /* Some sanity checking. Note that we do not check for all possible
5547 misuses of the function. That is, if you ask to copy something weird,
5548 it will work, but the state of structures probably will not be
5549 correct. */
5550 for (i = 0; i < n_region; i++)
5551 {
5552 /* We do not handle subloops, i.e. all the blocks must belong to the
5553 same loop. */
5554 if (region[i]->loop_father != loop)
5555 return false;
5556
5557 if (region[i] != entry->dest
5558 && region[i] == loop->header)
5559 return false;
5560 }
5561
5562 set_loop_copy (loop, loop);
5563
5564 /* In case the function is used for loop header copying (which is the primary
5565 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5566 if (loop->header == entry->dest)
5567 {
5568 copying_header = true;
5569 set_loop_copy (loop, loop_outer (loop));
5570
5571 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5572 return false;
5573
5574 for (i = 0; i < n_region; i++)
5575 if (region[i] != exit->src
5576 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5577 return false;
5578 }
5579
5580 if (!region_copy)
5581 {
5582 region_copy = XNEWVEC (basic_block, n_region);
5583 free_region_copy = true;
5584 }
5585
5586 gcc_assert (!need_ssa_update_p (cfun));
5587
5588 /* Record blocks outside the region that are dominated by something
5589 inside. */
5590 doms = NULL;
5591 initialize_original_copy_tables ();
5592
5593 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5594
5595 if (entry->dest->count)
5596 {
5597 total_count = entry->dest->count;
5598 entry_count = entry->count;
5599 /* Fix up corner cases, to avoid division by zero or creation of negative
5600 frequencies. */
5601 if (entry_count > total_count)
5602 entry_count = total_count;
5603 }
5604 else
5605 {
5606 total_freq = entry->dest->frequency;
5607 entry_freq = EDGE_FREQUENCY (entry);
5608 /* Fix up corner cases, to avoid division by zero or creation of negative
5609 frequencies. */
5610 if (total_freq == 0)
5611 total_freq = 1;
5612 else if (entry_freq > total_freq)
5613 entry_freq = total_freq;
5614 }
5615
5616 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5617 split_edge_bb_loc (entry));
5618 if (total_count)
5619 {
5620 scale_bbs_frequencies_gcov_type (region, n_region,
5621 total_count - entry_count,
5622 total_count);
5623 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5624 total_count);
5625 }
5626 else
5627 {
5628 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5629 total_freq);
5630 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5631 }
5632
5633 if (copying_header)
5634 {
5635 loop->header = exit->dest;
5636 loop->latch = exit->src;
5637 }
5638
5639 /* Redirect the entry and add the phi node arguments. */
5640 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5641 gcc_assert (redirected != NULL);
5642 flush_pending_stmts (entry);
5643
5644 /* Concerning updating of dominators: We must recount dominators
5645 for entry block and its copy. Anything that is outside of the
5646 region, but was dominated by something inside needs recounting as
5647 well. */
5648 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5649 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5650 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5651 VEC_free (basic_block, heap, doms);
5652
5653 /* Add the other PHI node arguments. */
5654 add_phi_args_after_copy (region_copy, n_region, NULL);
5655
5656 /* Update the SSA web. */
5657 update_ssa (TODO_update_ssa);
5658
5659 if (free_region_copy)
5660 free (region_copy);
5661
5662 free_original_copy_tables ();
5663 return true;
5664 }
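
/* The primary client of this function is loop header copying, which
   at the source level (as a sketch) transforms

     while (cond)
       body;

   into

     if (cond)
       {
         do
           body;
         while (cond);
       }

   so that the loop body is entered only when COND first holds.  */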
5665
5666 /* Checks if BB is part of the region defined by N_REGION BBS. */
5667 static bool
5668 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5669 {
5670 unsigned int n;
5671
5672 for (n = 0; n < n_region; n++)
5673 {
5674 if (bb == bbs[n])
5675 return true;
5676 }
5677 return false;
5678 }
5679
5680 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5681 are stored to REGION_COPY in the same order as they appear
5682 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5683 the region and EXIT is an exit from it. The condition guarding EXIT
5684 is moved to ENTRY. Returns true if duplication succeeds, false
5685 otherwise.
5686
5687 For example,
5688
5689 some_code;
5690 if (cond)
5691 A;
5692 else
5693 B;
5694
5695 is transformed to
5696
5697 if (cond)
5698 {
5699 some_code;
5700 A;
5701 }
5702 else
5703 {
5704 some_code;
5705 B;
5706 }
5707 */
5708
5709 bool
5710 gimple_duplicate_sese_tail (edge entry, edge exit,
5711 basic_block *region, unsigned n_region,
5712 basic_block *region_copy)
5713 {
5714 unsigned i;
5715 bool free_region_copy = false;
5716 struct loop *loop = exit->dest->loop_father;
5717 struct loop *orig_loop = entry->dest->loop_father;
5718 basic_block switch_bb, entry_bb, nentry_bb;
5719 VEC (basic_block, heap) *doms;
5720 int total_freq = 0, exit_freq = 0;
5721 gcov_type total_count = 0, exit_count = 0;
5722 edge exits[2], nexits[2], e;
5723 gimple_stmt_iterator gsi;
5724 gimple cond_stmt;
5725 edge sorig, snew;
5726 basic_block exit_bb;
5727 gimple_stmt_iterator psi;
5728 gimple phi;
5729 tree def;
5730 struct loop *target, *aloop, *cloop;
5731
5732 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5733 exits[0] = exit;
5734 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5735
5736 if (!can_copy_bbs_p (region, n_region))
5737 return false;
5738
5739 initialize_original_copy_tables ();
5740 set_loop_copy (orig_loop, loop);
5741
5742 target = loop;
5743 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5744 {
5745 if (bb_part_of_region_p (aloop->header, region, n_region))
5746 {
5747 cloop = duplicate_loop (aloop, target);
5748 duplicate_subloops (aloop, cloop);
5749 }
5750 }
5751
5752 if (!region_copy)
5753 {
5754 region_copy = XNEWVEC (basic_block, n_region);
5755 free_region_copy = true;
5756 }
5757
5758 gcc_assert (!need_ssa_update_p (cfun));
5759
5760 /* Record blocks outside the region that are dominated by something
5761 inside. */
5762 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5763
5764 if (exit->src->count)
5765 {
5766 total_count = exit->src->count;
5767 exit_count = exit->count;
5768 /* Fix up corner cases, to avoid division by zero or creation of negative
5769 frequencies. */
5770 if (exit_count > total_count)
5771 exit_count = total_count;
5772 }
5773 else
5774 {
5775 total_freq = exit->src->frequency;
5776 exit_freq = EDGE_FREQUENCY (exit);
5777 /* Fix up corner cases, to avoid division by zero or creation of negative
5778 frequencies. */
5779 if (total_freq == 0)
5780 total_freq = 1;
5781 if (exit_freq > total_freq)
5782 exit_freq = total_freq;
5783 }
5784
5785 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5786 split_edge_bb_loc (exit));
5787 if (total_count)
5788 {
5789 scale_bbs_frequencies_gcov_type (region, n_region,
5790 total_count - exit_count,
5791 total_count);
5792 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5793 total_count);
5794 }
5795 else
5796 {
5797 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5798 total_freq);
5799 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5800 }
5801
5802 /* Create the switch block, and put the exit condition into it. */
5803 entry_bb = entry->dest;
5804 nentry_bb = get_bb_copy (entry_bb);
5805 if (!last_stmt (entry->src)
5806 || !stmt_ends_bb_p (last_stmt (entry->src)))
5807 switch_bb = entry->src;
5808 else
5809 switch_bb = split_edge (entry);
5810 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5811
5812 gsi = gsi_last_bb (switch_bb);
5813 cond_stmt = last_stmt (exit->src);
5814 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5815 cond_stmt = gimple_copy (cond_stmt);
5816
5817 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5818
5819 sorig = single_succ_edge (switch_bb);
5820 sorig->flags = exits[1]->flags;
5821 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5822
5823 /* Register the new edge from SWITCH_BB in loop exit lists. */
5824 rescan_loop_exit (snew, true, false);
5825
5826 /* Add the PHI node arguments. */
5827 add_phi_args_after_copy (region_copy, n_region, snew);
5828
5829 /* Get rid of now superfluous conditions and associated edges (and phi node
5830 arguments). */
5831 exit_bb = exit->dest;
5832
5833 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5834 PENDING_STMT (e) = NULL;
5835
5836 /* The latch of ORIG_LOOP was copied, and so was the backedge
5837 to the original header. We redirect this backedge to EXIT_BB. */
5838 for (i = 0; i < n_region; i++)
5839 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5840 {
5841 gcc_assert (single_succ_edge (region_copy[i]));
5842 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5843 PENDING_STMT (e) = NULL;
5844 for (psi = gsi_start_phis (exit_bb);
5845 !gsi_end_p (psi);
5846 gsi_next (&psi))
5847 {
5848 phi = gsi_stmt (psi);
5849 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5850 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5851 }
5852 }
5853 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5854 PENDING_STMT (e) = NULL;
5855
5856 /* Anything that is outside of the region, but was dominated by something
5857 inside needs to update dominance info. */
5858 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5859 VEC_free (basic_block, heap, doms);
5860 /* Update the SSA web. */
5861 update_ssa (TODO_update_ssa);
5862
5863 if (free_region_copy)
5864 free (region_copy);
5865
5866 free_original_copy_tables ();
5867 return true;
5868 }
5869
5870 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5871 adding blocks when the dominator traversal reaches EXIT. This
5872 function silently assumes that ENTRY strictly dominates EXIT. */
5873
5874 void
5875 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5876 VEC(basic_block,heap) **bbs_p)
5877 {
5878 basic_block son;
5879
5880 for (son = first_dom_son (CDI_DOMINATORS, entry);
5881 son;
5882 son = next_dom_son (CDI_DOMINATORS, son))
5883 {
5884 VEC_safe_push (basic_block, heap, *bbs_p, son);
5885 if (son != exit)
5886 gather_blocks_in_sese_region (son, exit, bbs_p);
5887 }
5888 }
5889
5890 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5891 The duplicates are recorded in VARS_MAP. */
5892
5893 static void
5894 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5895 tree to_context)
5896 {
5897 tree t = *tp, new_t;
5898 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5899 void **loc;
5900
5901 if (DECL_CONTEXT (t) == to_context)
5902 return;
5903
5904 loc = pointer_map_contains (vars_map, t);
5905
5906 if (!loc)
5907 {
5908 loc = pointer_map_insert (vars_map, t);
5909
5910 if (SSA_VAR_P (t))
5911 {
5912 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5913 add_local_decl (f, new_t);
5914 }
5915 else
5916 {
5917 gcc_assert (TREE_CODE (t) == CONST_DECL);
5918 new_t = copy_node (t);
5919 }
5920 DECL_CONTEXT (new_t) = to_context;
5921
5922 *loc = new_t;
5923 }
5924 else
5925 new_t = (tree) *loc;
5926
5927 *tp = new_t;
5928 }
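
/* E.g. (invented names): moving a statement that references the local
   variable 'i' of function A into function B creates a fresh VAR_DECL
   'i' with DECL_CONTEXT B, records the pair in VARS_MAP, and rewrites
   every later reference through the same map entry.  */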
5929
5930
5931 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5932 VARS_MAP maps old ssa names and var_decls to the new ones. */
5933
5934 static tree
5935 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5936 tree to_context)
5937 {
5938 void **loc;
5939 tree new_name, decl = SSA_NAME_VAR (name);
5940
5941 gcc_assert (is_gimple_reg (name));
5942
5943 loc = pointer_map_contains (vars_map, name);
5944
5945 if (!loc)
5946 {
5947 replace_by_duplicate_decl (&decl, vars_map, to_context);
5948
5949 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5950 if (gimple_in_ssa_p (cfun))
5951 add_referenced_var (decl);
5952
5953 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5954 if (SSA_NAME_IS_DEFAULT_DEF (name))
5955 set_default_def (decl, new_name);
5956 pop_cfun ();
5957
5958 loc = pointer_map_insert (vars_map, name);
5959 *loc = new_name;
5960 }
5961 else
5962 new_name = (tree) *loc;
5963
5964 return new_name;
5965 }
5966
5967 struct move_stmt_d
5968 {
5969 tree orig_block;
5970 tree new_block;
5971 tree from_context;
5972 tree to_context;
5973 struct pointer_map_t *vars_map;
5974 htab_t new_label_map;
5975 struct pointer_map_t *eh_map;
5976 bool remap_decls_p;
5977 };
5978
5979 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5980 contained in *TP if it was previously ORIG_BLOCK, and change the
5981 DECL_CONTEXT of every local variable referenced in *TP. */
5982
5983 static tree
5984 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5985 {
5986 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5987 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5988 tree t = *tp;
5989
5990 if (EXPR_P (t))
5991 /* We should never have TREE_BLOCK set on non-statements. */
5992 gcc_assert (!TREE_BLOCK (t));
5993
5994 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5995 {
5996 if (TREE_CODE (t) == SSA_NAME)
5997 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5998 else if (TREE_CODE (t) == LABEL_DECL)
5999 {
6000 if (p->new_label_map)
6001 {
6002 struct tree_map in, *out;
6003 in.base.from = t;
6004 out = (struct tree_map *)
6005 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6006 if (out)
6007 *tp = t = out->to;
6008 }
6009
6010 DECL_CONTEXT (t) = p->to_context;
6011 }
6012 else if (p->remap_decls_p)
6013 {
6014 /* Replace T with its duplicate. T should no longer appear in the
6015 parent function, so this looks wasteful; however, it may appear
6016 in referenced_vars, and more importantly, as virtual operands of
6017 statements, and in alias lists of other variables. It would be
6018 quite difficult to expunge it from all those places. ??? It might
6019 suffice to do this for addressable variables. */
6020 if ((TREE_CODE (t) == VAR_DECL
6021 && !is_global_var (t))
6022 || TREE_CODE (t) == CONST_DECL)
6023 {
6024 struct function *to_fn = DECL_STRUCT_FUNCTION (p->to_context);
6025 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6026 if (gimple_referenced_vars (to_fn))
6027 add_referenced_var_1 (*tp, to_fn);
6028 }
6029 }
6030 *walk_subtrees = 0;
6031 }
6032 else if (TYPE_P (t))
6033 *walk_subtrees = 0;
6034
6035 return NULL_TREE;
6036 }
6037
6038 /* Helper for move_stmt_r. Given an EH region number for the source
6039 function, map that to the duplicate EH region number in the dest. */
6040
6041 static int
6042 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6043 {
6044 eh_region old_r, new_r;
6045 void **slot;
6046
6047 old_r = get_eh_region_from_number (old_nr);
6048 slot = pointer_map_contains (p->eh_map, old_r);
6049 new_r = (eh_region) *slot;
6050
6051 return new_r->index;
6052 }
6053
6054 /* Similar, but operate on INTEGER_CSTs. */
6055
6056 static tree
6057 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6058 {
6059 int old_nr, new_nr;
6060
6061 old_nr = tree_low_cst (old_t_nr, 0);
6062 new_nr = move_stmt_eh_region_nr (old_nr, p);
6063
6064 return build_int_cst (integer_type_node, new_nr);
6065 }
6066
6067 /* Like move_stmt_op, but for gimple statements.
6068
6069 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6070 contained in the current statement in *GSI_P and change the
6071 DECL_CONTEXT of every local variable referenced in the current
6072 statement. */
6073
6074 static tree
6075 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6076 struct walk_stmt_info *wi)
6077 {
6078 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6079 gimple stmt = gsi_stmt (*gsi_p);
6080 tree block = gimple_block (stmt);
6081
6082 if (p->orig_block == NULL_TREE
6083 || block == p->orig_block
6084 || block == NULL_TREE)
6085 gimple_set_block (stmt, p->new_block);
6086 #ifdef ENABLE_CHECKING
6087 else if (block != p->new_block)
6088 {
6089 while (block && block != p->orig_block)
6090 block = BLOCK_SUPERCONTEXT (block);
6091 gcc_assert (block);
6092 }
6093 #endif
6094
6095 switch (gimple_code (stmt))
6096 {
6097 case GIMPLE_CALL:
6098 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6099 {
6100 tree r, fndecl = gimple_call_fndecl (stmt);
6101 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6102 switch (DECL_FUNCTION_CODE (fndecl))
6103 {
6104 case BUILT_IN_EH_COPY_VALUES:
6105 r = gimple_call_arg (stmt, 1);
6106 r = move_stmt_eh_region_tree_nr (r, p);
6107 gimple_call_set_arg (stmt, 1, r);
6108 /* FALLTHRU */
6109
6110 case BUILT_IN_EH_POINTER:
6111 case BUILT_IN_EH_FILTER:
6112 r = gimple_call_arg (stmt, 0);
6113 r = move_stmt_eh_region_tree_nr (r, p);
6114 gimple_call_set_arg (stmt, 0, r);
6115 break;
6116
6117 default:
6118 break;
6119 }
6120 }
6121 break;
6122
6123 case GIMPLE_RESX:
6124 {
6125 int r = gimple_resx_region (stmt);
6126 r = move_stmt_eh_region_nr (r, p);
6127 gimple_resx_set_region (stmt, r);
6128 }
6129 break;
6130
6131 case GIMPLE_EH_DISPATCH:
6132 {
6133 int r = gimple_eh_dispatch_region (stmt);
6134 r = move_stmt_eh_region_nr (r, p);
6135 gimple_eh_dispatch_set_region (stmt, r);
6136 }
6137 break;
6138
6139 case GIMPLE_OMP_RETURN:
6140 case GIMPLE_OMP_CONTINUE:
6141 break;
6142 default:
6143 if (is_gimple_omp (stmt))
6144 {
6145 /* Do not remap variables inside OMP directives. Variables
6146 referenced in clauses and directive header belong to the
6147 parent function and should not be moved into the child
6148 function. */
6149 bool save_remap_decls_p = p->remap_decls_p;
6150 p->remap_decls_p = false;
6151 *handled_ops_p = true;
6152
6153 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6154 move_stmt_op, wi);
6155
6156 p->remap_decls_p = save_remap_decls_p;
6157 }
6158 break;
6159 }
6160
6161 return NULL_TREE;
6162 }
6163
6164 /* Move basic block BB from function CFUN to function DEST_FN. The
6165 block is moved out of the original linked list and placed after
6166 block AFTER in the new list. Also, the block is removed from the
6167 original array of blocks and placed in DEST_FN's array of blocks.
6168 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6169 updated to reflect the moved edges.
6170
6171 The local variables are remapped to new instances; VARS_MAP is used
6172 to record the mapping. */
6173
6174 static void
6175 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6176 basic_block after, bool update_edge_count_p,
6177 struct move_stmt_d *d)
6178 {
6179 struct control_flow_graph *cfg;
6180 edge_iterator ei;
6181 edge e;
6182 gimple_stmt_iterator si;
6183 unsigned old_len, new_len;
6184
6185 /* Remove BB from dominance structures. */
6186 delete_from_dominance_info (CDI_DOMINATORS, bb);
6187 if (current_loops)
6188 remove_bb_from_loops (bb);
6189
6190 /* Link BB to the new linked list. */
6191 move_block_after (bb, after);
6192
6193 /* Update the edge count in the corresponding flowgraphs. */
6194 if (update_edge_count_p)
6195 FOR_EACH_EDGE (e, ei, bb->succs)
6196 {
6197 cfun->cfg->x_n_edges--;
6198 dest_cfun->cfg->x_n_edges++;
6199 }
6200
6201 /* Remove BB from the original basic block array. */
6202 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
6203 cfun->cfg->x_n_basic_blocks--;
6204
6205 /* Grow DEST_CFUN's basic block array if needed. */
6206 cfg = dest_cfun->cfg;
6207 cfg->x_n_basic_blocks++;
6208 if (bb->index >= cfg->x_last_basic_block)
6209 cfg->x_last_basic_block = bb->index + 1;
6210
6211 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
6212 if ((unsigned) cfg->x_last_basic_block >= old_len)
6213 {
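/* Grow to about 1.25 * x_last_basic_block (rounded up), leaving
   headroom for the remaining blocks being moved in.  */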
6214 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6215 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
6216 new_len);
6217 }
6218
6219 VEC_replace (basic_block, cfg->x_basic_block_info,
6220 bb->index, bb);
6221
6222 /* Remap the variables in phi nodes. */
6223 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6224 {
6225 gimple phi = gsi_stmt (si);
6226 use_operand_p use;
6227 tree op = PHI_RESULT (phi);
6228 ssa_op_iter oi;
6229
6230 if (!is_gimple_reg (op))
6231 {
6232 /* Remove the phi nodes for virtual operands (alias analysis will be
6233 run for the new function, anyway). */
6234 remove_phi_node (&si, true);
6235 continue;
6236 }
6237
6238 SET_PHI_RESULT (phi,
6239 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6240 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6241 {
6242 op = USE_FROM_PTR (use);
6243 if (TREE_CODE (op) == SSA_NAME)
6244 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6245 }
6246
6247 gsi_next (&si);
6248 }
6249
6250 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6251 {
6252 gimple stmt = gsi_stmt (si);
6253 struct walk_stmt_info wi;
6254
6255 memset (&wi, 0, sizeof (wi));
6256 wi.info = d;
6257 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6258
6259 if (gimple_code (stmt) == GIMPLE_LABEL)
6260 {
6261 tree label = gimple_label_label (stmt);
6262 int uid = LABEL_DECL_UID (label);
6263
6264 gcc_assert (uid > -1);
6265
6266 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
6267 if (old_len <= (unsigned) uid)
6268 {
6269 new_len = 3 * uid / 2 + 1;
6270 VEC_safe_grow_cleared (basic_block, gc,
6271 cfg->x_label_to_block_map, new_len);
6272 }
6273
6274 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
6275 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
6276
6277 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6278
6279 if (uid >= dest_cfun->cfg->last_label_uid)
6280 dest_cfun->cfg->last_label_uid = uid + 1;
6281 }
6282
6283 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6284 remove_stmt_from_eh_lp_fn (cfun, stmt);
6285
6286 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6287 gimple_remove_stmt_histograms (cfun, stmt);
6288
6289 /* We cannot leave any operands allocated from the operand caches of
6290 the current function. */
6291 free_stmt_operands (stmt);
6292 push_cfun (dest_cfun);
6293 update_stmt (stmt);
6294 pop_cfun ();
6295 }
6296
6297 FOR_EACH_EDGE (e, ei, bb->succs)
6298 if (e->goto_locus)
6299 {
6300 tree block = e->goto_block;
6301 if (d->orig_block == NULL_TREE
6302 || block == d->orig_block)
6303 e->goto_block = d->new_block;
6304 #ifdef ENABLE_CHECKING
6305 else if (block != d->new_block)
6306 {
6307 while (block && block != d->orig_block)
6308 block = BLOCK_SUPERCONTEXT (block);
6309 gcc_assert (block);
6310 }
6311 #endif
6312 }
6313 }
6314
6315 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6316 the outermost EH region. Use REGION as the incoming base EH region. */
6317
6318 static eh_region
6319 find_outermost_region_in_block (struct function *src_cfun,
6320 basic_block bb, eh_region region)
6321 {
6322 gimple_stmt_iterator si;
6323
6324 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6325 {
6326 gimple stmt = gsi_stmt (si);
6327 eh_region stmt_region;
6328 int lp_nr;
6329
6330 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6331 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6332 if (stmt_region)
6333 {
6334 if (region == NULL)
6335 region = stmt_region;
6336 else if (stmt_region != region)
6337 {
6338 region = eh_region_outermost (src_cfun, stmt_region, region);
6339 gcc_assert (region != NULL);
6340 }
6341 }
6342 }
6343
6344 return region;
6345 }
6346
6347 static tree
6348 new_label_mapper (tree decl, void *data)
6349 {
6350 htab_t hash = (htab_t) data;
6351 struct tree_map *m;
6352 void **slot;
6353
6354 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6355
6356 m = XNEW (struct tree_map);
6357 m->hash = DECL_UID (decl);
6358 m->base.from = decl;
6359 m->to = create_artificial_label (UNKNOWN_LOCATION);
6360 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6361 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6362 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6363
6364 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6365 gcc_assert (*slot == NULL);
6366
6367 *slot = m;
6368
6369 return m->to;
6370 }
6371
6372 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6373 subblocks. */
6374
6375 static void
6376 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6377 tree to_context)
6378 {
6379 tree *tp, t;
6380
6381 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6382 {
6383 t = *tp;
6384 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6385 continue;
6386 replace_by_duplicate_decl (&t, vars_map, to_context);
6387 if (t != *tp)
6388 {
6389 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6390 {
6391 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6392 DECL_HAS_VALUE_EXPR_P (t) = 1;
6393 }
6394 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6395 *tp = t;
6396 }
6397 }
6398
6399 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6400 replace_block_vars_by_duplicates (block, vars_map, to_context);
6401 }
6402
6403 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6404 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6405 single basic block in the original CFG and the new basic block is
6406 returned. DEST_CFUN must not have a CFG yet.
6407
6408 Note that the region need not be a pure SESE region. Blocks inside
6409 the region may contain calls to abort/exit. The only restriction
6410 is that ENTRY_BB should be the only entry point and it must
6411 dominate EXIT_BB.
6412
6413 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6414 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6415 to the new function.
6416
6417 All local variables referenced in the region are assumed to be in
6418 the corresponding BLOCK_VARS and unexpanded variable lists
6419 associated with DEST_CFUN. */
6420
6421 basic_block
6422 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6423 basic_block exit_bb, tree orig_block)
6424 {
6425 VEC(basic_block,heap) *bbs, *dom_bbs;
6426 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6427 basic_block after, bb, *entry_pred, *exit_succ, abb;
6428 struct function *saved_cfun = cfun;
6429 int *entry_flag, *exit_flag;
6430 unsigned *entry_prob, *exit_prob;
6431 unsigned i, num_entry_edges, num_exit_edges;
6432 edge e;
6433 edge_iterator ei;
6434 htab_t new_label_map;
6435 struct pointer_map_t *vars_map, *eh_map;
6436 struct loop *loop = entry_bb->loop_father;
6437 struct move_stmt_d d;
6438
6439 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6440 region. */
6441 gcc_assert (entry_bb != exit_bb
6442 && (!exit_bb
6443 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6444
6445 /* Collect all the blocks in the region. Manually add ENTRY_BB
6446 because it won't be added by dfs_enumerate_from. */
6447 bbs = NULL;
6448 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6449 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6450
6451 /* The blocks that used to be dominated by something in BBS will now be
6452 dominated by the new block. */
6453 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6454 VEC_address (basic_block, bbs),
6455 VEC_length (basic_block, bbs));
6456
6457 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6458 the predecessor edges to ENTRY_BB and the successor edges to
6459 EXIT_BB so that we can re-attach them to the new basic block that
6460 will replace the region. */
6461 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6462 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6463 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6464 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6465 i = 0;
6466 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6467 {
6468 entry_prob[i] = e->probability;
6469 entry_flag[i] = e->flags;
6470 entry_pred[i++] = e->src;
6471 remove_edge (e);
6472 }
6473
6474 if (exit_bb)
6475 {
6476 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6477 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6478 sizeof (basic_block));
6479 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6480 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6481 i = 0;
6482 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6483 {
6484 exit_prob[i] = e->probability;
6485 exit_flag[i] = e->flags;
6486 exit_succ[i++] = e->dest;
6487 remove_edge (e);
6488 }
6489 }
6490 else
6491 {
6492 num_exit_edges = 0;
6493 exit_succ = NULL;
6494 exit_flag = NULL;
6495 exit_prob = NULL;
6496 }
6497
6498 /* Switch context to the child function to initialize DEST_FN's CFG. */
6499 gcc_assert (dest_cfun->cfg == NULL);
6500 push_cfun (dest_cfun);
6501
6502 init_empty_tree_cfg ();
6503
6504 /* Initialize EH information for the new function. */
6505 eh_map = NULL;
6506 new_label_map = NULL;
6507 if (saved_cfun->eh)
6508 {
6509 eh_region region = NULL;
6510
6511 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6512 region = find_outermost_region_in_block (saved_cfun, bb, region);
6513
6514 init_eh_for_function ();
6515 if (region != NULL)
6516 {
6517 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6518 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6519 new_label_mapper, new_label_map);
6520 }
6521 }
6522
6523 pop_cfun ();
6524
6525 /* Move blocks from BBS into DEST_CFUN. */
6526 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6527 after = dest_cfun->cfg->x_entry_block_ptr;
6528 vars_map = pointer_map_create ();
6529
6530 memset (&d, 0, sizeof (d));
6531 d.orig_block = orig_block;
6532 d.new_block = DECL_INITIAL (dest_cfun->decl);
6533 d.from_context = cfun->decl;
6534 d.to_context = dest_cfun->decl;
6535 d.vars_map = vars_map;
6536 d.new_label_map = new_label_map;
6537 d.eh_map = eh_map;
6538 d.remap_decls_p = true;
6539
6540 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6541 {
6542 /* No need to update edge counts on the last block. They have
6543 already been updated earlier when we detached the region from
6544 the original CFG. */
6545 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6546 after = bb;
6547 }
6548
6549 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6550 if (orig_block)
6551 {
6552 tree block;
6553 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6554 == NULL_TREE);
6555 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6556 = BLOCK_SUBBLOCKS (orig_block);
6557 for (block = BLOCK_SUBBLOCKS (orig_block);
6558 block; block = BLOCK_CHAIN (block))
6559 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6560 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6561 }
6562
6563 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6564 vars_map, dest_cfun->decl);
6565
6566 if (new_label_map)
6567 htab_delete (new_label_map);
6568 if (eh_map)
6569 pointer_map_destroy (eh_map);
6570 pointer_map_destroy (vars_map);
6571
6572 /* Rewire the entry and exit blocks. The successor to the entry
6573 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6574 the child function. Similarly, the predecessor of DEST_FN's
6575 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6576 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6577 various CFG manipulation functions get to the right CFG.
6578
6579 FIXME, this is silly. The CFG ought to become a parameter to
6580 these helpers. */
6581 push_cfun (dest_cfun);
6582 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6583 if (exit_bb)
6584 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6585 pop_cfun ();
6586
6587 /* Back in the original function, the SESE region has disappeared;
6588 create a new basic block in its place. */
6589 bb = create_empty_bb (entry_pred[0]);
6590 if (current_loops)
6591 add_bb_to_loop (bb, loop);
6592 for (i = 0; i < num_entry_edges; i++)
6593 {
6594 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6595 e->probability = entry_prob[i];
6596 }
6597
6598 for (i = 0; i < num_exit_edges; i++)
6599 {
6600 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6601 e->probability = exit_prob[i];
6602 }
6603
6604 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6605 FOR_EACH_VEC_ELT (basic_block, dom_bbs, i, abb)
6606 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6607 VEC_free (basic_block, heap, dom_bbs);
6608
6609 if (exit_bb)
6610 {
6611 free (exit_prob);
6612 free (exit_flag);
6613 free (exit_succ);
6614 }
6615 free (entry_prob);
6616 free (entry_flag);
6617 free (entry_pred);
6618 VEC_free (basic_block, heap, bbs);
6619
6620 return bb;
6621 }
6622
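/* Usage sketch (illustrative only, not compiled): this is roughly how
   an outlining pass, e.g. the OpenMP expander, hands a region over to
   a new child function.  CHILD_FN, ENTRY_BB and EXIT_BB are assumed
   to have been prepared by the caller, and
   DECL_STRUCT_FUNCTION (CHILD_FN) must not have a CFG yet.  */
#if 0
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  basic_block new_bb;

  /* The whole region collapses to NEW_BB in the current function;
     passing NULL_TREE for ORIG_BLOCK retargets all statement blocks.  */
  new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb,
				   NULL_TREE);
#endif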
6623
6624 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6625 tree-pass.h). */
6626
6627 void
6628 dump_function_to_file (tree fn, FILE *file, int flags)
6629 {
6630 tree arg, var;
6631 struct function *dsf;
6632 bool ignore_topmost_bind = false, any_var = false;
6633 basic_block bb;
6634 tree chain;
6635 bool tmclone = TREE_CODE (fn) == FUNCTION_DECL && decl_is_tm_clone (fn);
6636
6637 fprintf (file, "%s %s(", current_function_name (),
6638 tmclone ? "[tm-clone] " : "");
6639
6640 arg = DECL_ARGUMENTS (fn);
6641 while (arg)
6642 {
6643 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6644 fprintf (file, " ");
6645 print_generic_expr (file, arg, dump_flags);
6646 if (flags & TDF_VERBOSE)
6647 print_node (file, "", arg, 4);
6648 if (DECL_CHAIN (arg))
6649 fprintf (file, ", ");
6650 arg = DECL_CHAIN (arg);
6651 }
6652 fprintf (file, ")\n");
6653
6654 if (flags & TDF_VERBOSE)
6655 print_node (file, "", fn, 2);
6656
6657 dsf = DECL_STRUCT_FUNCTION (fn);
6658 if (dsf && (flags & TDF_EH))
6659 dump_eh_tree (file, dsf);
6660
6661 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6662 {
6663 dump_node (fn, TDF_SLIM | flags, file);
6664 return;
6665 }
6666
6667 /* Switch CFUN to point to FN. */
6668 push_cfun (DECL_STRUCT_FUNCTION (fn));
6669
6670 /* When GIMPLE is lowered, the variables are no longer available in
6671 BIND_EXPRs, so display them separately. */
6672 if (cfun && cfun->decl == fn && !VEC_empty (tree, cfun->local_decls))
6673 {
6674 unsigned ix;
6675 ignore_topmost_bind = true;
6676
6677 fprintf (file, "{\n");
6678 FOR_EACH_LOCAL_DECL (cfun, ix, var)
6679 {
6680 print_generic_decl (file, var, flags);
6681 if (flags & TDF_VERBOSE)
6682 print_node (file, "", var, 4);
6683 fprintf (file, "\n");
6684
6685 any_var = true;
6686 }
6687 }
6688
6689 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6690 {
6691 /* If the CFG has been built, emit a CFG-based dump. */
6692 if (!ignore_topmost_bind)
6693 fprintf (file, "{\n");
6694
6695 if (any_var && n_basic_blocks)
6696 fprintf (file, "\n");
6697
6698 FOR_EACH_BB (bb)
6699 dump_bb (file, bb, 2, flags | TDF_COMMENT);
6700
6701 fprintf (file, "}\n");
6702 }
6703 else if (DECL_SAVED_TREE (fn) == NULL)
6704 {
6705 /* The function is now in GIMPLE form but the CFG has not been
6706 built yet. Emit the single sequence of GIMPLE statements
6707 that make up its body. */
6708 gimple_seq body = gimple_body (fn);
6709
6710 if (gimple_seq_first_stmt (body)
6711 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6712 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6713 print_gimple_seq (file, body, 0, flags);
6714 else
6715 {
6716 if (!ignore_topmost_bind)
6717 fprintf (file, "{\n");
6718
6719 if (any_var)
6720 fprintf (file, "\n");
6721
6722 print_gimple_seq (file, body, 2, flags);
6723 fprintf (file, "}\n");
6724 }
6725 }
6726 else
6727 {
6728 int indent;
6729
6730 /* Make a tree based dump. */
6731 chain = DECL_SAVED_TREE (fn);
6732
6733 if (chain && TREE_CODE (chain) == BIND_EXPR)
6734 {
6735 if (ignore_topmost_bind)
6736 {
6737 chain = BIND_EXPR_BODY (chain);
6738 indent = 2;
6739 }
6740 else
6741 indent = 0;
6742 }
6743 else
6744 {
6745 if (!ignore_topmost_bind)
6746 fprintf (file, "{\n");
6747 indent = 2;
6748 }
6749
6750 if (any_var)
6751 fprintf (file, "\n");
6752
6753 print_generic_stmt_indented (file, chain, flags, indent);
6754 if (ignore_topmost_bind)
6755 fprintf (file, "}\n");
6756 }
6757
6758 if (flags & TDF_ENUMERATE_LOCALS)
6759 dump_enumerated_decls (file, flags);
6760 fprintf (file, "\n\n");
6761
6762 /* Restore CFUN. */
6763 pop_cfun ();
6764 }
6765
6766
6767 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree-pass.h). */
6768
6769 DEBUG_FUNCTION void
6770 debug_function (tree fn, int flags)
6771 {
6772 dump_function_to_file (fn, stderr, flags);
6773 }
6774
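/* Example (illustrative): from a debugger, the function above can be
   invoked directly, e.g.

     (gdb) call debug_function (current_function_decl, 0)

   and within a pass one can emit a richer dump by combining TDF_*
   flags:  */
#if 0
  dump_function_to_file (current_function_decl, stderr,
			 TDF_BLOCKS | TDF_VOPS | TDF_LINENO);
#endif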
6775
6776 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6777
6778 static void
6779 print_pred_bbs (FILE *file, basic_block bb)
6780 {
6781 edge e;
6782 edge_iterator ei;
6783
6784 FOR_EACH_EDGE (e, ei, bb->preds)
6785 fprintf (file, "bb_%d ", e->src->index);
6786 }
6787
6788
6789 /* Print on FILE the indexes for the successors of basic_block BB. */
6790
6791 static void
6792 print_succ_bbs (FILE *file, basic_block bb)
6793 {
6794 edge e;
6795 edge_iterator ei;
6796
6797 FOR_EACH_EDGE (e, ei, bb->succs)
6798 fprintf (file, "bb_%d ", e->dest->index);
6799 }
6800
6801 /* Print to FILE the basic block BB according to the VERBOSITY level. */
6802
6803 void
6804 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6805 {
6806 char *s_indent = (char *) alloca ((size_t) indent + 1);
6807 memset ((void *) s_indent, ' ', (size_t) indent);
6808 s_indent[indent] = '\0';
6809
6810 /* Print basic_block's header. */
6811 if (verbosity >= 2)
6812 {
6813 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6814 print_pred_bbs (file, bb);
6815 fprintf (file, "}, succs = {");
6816 print_succ_bbs (file, bb);
6817 fprintf (file, "})\n");
6818 }
6819
6820 /* Print basic_block's body. */
6821 if (verbosity >= 3)
6822 {
6823 fprintf (file, "%s {\n", s_indent);
6824 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6825 fprintf (file, "%s }\n", s_indent);
6826 }
6827 }
6828
6829 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6830
6831 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
6832 the VERBOSITY level, this outputs the contents of the loop or just
6833 its structure. */
6834
6835 static void
6836 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6837 {
6838 char *s_indent;
6839 basic_block bb;
6840
6841 if (loop == NULL)
6842 return;
6843
6844 s_indent = (char *) alloca ((size_t) indent + 1);
6845 memset ((void *) s_indent, ' ', (size_t) indent);
6846 s_indent[indent] = '\0';
6847
6848 /* Print loop's header. */
6849 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6850 loop->num, loop->header->index, loop->latch->index);
6851 fprintf (file, ", niter = ");
6852 print_generic_expr (file, loop->nb_iterations, 0);
6853
6854 if (loop->any_upper_bound)
6855 {
6856 fprintf (file, ", upper_bound = ");
6857 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6858 }
6859
6860 if (loop->any_estimate)
6861 {
6862 fprintf (file, ", estimate = ");
6863 dump_double_int (file, loop->nb_iterations_estimate, true);
6864 }
6865 fprintf (file, ")\n");
6866
6867 /* Print loop's body. */
6868 if (verbosity >= 1)
6869 {
6870 fprintf (file, "%s{\n", s_indent);
6871 FOR_EACH_BB (bb)
6872 if (bb->loop_father == loop)
6873 print_loops_bb (file, bb, indent, verbosity);
6874
6875 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6876 fprintf (file, "%s}\n", s_indent);
6877 }
6878 }
6879
6880 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6881 spaces. Depending on the VERBOSITY level, this outputs the
6882 contents of the loops or just their structure. */
6883
6884 static void
6885 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6886 {
6887 if (loop == NULL)
6888 return;
6889
6890 print_loop (file, loop, indent, verbosity);
6891 print_loop_and_siblings (file, loop->next, indent, verbosity);
6892 }
6893
6894 /* Starting from the entry point of the program, pretty print on
6895 FILE the loop structure at the given VERBOSITY level. */
6896
6897 void
6898 print_loops (FILE *file, int verbosity)
6899 {
6900 basic_block bb;
6901
6902 bb = ENTRY_BLOCK_PTR;
6903 if (bb && bb->loop_father)
6904 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6905 }
6906
6907
6908 /* Debug the loop structure at the tree level, at some VERBOSITY level. */
6909
6910 DEBUG_FUNCTION void
6911 debug_loops (int verbosity)
6912 {
6913 print_loops (stderr, verbosity);
6914 }
6915
6916 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6917
6918 DEBUG_FUNCTION void
6919 debug_loop (struct loop *loop, int verbosity)
6920 {
6921 print_loop (stderr, loop, 0, verbosity);
6922 }
6923
6924 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6925 level. */
6926
6927 DEBUG_FUNCTION void
6928 debug_loop_num (unsigned num, int verbosity)
6929 {
6930 debug_loop (get_loop (num), verbosity);
6931 }
6932
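/* Example (illustrative): "call debug_loops (1)" from a debugger
   prints each loop's header line and nesting; verbosity 2 adds each
   block's pred/succ lists, and verbosity 3 also dumps the block
   bodies (see print_loops_bb above).  */
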
6933 /* Return true if BB ends with a call, possibly followed by some
6934 instructions that must stay with the call. Return false
6935 otherwise. */
6936
6937 static bool
6938 gimple_block_ends_with_call_p (basic_block bb)
6939 {
6940 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6941 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6942 }
6943
6944
6945 /* Return true if BB ends with a conditional branch. Return false
6946 otherwise. */
6947
6948 static bool
6949 gimple_block_ends_with_condjump_p (const_basic_block bb)
6950 {
6951 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6952 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6953 }
6954
6955
6956 /* Return true if we need to add a fake edge to the exit at statement T.
6957 Helper function for gimple_flow_call_edges_add. */
6958
6959 static bool
6960 need_fake_edge_p (gimple t)
6961 {
6962 tree fndecl = NULL_TREE;
6963 int call_flags = 0;
6964
6965 /* NORETURN and LONGJMP calls already have an edge to exit.
6966 CONST and PURE calls do not need one.
6967 We don't currently check for CONST and PURE here, although
6968 it would be a good idea, because those attributes are
6969 figured out from the RTL in mark_constant_function, and
6970 the counter incrementation code from -fprofile-arcs
6971 leads to different results from -fbranch-probabilities. */
6972 if (is_gimple_call (t))
6973 {
6974 fndecl = gimple_call_fndecl (t);
6975 call_flags = gimple_call_flags (t);
6976 }
6977
6978 if (is_gimple_call (t)
6979 && fndecl
6980 && DECL_BUILT_IN (fndecl)
6981 && (call_flags & ECF_NOTHROW)
6982 && !(call_flags & ECF_RETURNS_TWICE)
6983 /* fork() doesn't really return twice, but wrapping it in
6984 __gcov_fork(), which calls __gcov_flush() and clears the
6985 counters before forking, has the same effect as returning
6986 twice. Force a fake edge. */
6987 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6988 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6989 return false;
6990
6991 if (is_gimple_call (t))
6992 {
6993 edge_iterator ei;
6994 edge e;
6995 basic_block bb;
6996
6997 if (!(call_flags & ECF_NORETURN))
6998 return true;
6999
7000 bb = gimple_bb (t);
7001 FOR_EACH_EDGE (e, ei, bb->succs)
7002 if ((e->flags & EDGE_FAKE) == 0)
7003 return true;
7004 }
7005
7006 if (gimple_code (t) == GIMPLE_ASM
7007 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7008 return true;
7009
7010 return false;
7011 }
7012
7013
7014 /* Add fake edges to the function exit for any non-constant and
7015 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7016 for volatile inline assembly, in the blocks specified by the bitmap
7017 BLOCKS, or in the whole CFG if BLOCKS is zero. Return the number
7018 of blocks that were split.
7019
7020 The goal is to expose cases in which entering a basic block does
7021 not imply that all subsequent instructions must be executed. */
7022
7023 static int
7024 gimple_flow_call_edges_add (sbitmap blocks)
7025 {
7026 int i;
7027 int blocks_split = 0;
7028 int last_bb = last_basic_block;
7029 bool check_last_block = false;
7030
7031 if (n_basic_blocks == NUM_FIXED_BLOCKS)
7032 return 0;
7033
7034 if (! blocks)
7035 check_last_block = true;
7036 else
7037 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7038
7039 /* In the last basic block, before epilogue generation, there will be
7040 a fallthru edge to EXIT. Special care is required if the last insn
7041 of the last basic block is a call because make_edge folds duplicate
7042 edges, which would result in the fallthru edge also being marked
7043 fake, which would result in the fallthru edge being removed by
7044 remove_fake_edges, which would result in an invalid CFG.
7045
7046 Moreover, we can't elide the outgoing fake edge, since the block
7047 profiler needs to take this into account in order to solve the minimal
7048 spanning tree in the case that the call doesn't return.
7049
7050 Handle this by adding a dummy instruction in a new last basic block. */
7051 if (check_last_block)
7052 {
7053 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7054 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7055 gimple t = NULL;
7056
7057 if (!gsi_end_p (gsi))
7058 t = gsi_stmt (gsi);
7059
7060 if (t && need_fake_edge_p (t))
7061 {
7062 edge e;
7063
7064 e = find_edge (bb, EXIT_BLOCK_PTR);
7065 if (e)
7066 {
7067 gsi_insert_on_edge (e, gimple_build_nop ());
7068 gsi_commit_edge_inserts ();
7069 }
7070 }
7071 }
7072
7073 /* Now add fake edges to the function exit for any non-constant
7074 calls, since there is no way to determine whether they will
7075 return or not... */
7076 for (i = 0; i < last_bb; i++)
7077 {
7078 basic_block bb = BASIC_BLOCK (i);
7079 gimple_stmt_iterator gsi;
7080 gimple stmt, last_stmt;
7081
7082 if (!bb)
7083 continue;
7084
7085 if (blocks && !TEST_BIT (blocks, i))
7086 continue;
7087
7088 gsi = gsi_last_nondebug_bb (bb);
7089 if (!gsi_end_p (gsi))
7090 {
7091 last_stmt = gsi_stmt (gsi);
7092 do
7093 {
7094 stmt = gsi_stmt (gsi);
7095 if (need_fake_edge_p (stmt))
7096 {
7097 edge e;
7098
7099 /* The handling above of the final block before the
7100 epilogue should be enough to verify that there is
7101 no edge to the exit block in the CFG already.
7102 Calling make_edge in such case would cause us to
7103 mark that edge as fake and remove it later. */
7104 #ifdef ENABLE_CHECKING
7105 if (stmt == last_stmt)
7106 {
7107 e = find_edge (bb, EXIT_BLOCK_PTR);
7108 gcc_assert (e == NULL);
7109 }
7110 #endif
7111
7112 /* Note that the following may create a new basic block
7113 and renumber the existing basic blocks. */
7114 if (stmt != last_stmt)
7115 {
7116 e = split_block (bb, stmt);
7117 if (e)
7118 blocks_split++;
7119 }
7120 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7121 }
7122 gsi_prev (&gsi);
7123 }
7124 while (!gsi_end_p (gsi));
7125 }
7126 }
7127
7128 if (blocks_split)
7129 verify_flow_info ();
7130
7131 return blocks_split;
7132 }
7133
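/* Sketch (illustrative): profile instrumentation reaches the function
   above through the flow_call_edges_add CFG hook (see the hook table
   below); passing a null bitmap processes the whole CFG.  */
#if 0
  int blocks_split = gimple_flow_call_edges_add (NULL);
  if (blocks_split)
    /* New blocks were created and verify_flow_info has run.  */;
#endif
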
7134 /* Removes edge E and all the blocks dominated by it, and updates dominance
7135 information. The IL in E->src needs to be updated separately.
7136 If dominance info is not available, only the edge E is removed. */
7137
7138 void
7139 remove_edge_and_dominated_blocks (edge e)
7140 {
7141 VEC (basic_block, heap) *bbs_to_remove = NULL;
7142 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
7143 bitmap df, df_idom;
7144 edge f;
7145 edge_iterator ei;
7146 bool none_removed = false;
7147 unsigned i;
7148 basic_block bb, dbb;
7149 bitmap_iterator bi;
7150
7151 if (!dom_info_available_p (CDI_DOMINATORS))
7152 {
7153 remove_edge (e);
7154 return;
7155 }
7156
7157 /* No updating is needed for edges to exit. */
7158 if (e->dest == EXIT_BLOCK_PTR)
7159 {
7160 if (cfgcleanup_altered_bbs)
7161 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7162 remove_edge (e);
7163 return;
7164 }
7165
7166 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7167 that is not dominated by E->dest, then this set is empty. Otherwise,
7168 all the basic blocks dominated by E->dest are removed.
7169
7170 Also, to DF_IDOM we store the immediate dominators of the blocks in
7171 the dominance frontier of E (i.e., of the successors of the
7172 removed blocks, if there are any, and of E->dest otherwise). */
7173 FOR_EACH_EDGE (f, ei, e->dest->preds)
7174 {
7175 if (f == e)
7176 continue;
7177
7178 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7179 {
7180 none_removed = true;
7181 break;
7182 }
7183 }
7184
7185 df = BITMAP_ALLOC (NULL);
7186 df_idom = BITMAP_ALLOC (NULL);
7187
7188 if (none_removed)
7189 bitmap_set_bit (df_idom,
7190 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7191 else
7192 {
7193 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7194 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7195 {
7196 FOR_EACH_EDGE (f, ei, bb->succs)
7197 {
7198 if (f->dest != EXIT_BLOCK_PTR)
7199 bitmap_set_bit (df, f->dest->index);
7200 }
7201 }
7202 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
7203 bitmap_clear_bit (df, bb->index);
7204
7205 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7206 {
7207 bb = BASIC_BLOCK (i);
7208 bitmap_set_bit (df_idom,
7209 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7210 }
7211 }
7212
7213 if (cfgcleanup_altered_bbs)
7214 {
7215 /* Record the set of the altered basic blocks. */
7216 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7217 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7218 }
7219
7220 /* Remove E and the cancelled blocks. */
7221 if (none_removed)
7222 remove_edge (e);
7223 else
7224 {
7225 /* Walk backwards so as to get a chance to substitute all
7226 released DEFs into debug stmts. See
7227 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7228 details. */
7229 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
7230 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
7231 }
7232
7233 /* Update the dominance information. The immediate dominator may change only
7234 for blocks whose immediate dominator belongs to DF_IDOM:
7235
7236 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7237 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7238 Z dominates X after the removal. Before removal, there exists a path P
7239 from Y to X that avoids Z. Let F be the last edge on P that is
7240 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7241 dominates W, and because of P, Z does not dominate W), and W belongs to
7242 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
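/* Concrete illustration (assumed CFG): with blocks A -> B, A -> C,
   B -> D, C -> D and E the edge A->C, removing E makes C unreachable,
   so C is deleted; D lies in the dominance frontier of the removed
   block, idom(D) = A lands in DF_IDOM, and the fixup below correctly
   moves idom(D) from A to B.  */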
7243 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7244 {
7245 bb = BASIC_BLOCK (i);
7246 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7247 dbb;
7248 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7249 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
7250 }
7251
7252 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7253
7254 BITMAP_FREE (df);
7255 BITMAP_FREE (df_idom);
7256 VEC_free (basic_block, heap, bbs_to_remove);
7257 VEC_free (basic_block, heap, bbs_to_fix_dom);
7258 }
7259
7260 /* Purge dead EH edges from basic block BB. */
7261
7262 bool
7263 gimple_purge_dead_eh_edges (basic_block bb)
7264 {
7265 bool changed = false;
7266 edge e;
7267 edge_iterator ei;
7268 gimple stmt = last_stmt (bb);
7269
7270 if (stmt && stmt_can_throw_internal (stmt))
7271 return false;
7272
7273 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7274 {
7275 if (e->flags & EDGE_EH)
7276 {
7277 remove_edge_and_dominated_blocks (e);
7278 changed = true;
7279 }
7280 else
7281 ei_next (&ei);
7282 }
7283
7284 return changed;
7285 }
7286
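/* Typical use (sketch): after the last statement of BB has been
   folded so it can no longer throw, the dead EH successor edges, and
   anything they alone dominated, can be cleaned up.  */
#if 0
  if (gimple_purge_dead_eh_edges (bb))
    cfg_changed = true;  /* e.g. request a later cleanup_tree_cfg.  */
#endif
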
7287 /* Purge dead EH edges from basic blocks listed in BLOCKS. */
7288
7289 bool
7290 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7291 {
7292 bool changed = false;
7293 unsigned i;
7294 bitmap_iterator bi;
7295
7296 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7297 {
7298 basic_block bb = BASIC_BLOCK (i);
7299
7300 /* Earlier gimple_purge_dead_eh_edges could have removed
7301 this basic block already. */
7302 gcc_assert (bb || changed);
7303 if (bb != NULL)
7304 changed |= gimple_purge_dead_eh_edges (bb);
7305 }
7306
7307 return changed;
7308 }
7309
7310 /* Purge dead abnormal call edges from basic block BB. */
7311
7312 bool
7313 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7314 {
7315 bool changed = false;
7316 edge e;
7317 edge_iterator ei;
7318 gimple stmt = last_stmt (bb);
7319
7320 if (!cfun->has_nonlocal_label)
7321 return false;
7322
7323 if (stmt && stmt_can_make_abnormal_goto (stmt))
7324 return false;
7325
7326 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7327 {
7328 if (e->flags & EDGE_ABNORMAL)
7329 {
7330 remove_edge_and_dominated_blocks (e);
7331 changed = true;
7332 }
7333 else
7334 ei_next (&ei);
7335 }
7336
7337 return changed;
7338 }
7339
7340 /* Purge dead abnormal call edges from basic blocks listed in BLOCKS. */
7341
7342 bool
7343 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7344 {
7345 bool changed = false;
7346 unsigned i;
7347 bitmap_iterator bi;
7348
7349 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7350 {
7351 basic_block bb = BASIC_BLOCK (i);
7352
7353 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7354 this basic block already. */
7355 gcc_assert (bb || changed);
7356 if (bb != NULL)
7357 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7358 }
7359
7360 return changed;
7361 }
7362
7363 /* This function is called whenever a new edge is created or
7364 redirected. */
7365
7366 static void
7367 gimple_execute_on_growing_pred (edge e)
7368 {
7369 basic_block bb = e->dest;
7370
7371 if (!gimple_seq_empty_p (phi_nodes (bb)))
7372 reserve_phi_args_for_new_edge (bb);
7373 }
7374
7375 /* This function is called immediately before edge E is removed from
7376 the edge vector E->dest->preds. */
7377
7378 static void
7379 gimple_execute_on_shrinking_pred (edge e)
7380 {
7381 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7382 remove_phi_args (e);
7383 }
7384
7385 /*---------------------------------------------------------------------------
7386 Helper functions for Loop versioning
7387 ---------------------------------------------------------------------------*/
7388
7389 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7390 of 'first'. Both of them are dominated by 'new_head' basic block. When
7391 'new_head' was created by splitting 'second's incoming edge, the edge
7392 from 'new_head' to 'second' received phi arguments from split_edge ().
7393 Later, an additional edge 'e' was created to connect 'new_head' and
7394 'first'. This routine now adds to this additional edge 'e' the phi
7395 args that the 'new_head' to 'second' edge received as part of the
7396 edge splitting. */
7396
7397 static void
7398 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7399 basic_block new_head, edge e)
7400 {
7401 gimple phi1, phi2;
7402 gimple_stmt_iterator psi1, psi2;
7403 tree def;
7404 edge e2 = find_edge (new_head, second);
7405
7406 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7407 edge, we should always have an edge from NEW_HEAD to SECOND. */
7408 gcc_assert (e2 != NULL);
7409
7410 /* Walk all phi nodes of the 'second' basic block and add phi args to
7411 edge 'e' for 'first'. PHI args are always in correct order. */
7412
7413 for (psi2 = gsi_start_phis (second),
7414 psi1 = gsi_start_phis (first);
7415 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7416 gsi_next (&psi2), gsi_next (&psi1))
7417 {
7418 phi1 = gsi_stmt (psi1);
7419 phi2 = gsi_stmt (psi2);
7420 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7421 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7422 }
7423 }
7424
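/* Illustration (assumed shape of the versioned CFG):

       new_head
        /    \
    first    second     (second is the copy of first)

   split_edge gave the new_head->second edge its phi arguments inside
   'second'; the loop above copies those arguments, in order, onto the
   new new_head->first edge E.  */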
7425
7426 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7427 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7428 the destination of the ELSE part. */
7429
7430 static void
7431 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7432 basic_block second_head ATTRIBUTE_UNUSED,
7433 basic_block cond_bb, void *cond_e)
7434 {
7435 gimple_stmt_iterator gsi;
7436 gimple new_cond_expr;
7437 tree cond_expr = (tree) cond_e;
7438 edge e0;
7439
7440 /* Build the new conditional expr. */
7441 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7442 NULL_TREE, NULL_TREE);
7443
7444 /* Add new cond in cond_bb. */
7445 gsi = gsi_last_bb (cond_bb);
7446 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7447
7448 /* Adjust edges appropriately to connect new head with first head
7449 as well as second head. */
7450 e0 = single_succ_edge (cond_bb);
7451 e0->flags &= ~EDGE_FALLTHRU;
7452 e0->flags |= EDGE_FALSE_VALUE;
7453 }
7454
7455 struct cfg_hooks gimple_cfg_hooks = {
7456 "gimple",
7457 gimple_verify_flow_info,
7458 gimple_dump_bb, /* dump_bb */
7459 create_bb, /* create_basic_block */
7460 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7461 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7462 gimple_can_remove_branch_p, /* can_remove_branch_p */
7463 remove_bb, /* delete_basic_block */
7464 gimple_split_block, /* split_block */
7465 gimple_move_block_after, /* move_block_after */
7466 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7467 gimple_merge_blocks, /* merge_blocks */
7468 gimple_predict_edge, /* predict_edge */
7469 gimple_predicted_by_p, /* predicted_by_p */
7470 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7471 gimple_duplicate_bb, /* duplicate_block */
7472 gimple_split_edge, /* split_edge */
7473 gimple_make_forwarder_block, /* make_forwarder_block */
7474 NULL, /* tidy_fallthru_edge */
7475 NULL, /* force_nonfallthru */
7476 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7477 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7478 gimple_flow_call_edges_add, /* flow_call_edges_add */
7479 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7480 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7481 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7482 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7483 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7484 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7485 flush_pending_stmts /* flush_pending_stmts */
7486 };
7487
7488
7489 /* Split all critical edges. */
7490
7491 static unsigned int
7492 split_critical_edges (void)
7493 {
7494 basic_block bb;
7495 edge e;
7496 edge_iterator ei;
7497
7498 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7499 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7500 mappings around the calls to split_edge. */
7501 start_recording_case_labels ();
7502 FOR_ALL_BB (bb)
7503 {
7504 FOR_EACH_EDGE (e, ei, bb->succs)
7505 {
7506 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7507 split_edge (e);
7508 /* PRE inserts statements on edges and expects that,
7509 since split_critical_edges was done beforehand, committing edge
7510 insertions will not split more edges. In addition to critical
7511 edges we must therefore also split edges whose source block ends
7512 in a control flow statement, such as a RESX, and whose destination
7513 cannot hold the inserted code. This matches the logic in
7514 gimple_find_edge_insert_loc. */
7515 else if ((!single_pred_p (e->dest)
7516 || !gimple_seq_empty_p (phi_nodes (e->dest))
7517 || e->dest == EXIT_BLOCK_PTR)
7518 && e->src != ENTRY_BLOCK_PTR
7519 && !(e->flags & EDGE_ABNORMAL))
7520 {
7521 gimple_stmt_iterator gsi;
7522
7523 gsi = gsi_last_bb (e->src);
7524 if (!gsi_end_p (gsi)
7525 && stmt_ends_bb_p (gsi_stmt (gsi))
7526 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7527 && !gimple_call_builtin_p (gsi_stmt (gsi),
7528 BUILT_IN_RETURN)))
7529 split_edge (e);
7530 }
7531 }
7532 }
7533 end_recording_case_labels ();
7534 return 0;
7535 }
7536
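/* For reference, an edge is critical when its source has more than
   one successor and its destination has more than one predecessor, so
   code inserted on it can live in neither endpoint; this is what the
   EDGE_CRITICAL_P test above expands to:  */
#if 0
  if (EDGE_COUNT (e->src->succs) >= 2 && EDGE_COUNT (e->dest->preds) >= 2)
    /* E is critical.  */;
#endif
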
7537 struct gimple_opt_pass pass_split_crit_edges =
7538 {
7539 {
7540 GIMPLE_PASS,
7541 "crited", /* name */
7542 NULL, /* gate */
7543 split_critical_edges, /* execute */
7544 NULL, /* sub */
7545 NULL, /* next */
7546 0, /* static_pass_number */
7547 TV_TREE_SPLIT_EDGES, /* tv_id */
7548 PROP_cfg, /* properties_required */
7549 PROP_no_crit_edges, /* properties_provided */
7550 0, /* properties_destroyed */
7551 0, /* todo_flags_start */
7552 TODO_verify_flow /* todo_flags_finish */
7553 }
7554 };
7555
7556
7557 /* Build a ternary operation and gimplify it. Emit code before GSI.
7558 Return the gimple_val holding the result. */
7559
7560 tree
7561 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7562 tree type, tree a, tree b, tree c)
7563 {
7564 tree ret;
7565 location_t loc = gimple_location (gsi_stmt (*gsi));
7566
7567 ret = fold_build3_loc (loc, code, type, a, b, c);
7568 STRIP_NOPS (ret);
7569
7570 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7571 GSI_SAME_STMT);
7572 }
7573
7574 /* Build a binary operation and gimplify it. Emit code before GSI.
7575 Return the gimple_val holding the result. */
7576
7577 tree
7578 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7579 tree type, tree a, tree b)
7580 {
7581 tree ret;
7582
7583 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7584 STRIP_NOPS (ret);
7585
7586 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7587 GSI_SAME_STMT);
7588 }
7589
7590 /* Build a unary operation and gimplify it. Emit code before GSI.
7591 Return the gimple_val holding the result. */
7592
7593 tree
7594 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7595 tree a)
7596 {
7597 tree ret;
7598
7599 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7600 STRIP_NOPS (ret);
7601
7602 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7603 GSI_SAME_STMT);
7604 }
7605
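/* Sketch (illustrative): the three helpers above compose naturally.
   To emit "(a + b) * c" before the statement at *GSI, assuming TYPE,
   A, B, C and GSI come from the caller:  */
#if 0
  tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
  tree prod = gimplify_build2 (&gsi, MULT_EXPR, type, sum, c);
#endif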
7606
7607 \f
7608 /* Emit return warnings. */
7609
7610 static unsigned int
7611 execute_warn_function_return (void)
7612 {
7613 source_location location;
7614 gimple last;
7615 edge e;
7616 edge_iterator ei;
7617
7618 if (!targetm.warn_func_return (cfun->decl))
7619 return 0;
7620
7621 /* If we have a path to EXIT, then we do return. */
7622 if (TREE_THIS_VOLATILE (cfun->decl)
7623 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7624 {
7625 location = UNKNOWN_LOCATION;
7626 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7627 {
7628 last = last_stmt (e->src);
7629 if ((gimple_code (last) == GIMPLE_RETURN
7630 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7631 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7632 break;
7633 }
7634 if (location == UNKNOWN_LOCATION)
7635 location = cfun->function_end_locus;
7636 warning_at (location, 0, "%<noreturn%> function does return");
7637 }
7638
7639 /* If we see "return;" in some basic block, then we do reach the end
7640 without returning a value. */
7641 else if (warn_return_type
7642 && !TREE_NO_WARNING (cfun->decl)
7643 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7644 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7645 {
7646 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7647 {
7648 gimple last = last_stmt (e->src);
7649 if (gimple_code (last) == GIMPLE_RETURN
7650 && gimple_return_retval (last) == NULL
7651 && !gimple_no_warning_p (last))
7652 {
7653 location = gimple_location (last);
7654 if (location == UNKNOWN_LOCATION)
7655 location = cfun->function_end_locus;
7656 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7657 TREE_NO_WARNING (cfun->decl) = 1;
7658 break;
7659 }
7660 }
7661 }
7662 return 0;
7663 }
7664
7665
7666 /* Given a basic block B which ends with a conditional and has
7667 precisely two successors, determine which of the edges is taken if
7668 the conditional is true and which is taken if the conditional is
7669 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7670
7671 void
7672 extract_true_false_edges_from_block (basic_block b,
7673 edge *true_edge,
7674 edge *false_edge)
7675 {
7676 edge e = EDGE_SUCC (b, 0);
7677
7678 if (e->flags & EDGE_TRUE_VALUE)
7679 {
7680 *true_edge = e;
7681 *false_edge = EDGE_SUCC (b, 1);
7682 }
7683 else
7684 {
7685 *false_edge = e;
7686 *true_edge = EDGE_SUCC (b, 1);
7687 }
7688 }
7689
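/* Sketch (illustrative): given a block COND_BB ending in a
   GIMPLE_COND, pick out the edge taken when the condition holds:  */
#if 0
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
  gcc_assert (true_edge->flags & EDGE_TRUE_VALUE);
#endif
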
7690 struct gimple_opt_pass pass_warn_function_return =
7691 {
7692 {
7693 GIMPLE_PASS,
7694 "*warn_function_return", /* name */
7695 NULL, /* gate */
7696 execute_warn_function_return, /* execute */
7697 NULL, /* sub */
7698 NULL, /* next */
7699 0, /* static_pass_number */
7700 TV_NONE, /* tv_id */
7701 PROP_cfg, /* properties_required */
7702 0, /* properties_provided */
7703 0, /* properties_destroyed */
7704 0, /* todo_flags_start */
7705 0 /* todo_flags_finish */
7706 }
7707 };
7708
7709 /* Emit noreturn warnings. */
7710
7711 static unsigned int
7712 execute_warn_function_noreturn (void)
7713 {
7714 if (!TREE_THIS_VOLATILE (current_function_decl)
7715 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7716 warn_function_noreturn (current_function_decl);
7717 return 0;
7718 }
7719
7720 static bool
7721 gate_warn_function_noreturn (void)
7722 {
7723 return warn_suggest_attribute_noreturn;
7724 }
7725
7726 struct gimple_opt_pass pass_warn_function_noreturn =
7727 {
7728 {
7729 GIMPLE_PASS,
7730 "*warn_function_noreturn", /* name */
7731 gate_warn_function_noreturn, /* gate */
7732 execute_warn_function_noreturn, /* execute */
7733 NULL, /* sub */
7734 NULL, /* next */
7735 0, /* static_pass_number */
7736 TV_NONE, /* tv_id */
7737 PROP_cfg, /* properties_required */
7738 0, /* properties_provided */
7739 0, /* properties_destroyed */
7740 0, /* todo_flags_start */
7741 0 /* todo_flags_finish */
7742 }
7743 };
7744
7745
7746 /* Walk a gimplified function and warn for functions whose return value is
7747 ignored and attribute((warn_unused_result)) is set. This is done before
7748 inlining, so we don't have to worry about inlined call sites. */
7749
7750 static void
7751 do_warn_unused_result (gimple_seq seq)
7752 {
7753 tree fdecl, ftype;
7754 gimple_stmt_iterator i;
7755
7756 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7757 {
7758 gimple g = gsi_stmt (i);
7759
7760 switch (gimple_code (g))
7761 {
7762 case GIMPLE_BIND:
7763 do_warn_unused_result (gimple_bind_body (g));
7764 break;
7765 case GIMPLE_TRY:
7766 do_warn_unused_result (gimple_try_eval (g));
7767 do_warn_unused_result (gimple_try_cleanup (g));
7768 break;
7769 case GIMPLE_CATCH:
7770 do_warn_unused_result (gimple_catch_handler (g));
7771 break;
7772 case GIMPLE_EH_FILTER:
7773 do_warn_unused_result (gimple_eh_filter_failure (g));
7774 break;
7775
7776 case GIMPLE_CALL:
7777 if (gimple_call_lhs (g))
7778 break;
7779 if (gimple_call_internal_p (g))
7780 break;
7781
7782 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7783 LHS. All calls whose value is ignored should be
7784 represented like this. Look for the attribute. */
7785 fdecl = gimple_call_fndecl (g);
7786 ftype = gimple_call_fntype (g);
7787
7788 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7789 {
7790 location_t loc = gimple_location (g);
7791
7792 if (fdecl)
7793 warning_at (loc, OPT_Wunused_result,
7794 "ignoring return value of %qD, "
7795 "declared with attribute warn_unused_result",
7796 fdecl);
7797 else
7798 warning_at (loc, OPT_Wunused_result,
7799 "ignoring return value of function "
7800 "declared with attribute warn_unused_result");
7801 }
7802 break;
7803
7804 default:
7805 /* Not a container, not a call, or a call whose value is used. */
7806 break;
7807 }
7808 }
7809 }
7810
7811 static unsigned int
7812 run_warn_unused_result (void)
7813 {
7814 do_warn_unused_result (gimple_body (current_function_decl));
7815 return 0;
7816 }
7817
7818 static bool
7819 gate_warn_unused_result (void)
7820 {
7821 return flag_warn_unused_result;
7822 }
7823
7824 struct gimple_opt_pass pass_warn_unused_result =
7825 {
7826 {
7827 GIMPLE_PASS,
7828 "*warn_unused_result", /* name */
7829 gate_warn_unused_result, /* gate */
7830 run_warn_unused_result, /* execute */
7831 NULL, /* sub */
7832 NULL, /* next */
7833 0, /* static_pass_number */
7834 TV_NONE, /* tv_id */
7835 PROP_gimple_any, /* properties_required */
7836 0, /* properties_provided */
7837 0, /* properties_destroyed */
7838 0, /* todo_flags_start */
7839 0, /* todo_flags_finish */
7840 }
7841 };