gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
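
/* For example (an illustrative scenario, not from any particular
   testcase): when an edge leaving a GIMPLE_SWITCH is redirected to a
   new destination block, this map lets us find exactly the
   CASE_LABEL_EXPRs that reference that edge and rewrite their labels
   directly, instead of rescanning the whole case vector.  */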

static struct pointer_map_t *edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func			/* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */
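
/* As an illustration (a sketch, not taken from any particular input),
   factoring rewrites

     goto *p1;                gotovar = p1; goto factored;
     ...                =>    ...
     goto *p2;                gotovar = p2; goto factored;
                              factored: goto *gotovar;

   so that only the one factored block carries the many abnormal
   outgoing edges.  */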

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            seq = gsi_split_seq_before (&i);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
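          /* For instance (illustrative): given "x = setjmp (buf);",
             we emit "tmp = setjmp (buf); x = tmp;" so that the
             abnormal edge out of the call still sees the previous
             value of x.  */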
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
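  /* The growth policy below enlarges the array by roughly a quarter of
     its current size: new_size = last_basic_block
     + (last_basic_block + 3) / 4, so, for example, a 20-element array
     grows to 25 (an illustrative figure).  */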
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */
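/* For example (an illustrative case): a GIMPLE_COND such as
   "if (0 != 0)" folds to constant zero and is rewritten via
   gimple_cond_make_false, so edge creation and later CFG cleanup can
   treat the branch as effectively unconditional.  */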

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary (gimple_cond_code (stmt), boolean_type_node,
                              gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* Some calls are known not to return.  */
              fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
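
/* For example (illustrative): in "x = c ? f () : g ();" written on a
   single line, the blocks for the two arms share one locus; giving
   them distinct discriminators lets a sample-based profiler attribute
   samples to each arm rather than lumping them together.  */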

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
      htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                                (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && strcmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple to_stmt;

  if (locus == 0 || bb->discriminator != 0)
    return;

  to_stmt = first_non_label_stmt (bb);
  if (to_stmt && same_line_p (locus, gimple_location (to_stmt)))
    bb->discriminator = next_discriminator_for_locus (locus);
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = TREE_CHAIN (t);
      TREE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return false;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}


/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.  */
static void
update_eh_label (struct eh_region_d *region)
{
  tree old_label = get_eh_region_tree_label (region);
  if (old_label)
    {
      tree new_label;
      basic_block bb = label_to_block (old_label);

      /* ??? After optimizing, there may be EH regions with labels
         that have already been removed from the function body, so
         there is no basic block for them.  */
      if (! bb)
        return;

      new_label = label_for_bb[bb->index].label;
      label_for_bb[bb->index].used = true;
      set_eh_region_tree_label (region, new_label);
    }
}


/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */
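
/* For example (illustrative): if a block begins with

     L1: L2: L3: ...

   and L2 is the only user-defined label, L2 becomes the leading label;
   jumps, switch cases and EH regions referring to L1 or L3 are
   redirected to L2, and the now-unused artificial labels are deleted
   in step 3.  */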

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            tree true_label = gimple_cond_true_label (stmt);
            tree false_label = gimple_cond_false_label (stmt);

            if (true_label)
              gimple_cond_set_true_label (stmt, main_block_label (true_label));
            if (false_label)
              gimple_cond_set_false_label (stmt, main_block_label (false_label));
            break;
          }

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                tree label = main_block_label (CASE_LABEL (case_label));
                CASE_LABEL (case_label) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              tree new_dest = main_block_label (gimple_goto_dest (stmt));
              gimple_goto_set_dest (stmt, new_dest);
              break;
            }

        default:
          break;
        }
    }

  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        {
          int old_size = gimple_switch_num_labels (stmt);
          int i, j, new_size = old_size;
          tree default_case = NULL_TREE;
          tree default_label = NULL_TREE;
          bool has_default;

          /* The default label is always the first case in a switch
             statement after gimplification if it was not optimized
             away.  */
          if (!CASE_LOW (gimple_switch_default_label (stmt))
              && !CASE_HIGH (gimple_switch_default_label (stmt)))
            {
              default_case = gimple_switch_default_label (stmt);
              default_label = CASE_LABEL (default_case);
              has_default = true;
            }
          else
            has_default = false;

          /* Look for possible opportunities to merge cases.  */
          if (has_default)
            i = 1;
          else
            i = 0;
          while (i < old_size)
            {
              tree base_case, base_label, base_high;
              base_case = gimple_switch_label (stmt, i);

              gcc_assert (base_case);
              base_label = CASE_LABEL (base_case);

              /* Discard cases that have the same destination as the
                 default case.  */
              if (base_label == default_label)
                {
                  gimple_switch_set_label (stmt, i, NULL_TREE);
                  i++;
                  new_size--;
                  continue;
                }

              base_high = CASE_HIGH (base_case)
                          ? CASE_HIGH (base_case)
                          : CASE_LOW (base_case);
              i++;

              /* Try to merge case labels.  Break out when we reach the end
                 of the label vector or when we cannot merge the next case
                 label with the current one.  */
              while (i < old_size)
                {
                  tree merge_case = gimple_switch_label (stmt, i);
                  tree merge_label = CASE_LABEL (merge_case);
                  tree t = int_const_binop (PLUS_EXPR, base_high,
                                            integer_one_node, 1);

                  /* Merge the cases if they jump to the same place,
                     and their ranges are consecutive.  */
                  if (merge_label == base_label
                      && tree_int_cst_equal (CASE_LOW (merge_case), t))
                    {
                      base_high = CASE_HIGH (merge_case) ?
                          CASE_HIGH (merge_case) : CASE_LOW (merge_case);
                      CASE_HIGH (base_case) = base_high;
                      gimple_switch_set_label (stmt, i, NULL_TREE);
                      new_size--;
                      i++;
                    }
                  else
                    break;
                }
            }

          /* Compress the case labels in the label vector, and adjust the
             length of the vector.  */
          for (i = 0, j = 0; i < new_size; i++)
            {
              while (! gimple_switch_label (stmt, j))
                j++;
              gimple_switch_set_label (stmt, i,
                                       gimple_switch_label (stmt, j++));
            }

          gcc_assert (new_size <= old_size);
          gimple_switch_set_num_labels (stmt, new_size);
        }
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date, we cannot eliminate any phis; however, if only
     some symbols as whole are marked for renaming, this is not a problem,
     as phi nodes for those symbols are irrelevant in updating anyway.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis))
    {
      gimple_stmt_iterator i;

      if (name_mappings_registered_p ())
        return false;

      for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi))
              && !may_propagate_copy (gimple_phi_result (phi),
                                      gimple_phi_arg_def (phi, 0)))
            return false;
        }
    }

  /* Do not remove user labels.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      if (!DECL_ARTIFICIAL (gimple_label_label (stmt)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops
      && b->loop_father->latch == b)
    return false;

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          size_t i;

          fold_stmt_inplace (stmt);
          if (cfgcleanup_altered_bbs)
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  This should go in update_stmt.  */
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              tree op = gimple_op (stmt, i);
              /* Operands may be empty here.  For example, the labels
                 of a GIMPLE_COND are nulled out following the creation
                 of the corresponding CFG edges.  */
              if (op && TREE_CODE (op) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (op);
            }

          maybe_clean_or_replace_eh_stmt (stmt, stmt);
          update_stmt (stmt);
        }
    }

  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
                              || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
        {
          gimple label = gsi_stmt (gsi);

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (gimple_label_label (label)))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, label, GSI_NEW_STMT);
            }
        }
      else
        {
          gimple_set_bb (gsi_stmt (gsi), a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}


/* Walk the function tree removing unnecessary statements.

     * Empty statement nodes are removed

     * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed

     * Unnecessary COND_EXPRs are removed

     * Some unnecessary BIND_EXPRs are removed

     * GOTO_EXPRs immediately preceding destination are removed.

   Clearly more work could be done.  The trick is doing the analysis
   and removal fast enough to be a net improvement in compile times.

   Note that when we remove a control structure such as a COND_EXPR,
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
   to ensure we eliminate all the useless code.  */
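
/* For example (illustrative): a goto that immediately precedes its
   destination,

     goto L;
     L: ...

   is replaced with a GIMPLE_NOP by remove_useless_stmts_label and
   swept away later, leaving just "L: ...".  */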
1614
1615 struct rus_data
1616 {
1617 bool repeat;
1618 bool may_throw;
1619 bool may_branch;
1620 bool has_label;
1621 bool last_was_goto;
1622 gimple_stmt_iterator last_goto_gsi;
1623 };
1624
1625
1626 static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);
1627
1628 /* Given a statement sequence, find the first executable statement with
1629 location information, and warn that it is unreachable. When searching,
1630 descend into containers in execution order. */
1631
1632 static bool
1633 remove_useless_stmts_warn_notreached (gimple_seq stmts)
1634 {
1635 gimple_stmt_iterator gsi;
1636
1637 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
1638 {
1639 gimple stmt = gsi_stmt (gsi);
1640
1641 if (gimple_has_location (stmt))
1642 {
1643 location_t loc = gimple_location (stmt);
1644 if (LOCATION_LINE (loc) > 0)
1645 {
1646 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
1647 return true;
1648 }
1649 }
1650
1651 switch (gimple_code (stmt))
1652 {
1653 /* Unfortunately, we need the CFG now to detect unreachable
1654 branches in a conditional, so conditionals are not handled here. */
1655
1656 case GIMPLE_TRY:
1657 if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
1658 return true;
1659 if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
1660 return true;
1661 break;
1662
1663 case GIMPLE_CATCH:
1664 return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));
1665
1666 case GIMPLE_EH_FILTER:
1667 return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));
1668
1669 case GIMPLE_BIND:
1670 return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));
1671
1672 default:
1673 break;
1674 }
1675 }
1676
1677 return false;
1678 }
1679
1680 /* Helper for remove_useless_stmts_1. Handle GIMPLE_COND statements. */
1681
1682 static void
1683 remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
1684 {
1685 gimple stmt = gsi_stmt (*gsi);
1686
1687 /* The folded result must still be a conditional statement. */
1688 fold_stmt (gsi);
1689 gcc_assert (gsi_stmt (*gsi) == stmt);
1690
1691 data->may_branch = true;
1692
1693 /* Replace trivial conditionals with gotos. */
1694 if (gimple_cond_true_p (stmt))
1695 {
1696 /* Goto THEN label. */
1697 tree then_label = gimple_cond_true_label (stmt);
1698
1699 gsi_replace (gsi, gimple_build_goto (then_label), false);
1700 data->last_goto_gsi = *gsi;
1701 data->last_was_goto = true;
1702 data->repeat = true;
1703 }
1704 else if (gimple_cond_false_p (stmt))
1705 {
1706 /* Goto ELSE label. */
1707 tree else_label = gimple_cond_false_label (stmt);
1708
1709 gsi_replace (gsi, gimple_build_goto (else_label), false);
1710 data->last_goto_gsi = *gsi;
1711 data->last_was_goto = true;
1712 data->repeat = true;
1713 }
1714 else
1715 {
1716 tree then_label = gimple_cond_true_label (stmt);
1717 tree else_label = gimple_cond_false_label (stmt);
1718
1719 if (then_label == else_label)
1720 {
1721 /* Goto common destination. */
1722 gsi_replace (gsi, gimple_build_goto (then_label), false);
1723 data->last_goto_gsi = *gsi;
1724 data->last_was_goto = true;
1725 data->repeat = true;
1726 }
1727 }
1728
1729 gsi_next (gsi);
1730
1731 data->last_was_goto = false;
1732 }
1733
1734 /* Helper for remove_useless_stmts_1.
1735 Handle the try-finally case for GIMPLE_TRY statements. */
1736
1737 static void
1738 remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
1739 {
1740 bool save_may_branch, save_may_throw;
1741 bool this_may_branch, this_may_throw;
1742
1743 gimple_seq eval_seq, cleanup_seq;
1744 gimple_stmt_iterator eval_gsi, cleanup_gsi;
1745
1746 gimple stmt = gsi_stmt (*gsi);
1747
1748 /* Collect may_branch and may_throw information for the body only. */
1749 save_may_branch = data->may_branch;
1750 save_may_throw = data->may_throw;
1751 data->may_branch = false;
1752 data->may_throw = false;
1753 data->last_was_goto = false;
1754
1755 eval_seq = gimple_try_eval (stmt);
1756 eval_gsi = gsi_start (eval_seq);
1757 remove_useless_stmts_1 (&eval_gsi, data);
1758
1759 this_may_branch = data->may_branch;
1760 this_may_throw = data->may_throw;
1761 data->may_branch |= save_may_branch;
1762 data->may_throw |= save_may_throw;
1763 data->last_was_goto = false;
1764
1765 cleanup_seq = gimple_try_cleanup (stmt);
1766 cleanup_gsi = gsi_start (cleanup_seq);
1767 remove_useless_stmts_1 (&cleanup_gsi, data);
1768
1769 /* If the body is empty, then we can emit the FINALLY block without
1770 the enclosing TRY_FINALLY_EXPR. */
1771 if (gimple_seq_empty_p (eval_seq))
1772 {
1773 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1774 gsi_remove (gsi, false);
1775 data->repeat = true;
1776 }
1777
1778 /* If the handler is empty, then we can emit the TRY block without
1779 the enclosing TRY_FINALLY_EXPR. */
1780 else if (gimple_seq_empty_p (cleanup_seq))
1781 {
1782 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1783 gsi_remove (gsi, false);
1784 data->repeat = true;
1785 }
1786
1787 /* If the body neither throws, nor branches, then we can safely
1788 string the TRY and FINALLY blocks together. */
1789 else if (!this_may_branch && !this_may_throw)
1790 {
1791 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1792 gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
1793 gsi_remove (gsi, false);
1794 data->repeat = true;
1795 }
1796 else
1797 gsi_next (gsi);
1798 }
1799
1800 /* Helper for remove_useless_stmts_1.
1801 Handle the try-catch case for GIMPLE_TRY statements. */
1802
1803 static void
1804 remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
1805 {
1806 bool save_may_throw, this_may_throw;
1807
1808 gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
1809 gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;
1810
1811 gimple stmt = gsi_stmt (*gsi);
1812
1813 /* Collect may_throw information for the body only. */
1814 save_may_throw = data->may_throw;
1815 data->may_throw = false;
1816 data->last_was_goto = false;
1817
1818 eval_seq = gimple_try_eval (stmt);
1819 eval_gsi = gsi_start (eval_seq);
1820 remove_useless_stmts_1 (&eval_gsi, data);
1821
1822 this_may_throw = data->may_throw;
1823 data->may_throw = save_may_throw;
1824
1825 cleanup_seq = gimple_try_cleanup (stmt);
1826
1827 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1828 if (!this_may_throw)
1829 {
1830 if (warn_notreached)
1831 {
1832 remove_useless_stmts_warn_notreached (cleanup_seq);
1833 }
1834 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1835 gsi_remove (gsi, false);
1836 data->repeat = true;
1837 return;
1838 }
1839
1840 /* Process the catch clause specially. We may be able to tell that
1841 no exceptions propagate past this point. */
1842
1843 this_may_throw = true;
1844 cleanup_gsi = gsi_start (cleanup_seq);
1845 stmt = gsi_stmt (cleanup_gsi);
1846 data->last_was_goto = false;
1847
1848 switch (gimple_code (stmt))
1849 {
1850 case GIMPLE_CATCH:
1851 /* If the first element is a catch, they all must be. */
1852 while (!gsi_end_p (cleanup_gsi))
1853 {
1854 stmt = gsi_stmt (cleanup_gsi);
1855 /* If we catch all exceptions, then the body does not
1856 propagate exceptions past this point. */
1857 if (gimple_catch_types (stmt) == NULL)
1858 this_may_throw = false;
1859 data->last_was_goto = false;
1860 handler_seq = gimple_catch_handler (stmt);
1861 handler_gsi = gsi_start (handler_seq);
1862 remove_useless_stmts_1 (&handler_gsi, data);
1863 gsi_next (&cleanup_gsi);
1864 }
1865 gsi_next (gsi);
1866 break;
1867
1868 case GIMPLE_EH_FILTER:
1869 /* If the first element is an eh_filter, it should stand alone. */
1870 if (gimple_eh_filter_must_not_throw (stmt))
1871 this_may_throw = false;
1872 else if (gimple_eh_filter_types (stmt) == NULL)
1873 this_may_throw = false;
1874 failure_seq = gimple_eh_filter_failure (stmt);
1875 failure_gsi = gsi_start (failure_seq);
1876 remove_useless_stmts_1 (&failure_gsi, data);
1877 gsi_next (gsi);
1878 break;
1879
1880 default:
1881 /* Otherwise this is a list of cleanup statements. */
1882 remove_useless_stmts_1 (&cleanup_gsi, data);
1883
1884 /* If the cleanup is empty, then we can emit the TRY block without
1885 the enclosing TRY_CATCH_EXPR. */
1886 if (gimple_seq_empty_p (cleanup_seq))
1887 {
1888 gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
1889 gsi_remove(gsi, false);
1890 data->repeat = true;
1891 }
1892 else
1893 gsi_next (gsi);
1894 break;
1895 }
1896
1897 data->may_throw |= this_may_throw;
1898 }
1899
1900 /* Helper for remove_useless_stmts_1. Handle GIMPLE_BIND statements. */
1901
1902 static void
1903 remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data ATTRIBUTE_UNUSED)
1904 {
1905 tree block;
1906 gimple_seq body_seq, fn_body_seq;
1907 gimple_stmt_iterator body_gsi;
1908
1909 gimple stmt = gsi_stmt (*gsi);
1910
1911 /* First remove anything underneath the BIND_EXPR. */
1912
1913 body_seq = gimple_bind_body (stmt);
1914 body_gsi = gsi_start (body_seq);
1915 remove_useless_stmts_1 (&body_gsi, data);
1916
1917 /* If the GIMPLE_BIND has no variables, then we can pull everything
1918 up one level and remove the GIMPLE_BIND, unless this is the toplevel
1919 GIMPLE_BIND for the current function or an inlined function.
1920
1921 When this situation occurs we will want to apply this
1922 optimization again. */
1923 block = gimple_bind_block (stmt);
1924 fn_body_seq = gimple_body (current_function_decl);
1925 if (gimple_bind_vars (stmt) == NULL_TREE
1926 && (gimple_seq_empty_p (fn_body_seq)
1927 || stmt != gimple_seq_first_stmt (fn_body_seq))
1928 && (! block
1929 || ! BLOCK_ABSTRACT_ORIGIN (block)
1930 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1931 != FUNCTION_DECL)))
1932 {
1933 tree var = NULL_TREE;
1934 /* Even if there are no gimple_bind_vars, there might be other
1935 decls in BLOCK_VARS rendering the GIMPLE_BIND not useless. */
1936 if (block && !BLOCK_NUM_NONLOCALIZED_VARS (block))
1937 for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
1938 if (TREE_CODE (var) == IMPORTED_DECL)
1939 break;
1940 if (var || (block && BLOCK_NUM_NONLOCALIZED_VARS (block)))
1941 gsi_next (gsi);
1942 else
1943 {
1944 gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
1945 gsi_remove (gsi, false);
1946 data->repeat = true;
1947 }
1948 }
1949 else
1950 gsi_next (gsi);
1951 }
1952
1953 /* Helper for remove_useless_stmts_1. Handle GIMPLE_GOTO statements. */
1954
1955 static void
1956 remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
1957 {
1958 gimple stmt = gsi_stmt (*gsi);
1959
1960 tree dest = gimple_goto_dest (stmt);
1961
1962 data->may_branch = true;
1963 data->last_was_goto = false;
1964
1965 /* Record the iterator for the last GIMPLE_GOTO, so that we can delete it later if it proves unnecessary. */
1966 if (TREE_CODE (dest) == LABEL_DECL)
1967 {
1968 data->last_goto_gsi = *gsi;
1969 data->last_was_goto = true;
1970 }
1971
1972 gsi_next (gsi);
1973 }
1974
1975 /* Helper for remove_useless_stmts_1. Handle GIMPLE_LABEL statements. */
1976
1977 static void
1978 remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
1979 {
1980 gimple stmt = gsi_stmt (*gsi);
1981
1982 tree label = gimple_label_label (stmt);
1983
1984 data->has_label = true;
1985
1986 /* We do want to jump across non-local label receiver code, so a preceding goto is not removable here. */
1987 if (DECL_NONLOCAL (label))
1988 data->last_was_goto = false;
1989
1990 else if (data->last_was_goto
1991 && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
1992 {
1993 /* Replace the preceding GIMPLE_GOTO statement with
1994 a GIMPLE_NOP, which will be subsequently removed.
1995 In this way, we avoid invalidating other iterators
1996 active on the statement sequence. */
1997 gsi_replace (&data->last_goto_gsi, gimple_build_nop (), false);
1998 data->last_was_goto = false;
1999 data->repeat = true;
2000 }
2001
2002 /* ??? Add something here to delete unused labels. */
2003
2004 gsi_next (gsi);
2005 }
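/* A sketch of the collapse handled above, assuming a local label L
   (illustrative only):

       goto L;                    GIMPLE_NOP   <- removed on a later
       L:              =>         L:              iteration of the pass

   Replacing the goto with a nop instead of deleting it outright keeps
   other iterators on the statement sequence valid.  */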
2006
2007
2008 /* CALL is a GIMPLE_CALL. Set the cfun->calls_* flags. */
2009
2010 void
2011 notice_special_calls (gimple call)
2012 {
2013 int flags = gimple_call_flags (call);
2014
2015 if (flags & ECF_MAY_BE_ALLOCA)
2016 cfun->calls_alloca = true;
2017 if (flags & ECF_RETURNS_TWICE)
2018 cfun->calls_setjmp = true;
2019 }
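/* For example (an illustrative C fragment, not from these sources;
   `buf' is assumed to be a jmp_buf in scope and `use' is hypothetical):

       void
       f (int n)
       {
         char *p = alloca (n);   /- ECF_MAY_BE_ALLOCA -> cfun->calls_alloca -/
         if (setjmp (buf))       /- ECF_RETURNS_TWICE -> cfun->calls_setjmp -/
           use (p);
       }                                                                    */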
2020
2021
2022 /* Clear flags set by notice_special_calls. Used by dead code removal
2023 to update the flags. */
2024
2025 void
2026 clear_special_calls (void)
2027 {
2028 cfun->calls_alloca = false;
2029 cfun->calls_setjmp = false;
2030 }
2031
2032 /* Remove useless statements from a statement sequence, and perform
2033 some preliminary simplifications. */
2034
2035 static void
2036 remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
2037 {
2038 while (!gsi_end_p (*gsi))
2039 {
2040 gimple stmt = gsi_stmt (*gsi);
2041
2042 switch (gimple_code (stmt))
2043 {
2044 case GIMPLE_COND:
2045 remove_useless_stmts_cond (gsi, data);
2046 break;
2047
2048 case GIMPLE_GOTO:
2049 remove_useless_stmts_goto (gsi, data);
2050 break;
2051
2052 case GIMPLE_LABEL:
2053 remove_useless_stmts_label (gsi, data);
2054 break;
2055
2056 case GIMPLE_ASSIGN:
2057 fold_stmt (gsi);
2058 stmt = gsi_stmt (*gsi);
2059 data->last_was_goto = false;
2060 if (stmt_could_throw_p (stmt))
2061 data->may_throw = true;
2062 gsi_next (gsi);
2063 break;
2064
2065 case GIMPLE_ASM:
2066 fold_stmt (gsi);
2067 data->last_was_goto = false;
2068 gsi_next (gsi);
2069 break;
2070
2071 case GIMPLE_CALL:
2072 fold_stmt (gsi);
2073 stmt = gsi_stmt (*gsi);
2074 data->last_was_goto = false;
2075 if (is_gimple_call (stmt))
2076 notice_special_calls (stmt);
2077
2078 /* We used to call update_gimple_call_flags here,
2079 which copied side-effects and nothrows status
2080 from the function decl to the call. In the new
2081 tuplified GIMPLE, the accessors for this information
2082 always consult the function decl, so this copying
2083 is no longer necessary. */
2084 if (stmt_could_throw_p (stmt))
2085 data->may_throw = true;
2086 gsi_next (gsi);
2087 break;
2088
2089 case GIMPLE_RETURN:
2090 fold_stmt (gsi);
2091 data->last_was_goto = false;
2092 data->may_branch = true;
2093 gsi_next (gsi);
2094 break;
2095
2096 case GIMPLE_BIND:
2097 remove_useless_stmts_bind (gsi, data);
2098 break;
2099
2100 case GIMPLE_TRY:
2101 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
2102 remove_useless_stmts_tc (gsi, data);
2103 else if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
2104 remove_useless_stmts_tf (gsi, data);
2105 else
2106 gcc_unreachable ();
2107 break;
2108
2109 case GIMPLE_CATCH:
2110 gcc_unreachable ();
2111 break;
2112
2113 case GIMPLE_NOP:
2114 gsi_remove (gsi, false);
2115 break;
2116
2117 case GIMPLE_OMP_FOR:
2118 {
2119 gimple_seq pre_body_seq = gimple_omp_for_pre_body (stmt);
2120 gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);
2121
2122 remove_useless_stmts_1 (&pre_body_gsi, data);
2123 data->last_was_goto = false;
2124 }
2125 /* FALLTHROUGH */
2126 case GIMPLE_OMP_CRITICAL:
2127 case GIMPLE_OMP_CONTINUE:
2128 case GIMPLE_OMP_MASTER:
2129 case GIMPLE_OMP_ORDERED:
2130 case GIMPLE_OMP_SECTION:
2131 case GIMPLE_OMP_SECTIONS:
2132 case GIMPLE_OMP_SINGLE:
2133 {
2134 gimple_seq body_seq = gimple_omp_body (stmt);
2135 gimple_stmt_iterator body_gsi = gsi_start (body_seq);
2136
2137 remove_useless_stmts_1 (&body_gsi, data);
2138 data->last_was_goto = false;
2139 gsi_next (gsi);
2140 }
2141 break;
2142
2143 case GIMPLE_OMP_PARALLEL:
2144 case GIMPLE_OMP_TASK:
2145 {
2146 /* Make sure the outermost GIMPLE_BIND isn't removed
2147 as useless. */
2148 gimple_seq body_seq = gimple_omp_body (stmt);
2149 gimple bind = gimple_seq_first_stmt (body_seq);
2150 gimple_seq bind_seq = gimple_bind_body (bind);
2151 gimple_stmt_iterator bind_gsi = gsi_start (bind_seq);
2152
2153 remove_useless_stmts_1 (&bind_gsi, data);
2154 data->last_was_goto = false;
2155 gsi_next (gsi);
2156 }
2157 break;
2158
2159 default:
2160 data->last_was_goto = false;
2161 gsi_next (gsi);
2162 break;
2163 }
2164 }
2165 }
2166
2167 /* Walk the function tree, removing useless statements and performing
2168 some preliminary simplifications. */
2169
2170 static unsigned int
2171 remove_useless_stmts (void)
2172 {
2173 struct rus_data data;
2174
2175 clear_special_calls ();
2176
2177 do
2178 {
2179 gimple_stmt_iterator gsi;
2180
2181 gsi = gsi_start (gimple_body (current_function_decl));
2182 memset (&data, 0, sizeof (data));
2183 remove_useless_stmts_1 (&gsi, &data);
2184 }
2185 while (data.repeat);
2186
2187 #ifdef ENABLE_TYPES_CHECKING
2188 verify_types_in_gimple_seq (gimple_body (current_function_decl));
2189 #endif
2190
2191 return 0;
2192 }
2193
2194
2195 struct gimple_opt_pass pass_remove_useless_stmts =
2196 {
2197 {
2198 GIMPLE_PASS,
2199 "useless", /* name */
2200 NULL, /* gate */
2201 remove_useless_stmts, /* execute */
2202 NULL, /* sub */
2203 NULL, /* next */
2204 0, /* static_pass_number */
2205 TV_NONE, /* tv_id */
2206 PROP_gimple_any, /* properties_required */
2207 0, /* properties_provided */
2208 0, /* properties_destroyed */
2209 0, /* todo_flags_start */
2210 TODO_dump_func /* todo_flags_finish */
2211 }
2212 };
2213
2214 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2215
2216 static void
2217 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2218 {
2219 /* Since this block is no longer reachable, we can just delete all
2220 of its PHI nodes. */
2221 remove_phi_nodes (bb);
2222
2223 /* Remove edges to BB's successors. */
2224 while (EDGE_COUNT (bb->succs) > 0)
2225 remove_edge (EDGE_SUCC (bb, 0));
2226 }
2227
2228
2229 /* Remove statements of basic block BB. */
2230
2231 static void
2232 remove_bb (basic_block bb)
2233 {
2234 gimple_stmt_iterator i;
2235 source_location loc = UNKNOWN_LOCATION;
2236
2237 if (dump_file)
2238 {
2239 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2240 if (dump_flags & TDF_DETAILS)
2241 {
2242 dump_bb (bb, dump_file, 0);
2243 fprintf (dump_file, "\n");
2244 }
2245 }
2246
2247 if (current_loops)
2248 {
2249 struct loop *loop = bb->loop_father;
2250
2251 /* If a loop gets removed, clean up the information associated
2252 with it. */
2253 if (loop->latch == bb
2254 || loop->header == bb)
2255 free_numbers_of_iterations_estimates_loop (loop);
2256 }
2257
2258 /* Remove all the instructions in the block. */
2259 if (bb_seq (bb) != NULL)
2260 {
2261 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2262 {
2263 gimple stmt = gsi_stmt (i);
2264 if (gimple_code (stmt) == GIMPLE_LABEL
2265 && (FORCED_LABEL (gimple_label_label (stmt))
2266 || DECL_NONLOCAL (gimple_label_label (stmt))))
2267 {
2268 basic_block new_bb;
2269 gimple_stmt_iterator new_gsi;
2270
2271 /* A non-reachable non-local label may still be referenced.
2272 But it no longer needs to carry the extra semantics of
2273 non-locality. */
2274 if (DECL_NONLOCAL (gimple_label_label (stmt)))
2275 {
2276 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2277 FORCED_LABEL (gimple_label_label (stmt)) = 1;
2278 }
2279
2280 new_bb = bb->prev_bb;
2281 new_gsi = gsi_start_bb (new_bb);
2282 gsi_remove (&i, false);
2283 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2284 }
2285 else
2286 {
2287 /* Release SSA definitions if we are in SSA. Note that we
2288 may be called when not in SSA. For example,
2289 final_cleanup calls this function via
2290 cleanup_tree_cfg. */
2291 if (gimple_in_ssa_p (cfun))
2292 release_defs (stmt);
2293
2294 gsi_remove (&i, true);
2295 }
2296
2297 /* Don't warn for removed gotos. Gotos are often removed due to
2298 jump threading, thus resulting in bogus warnings. Not great,
2299 since this way we lose warnings for gotos in the original
2300 program that are indeed unreachable. */
2301 if (gimple_code (stmt) != GIMPLE_GOTO
2302 && gimple_has_location (stmt)
2303 && !loc)
2304 loc = gimple_location (stmt);
2305 }
2306 }
2307
2308 /* If requested, give a warning that the first statement in the
2309 block is unreachable. LOC is set at most once in the loop
2310 above, so it refers to the first statement with a location in
2311 the block. */
2312 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
2313 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2314
2315 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2316 bb->il.gimple = NULL;
2317 }
2318
2319
2320 /* Given a basic block BB ending with a GIMPLE_COND, GIMPLE_SWITCH or
2321 computed GIMPLE_GOTO, and a constant VAL, return the edge taken out
2322 of the block. If VAL does not match a unique edge, NULL is returned. */
2323
2324 edge
2325 find_taken_edge (basic_block bb, tree val)
2326 {
2327 gimple stmt;
2328
2329 stmt = last_stmt (bb);
2330
2331 gcc_assert (stmt);
2332 gcc_assert (is_ctrl_stmt (stmt));
2333
2334 if (val == NULL)
2335 return NULL;
2336
2337 if (!is_gimple_min_invariant (val))
2338 return NULL;
2339
2340 if (gimple_code (stmt) == GIMPLE_COND)
2341 return find_taken_edge_cond_expr (bb, val);
2342
2343 if (gimple_code (stmt) == GIMPLE_SWITCH)
2344 return find_taken_edge_switch_expr (bb, val);
2345
2346 if (computed_goto_p (stmt))
2347 {
2348 /* Only optimize if the argument is a label; if the argument is
2349 not a label then we cannot construct a proper CFG.
2350
2351 It may be the case that we only need to allow the LABEL_REF to
2352 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2353 appear inside a LABEL_EXPR just to be safe. */
2354 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2355 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2356 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2357 return NULL;
2358 }
2359
2360 gcc_unreachable ();
2361 }
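/* For instance (illustrative): if BB ends in

       if (i_1 != 0) goto <L1>; else goto <L2>;

   then find_taken_edge (bb, integer_zero_node) returns the edge to <L2>,
   while a VAL that is not a gimple invariant yields NULL and both
   outgoing edges must be preserved.  */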
2362
2363 /* Given a constant value VAL and the entry block BB to a computed
2364 GIMPLE_GOTO statement, determine which of the outgoing edges will be
2365 taken out of the block. Return NULL if any edge may be taken. */
2366
2367 static edge
2368 find_taken_edge_computed_goto (basic_block bb, tree val)
2369 {
2370 basic_block dest;
2371 edge e = NULL;
2372
2373 dest = label_to_block (val);
2374 if (dest)
2375 {
2376 e = find_edge (bb, dest);
2377 gcc_assert (e != NULL);
2378 }
2379
2380 return e;
2381 }
2382
2383 /* Given a constant value VAL and the entry block BB to a GIMPLE_COND
2384 statement, determine which of the two edges will be taken out of the
2385 block. Return NULL if either edge may be taken. */
2386
2387 static edge
2388 find_taken_edge_cond_expr (basic_block bb, tree val)
2389 {
2390 edge true_edge, false_edge;
2391
2392 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2393
2394 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2395 return (integer_zerop (val) ? false_edge : true_edge);
2396 }
2397
2398 /* Given an INTEGER_CST VAL and the entry block BB to a GIMPLE_SWITCH
2399 statement, determine which edge will be taken out of the block. Return
2400 NULL if any edge may be taken. */
2401
2402 static edge
2403 find_taken_edge_switch_expr (basic_block bb, tree val)
2404 {
2405 basic_block dest_bb;
2406 edge e;
2407 gimple switch_stmt;
2408 tree taken_case;
2409
2410 switch_stmt = last_stmt (bb);
2411 taken_case = find_case_label_for_value (switch_stmt, val);
2412 dest_bb = label_to_block (CASE_LABEL (taken_case));
2413
2414 e = find_edge (bb, dest_bb);
2415 gcc_assert (e);
2416 return e;
2417 }
2418
2419
2420 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2421 We can make optimal use here of the fact that the case labels are
2422 sorted: we can do a binary search for a case matching VAL. */
2423
2424 static tree
2425 find_case_label_for_value (gimple switch_stmt, tree val)
2426 {
2427 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2428 tree default_case = gimple_switch_default_label (switch_stmt);
2429
2430 for (low = 0, high = n; high - low > 1; )
2431 {
2432 size_t i = (high + low) / 2;
2433 tree t = gimple_switch_label (switch_stmt, i);
2434 int cmp;
2435
2436 /* Cache the result of comparing CASE_LOW and VAL. */
2437 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2438
2439 if (cmp > 0)
2440 high = i;
2441 else
2442 low = i;
2443
2444 if (CASE_HIGH (t) == NULL)
2445 {
2446 /* A single-valued case label. */
2447 if (cmp == 0)
2448 return t;
2449 }
2450 else
2451 {
2452 /* A case range. We can only handle integer ranges. */
2453 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2454 return t;
2455 }
2456 }
2457
2458 return default_case;
2459 }
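/* A worked example of the binary search above (hypothetical case
   vector; index 0 holds the default label and is never compared):

       labels: [default | case 1 | case 5 | case 10 ... 20],  VAL = 12

       low=0 high=4:  i=2, CASE_LOW=5,  cmp < 0           -> low=2
       low=2 high=4:  i=3, CASE_LOW=10, cmp <= 0, 20 >= 12 -> return label 3

   For VAL = 7 no label matches, the loop ends with high - low == 1,
   and the default label is returned.  */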
2460
2461
2462 /* Dump a basic block on stderr. */
2463
2464 void
2465 gimple_debug_bb (basic_block bb)
2466 {
2467 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2468 }
2469
2470
2471 /* Dump basic block with index N on stderr. */
2472
2473 basic_block
2474 gimple_debug_bb_n (int n)
2475 {
2476 gimple_debug_bb (BASIC_BLOCK (n));
2477 return BASIC_BLOCK (n);
2478 }
2479
2480
2481 /* Dump the CFG on stderr.
2482
2483 FLAGS are the same as those used by the tree dumping functions
2484 (see TDF_* in tree-pass.h). */
2485
2486 void
2487 gimple_debug_cfg (int flags)
2488 {
2489 gimple_dump_cfg (stderr, flags);
2490 }
2491
2492
2493 /* Dump the program showing basic block boundaries on the given FILE.
2494
2495 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2496 tree.h). */
2497
2498 void
2499 gimple_dump_cfg (FILE *file, int flags)
2500 {
2501 if (flags & TDF_DETAILS)
2502 {
2503 const char *funcname
2504 = lang_hooks.decl_printable_name (current_function_decl, 2);
2505
2506 fputc ('\n', file);
2507 fprintf (file, ";; Function %s\n\n", funcname);
2508 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2509 n_basic_blocks, n_edges, last_basic_block);
2510
2511 brief_dump_cfg (file);
2512 fprintf (file, "\n");
2513 }
2514
2515 if (flags & TDF_STATS)
2516 dump_cfg_stats (file);
2517
2518 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2519 }
2520
2521
2522 /* Dump CFG statistics on FILE. */
2523
2524 void
2525 dump_cfg_stats (FILE *file)
2526 {
2527 static long max_num_merged_labels = 0;
2528 unsigned long size, total = 0;
2529 long num_edges;
2530 basic_block bb;
2531 const char * const fmt_str = "%-30s%-13s%12s\n";
2532 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2533 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2534 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2535 const char *funcname
2536 = lang_hooks.decl_printable_name (current_function_decl, 2);
2537
2538
2539 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2540
2541 fprintf (file, "---------------------------------------------------------\n");
2542 fprintf (file, fmt_str, "", " Number of ", "Memory");
2543 fprintf (file, fmt_str, "", " instances ", "used ");
2544 fprintf (file, "---------------------------------------------------------\n");
2545
2546 size = n_basic_blocks * sizeof (struct basic_block_def);
2547 total += size;
2548 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2549 SCALE (size), LABEL (size));
2550
2551 num_edges = 0;
2552 FOR_EACH_BB (bb)
2553 num_edges += EDGE_COUNT (bb->succs);
2554 size = num_edges * sizeof (struct edge_def);
2555 total += size;
2556 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2557
2558 fprintf (file, "---------------------------------------------------------\n");
2559 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2560 LABEL (total));
2561 fprintf (file, "---------------------------------------------------------\n");
2562 fprintf (file, "\n");
2563
2564 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2565 max_num_merged_labels = cfg_stats.num_merged_labels;
2566
2567 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2568 cfg_stats.num_merged_labels, max_num_merged_labels);
2569
2570 fprintf (file, "\n");
2571 }
2572
2573
2574 /* Dump CFG statistics on stderr. Keep extern so that it's always
2575 linked in the final executable. */
2576
2577 void
2578 debug_cfg_stats (void)
2579 {
2580 dump_cfg_stats (stderr);
2581 }
2582
2583
2584 /* Dump the flowgraph to a .vcg FILE. */
2585
2586 static void
2587 gimple_cfg2vcg (FILE *file)
2588 {
2589 edge e;
2590 edge_iterator ei;
2591 basic_block bb;
2592 const char *funcname
2593 = lang_hooks.decl_printable_name (current_function_decl, 2);
2594
2595 /* Write the file header. */
2596 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2597 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2598 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2599
2600 /* Write blocks and edges. */
2601 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2602 {
2603 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2604 e->dest->index);
2605
2606 if (e->flags & EDGE_FAKE)
2607 fprintf (file, " linestyle: dotted priority: 10");
2608 else
2609 fprintf (file, " linestyle: solid priority: 100");
2610
2611 fprintf (file, " }\n");
2612 }
2613 fputc ('\n', file);
2614
2615 FOR_EACH_BB (bb)
2616 {
2617 enum gimple_code head_code, end_code;
2618 const char *head_name, *end_name;
2619 int head_line = 0;
2620 int end_line = 0;
2621 gimple first = first_stmt (bb);
2622 gimple last = last_stmt (bb);
2623
2624 if (first)
2625 {
2626 head_code = gimple_code (first);
2627 head_name = gimple_code_name[head_code];
2628 head_line = get_lineno (first);
2629 }
2630 else
2631 head_name = "no-statement";
2632
2633 if (last)
2634 {
2635 end_code = gimple_code (last);
2636 end_name = gimple_code_name[end_code];
2637 end_line = get_lineno (last);
2638 }
2639 else
2640 end_name = "no-statement";
2641
2642 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2643 bb->index, bb->index, head_name, head_line, end_name,
2644 end_line);
2645
2646 FOR_EACH_EDGE (e, ei, bb->succs)
2647 {
2648 if (e->dest == EXIT_BLOCK_PTR)
2649 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2650 else
2651 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2652
2653 if (e->flags & EDGE_FAKE)
2654 fprintf (file, " priority: 10 linestyle: dotted");
2655 else
2656 fprintf (file, " priority: 100 linestyle: solid");
2657
2658 fprintf (file, " }\n");
2659 }
2660
2661 if (bb->next_bb != EXIT_BLOCK_PTR)
2662 fputc ('\n', file);
2663 }
2664
2665 fputs ("}\n\n", file);
2666 }
2667
2668
2669
2670 /*---------------------------------------------------------------------------
2671 Miscellaneous helpers
2672 ---------------------------------------------------------------------------*/
2673
2674 /* Return true if T represents a stmt that always transfers control. */
2675
2676 bool
2677 is_ctrl_stmt (gimple t)
2678 {
2679 return gimple_code (t) == GIMPLE_COND
2680 || gimple_code (t) == GIMPLE_SWITCH
2681 || gimple_code (t) == GIMPLE_GOTO
2682 || gimple_code (t) == GIMPLE_RETURN
2683 || gimple_code (t) == GIMPLE_RESX;
2684 }
2685
2686
2687 /* Return true if T is a statement that may alter the flow of control
2688 (e.g., a call to a non-returning function). */
2689
2690 bool
2691 is_ctrl_altering_stmt (gimple t)
2692 {
2693 gcc_assert (t);
2694
2695 if (is_gimple_call (t))
2696 {
2697 int flags = gimple_call_flags (t);
2698
2699 /* A non-pure/const call alters control flow if the current
2700 function has nonlocal labels. */
2701 if (!(flags & (ECF_CONST | ECF_PURE))
2702 && cfun->has_nonlocal_label)
2703 return true;
2704
2705 /* A call also alters control flow if it does not return. */
2706 if (gimple_call_flags (t) & ECF_NORETURN)
2707 return true;
2708 }
2709
2710 /* OpenMP directives alter control flow. */
2711 if (is_gimple_omp (t))
2712 return true;
2713
2714 /* If a statement can throw, it alters control flow. */
2715 return stmt_can_throw_internal (t);
2716 }
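/* Examples (illustrative; foo and bar are hypothetical functions):

       abort ();     ends its block: ECF_NORETURN.
       foo ();       ends its block if cfun->has_nonlocal_label, or if
                     the call may throw internally.
       x = bar (y);  does not end its block when bar is const/pure and
                     cannot throw.                                      */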
2717
2718
2719 /* Return true if T is a simple local goto. */
2720
2721 bool
2722 simple_goto_p (gimple t)
2723 {
2724 return (gimple_code (t) == GIMPLE_GOTO
2725 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2726 }
2727
2728
2729 /* Return true if T can make an abnormal transfer of control flow.
2730 Transfers of control flow associated with EH are excluded. */
2731
2732 bool
2733 stmt_can_make_abnormal_goto (gimple t)
2734 {
2735 if (computed_goto_p (t))
2736 return true;
2737 if (is_gimple_call (t))
2738 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2739 return false;
2740 }
2741
2742
2743 /* Return true if STMT should start a new basic block. PREV_STMT is
2744 the statement preceding STMT. It is used when STMT is a label or a
2745 case label. Labels should only start a new basic block if their
2746 previous statement wasn't a label. Otherwise, a sequence of labels
2747 would generate unnecessary basic blocks that contain only a single
2748 label. */
2749
2750 static inline bool
2751 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2752 {
2753 if (stmt == NULL)
2754 return false;
2755
2756 /* Labels start a new basic block only if the preceding statement
2757 wasn't a label of the same type. This prevents the creation of
2758 consecutive blocks that have nothing but a single label. */
2759 if (gimple_code (stmt) == GIMPLE_LABEL)
2760 {
2761 /* Nonlocal and computed GOTO targets always start a new block. */
2762 if (DECL_NONLOCAL (gimple_label_label (stmt))
2763 || FORCED_LABEL (gimple_label_label (stmt)))
2764 return true;
2765
2766 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2767 {
2768 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2769 return true;
2770
2771 cfg_stats.num_merged_labels++;
2772 return false;
2773 }
2774 else
2775 return true;
2776 }
2777
2778 return false;
2779 }
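/* Illustrative example: in the sequence

       L1:
       L2:
       x_1 = 1;

   only L1 starts a new basic block; L2 is merged into it and counted
   in cfg_stats.num_merged_labels.  Had L2 been a non-local or forced
   label, it would have started a block of its own.  */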
2780
2781
2782 /* Return true if T should end a basic block. */
2783
2784 bool
2785 stmt_ends_bb_p (gimple t)
2786 {
2787 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2788 }
2789
2790 /* Remove block annotations and other data structures. */
2791
2792 void
2793 delete_tree_cfg_annotations (void)
2794 {
2795 label_to_block_map = NULL;
2796 }
2797
2798
2799 /* Return the first statement in basic block BB. */
2800
2801 gimple
2802 first_stmt (basic_block bb)
2803 {
2804 gimple_stmt_iterator i = gsi_start_bb (bb);
2805 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2806 }
2807
2808 /* Return the first non-label statement in basic block BB. */
2809
2810 static gimple
2811 first_non_label_stmt (basic_block bb)
2812 {
2813 gimple_stmt_iterator i = gsi_start_bb (bb);
2814 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2815 gsi_next (&i);
2816 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2817 }
2818
2819 /* Return the last statement in basic block BB. */
2820
2821 gimple
2822 last_stmt (basic_block bb)
2823 {
2824 gimple_stmt_iterator b = gsi_last_bb (bb);
2825 return !gsi_end_p (b) ? gsi_stmt (b) : NULL;
2826 }
2827
2828 /* Return the last statement of an otherwise empty block. Return NULL
2829 if the block is totally empty, or if it contains more than one
2830 executable statement (a single leading label is allowed). */
2831
2832 gimple
2833 last_and_only_stmt (basic_block bb)
2834 {
2835 gimple_stmt_iterator i = gsi_last_bb (bb);
2836 gimple last, prev;
2837
2838 if (gsi_end_p (i))
2839 return NULL;
2840
2841 last = gsi_stmt (i);
2842 gsi_prev (&i);
2843 if (gsi_end_p (i))
2844 return last;
2845
2846 /* Empty statements should no longer appear in the instruction stream.
2847 Everything that might have appeared before should be deleted by
2848 remove_useless_stmts, and the optimizers should just gsi_remove
2849 instead of smashing with build_empty_stmt.
2850
2851 Thus the only thing that should appear here in a block containing
2852 one executable statement is a label. */
2853 prev = gsi_stmt (i);
2854 if (gimple_code (prev) == GIMPLE_LABEL)
2855 return last;
2856 else
2857 return NULL;
2858 }
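/* For example (sketch): in a block containing only

       L:
       return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN; if a second executable
   statement preceded the return, NULL would be returned instead.  */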
2859
2860 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2861
2862 static void
2863 reinstall_phi_args (edge new_edge, edge old_edge)
2864 {
2865 edge_var_map_vector v;
2866 edge_var_map *vm;
2867 int i;
2868 gimple_stmt_iterator phis;
2869
2870 v = redirect_edge_var_map_vector (old_edge);
2871 if (!v)
2872 return;
2873
2874 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2875 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2876 i++, gsi_next (&phis))
2877 {
2878 gimple phi = gsi_stmt (phis);
2879 tree result = redirect_edge_var_map_result (vm);
2880 tree arg = redirect_edge_var_map_def (vm);
2881
2882 gcc_assert (result == gimple_phi_result (phi));
2883
2884 add_phi_arg (phi, arg, new_edge);
2885 }
2886
2887 redirect_edge_var_map_clear (old_edge);
2888 }
2889
2890 /* Returns the basic block after which the new basic block created
2891 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2892 near its "logical" location. This is of most help to humans looking
2893 at debugging dumps. */
2894
2895 static basic_block
2896 split_edge_bb_loc (edge edge_in)
2897 {
2898 basic_block dest = edge_in->dest;
2899
2900 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
2901 return edge_in->src;
2902 else
2903 return dest->prev_bb;
2904 }
2905
2906 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2907 Abort on abnormal edges. */
2908
2909 static basic_block
2910 gimple_split_edge (edge edge_in)
2911 {
2912 basic_block new_bb, after_bb, dest;
2913 edge new_edge, e;
2914
2915 /* Abnormal edges cannot be split. */
2916 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2917
2918 dest = edge_in->dest;
2919
2920 after_bb = split_edge_bb_loc (edge_in);
2921
2922 new_bb = create_empty_bb (after_bb);
2923 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2924 new_bb->count = edge_in->count;
2925 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2926 new_edge->probability = REG_BR_PROB_BASE;
2927 new_edge->count = edge_in->count;
2928
2929 e = redirect_edge_and_branch (edge_in, new_bb);
2930 gcc_assert (e == edge_in);
2931 reinstall_phi_args (new_edge, e);
2932
2933 return new_bb;
2934 }
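/* A sketch of splitting the critical edge A->C (illustrative):

       before:  A -> B,  A -> C,  D -> C
       after:   A -> B,  A -> N,  N -> C,  D -> C

   The new block N is empty, inherits the count and frequency of the
   split edge, and any PHI arguments queued during the redirection are
   reinstalled on the new N->C edge.  */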
2935
2936 /* Callback for walk_tree: check that all elements with address taken
2937 are properly noticed as such. DATA is unused; it is kept only for
2938 the walk_tree callback signature. */
2939
2940 static tree
2941 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2942 {
2943 tree t = *tp, x;
2944
2945 if (TYPE_P (t))
2946 *walk_subtrees = 0;
2947
2948 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2949 #define CHECK_OP(N, MSG) \
2950 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2951 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2952
2953 switch (TREE_CODE (t))
2954 {
2955 case SSA_NAME:
2956 if (SSA_NAME_IN_FREE_LIST (t))
2957 {
2958 error ("SSA name in freelist but still referenced");
2959 return *tp;
2960 }
2961 break;
2962
2963 case INDIRECT_REF:
2964 x = TREE_OPERAND (t, 0);
2965 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2966 {
2967 error ("Indirect reference's operand is not a register or a constant.");
2968 return x;
2969 }
2970 break;
2971
2972 case ASSERT_EXPR:
2973 x = fold (ASSERT_EXPR_COND (t));
2974 if (x == boolean_false_node)
2975 {
2976 error ("ASSERT_EXPR with an always-false condition");
2977 return *tp;
2978 }
2979 break;
2980
2981 case MODIFY_EXPR:
2982 error ("MODIFY_EXPR not expected while having tuples.");
2983 return *tp;
2984
2985 case ADDR_EXPR:
2986 {
2987 bool old_constant;
2988 bool old_side_effects;
2989 bool new_constant;
2990 bool new_side_effects;
2991
2992 gcc_assert (is_gimple_address (t));
2993
2994 old_constant = TREE_CONSTANT (t);
2995 old_side_effects = TREE_SIDE_EFFECTS (t);
2996
2997 recompute_tree_invariant_for_addr_expr (t);
2998 new_side_effects = TREE_SIDE_EFFECTS (t);
2999 new_constant = TREE_CONSTANT (t);
3000
3001 if (old_constant != new_constant)
3002 {
3003 error ("constant not recomputed when ADDR_EXPR changed");
3004 return t;
3005 }
3006 if (old_side_effects != new_side_effects)
3007 {
3008 error ("side effects not recomputed when ADDR_EXPR changed");
3009 return t;
3010 }
3011
3012 /* Skip any references (they will be checked when we recurse down the
3013 tree) and ensure that any variable used as a prefix is marked
3014 addressable. */
3015 for (x = TREE_OPERAND (t, 0);
3016 handled_component_p (x);
3017 x = TREE_OPERAND (x, 0))
3018 ;
3019
3020 if (!(TREE_CODE (x) == VAR_DECL
3021 || TREE_CODE (x) == PARM_DECL
3022 || TREE_CODE (x) == RESULT_DECL))
3023 return NULL;
3024 if (!TREE_ADDRESSABLE (x))
3025 {
3026 error ("address taken, but ADDRESSABLE bit not set");
3027 return x;
3028 }
3029 if (DECL_GIMPLE_REG_P (x))
3030 {
3031 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
3032 return x;
3033 }
3034
3035 break;
3036 }
3037
3038 case COND_EXPR:
3039 x = COND_EXPR_COND (t);
3040 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3041 {
3042 error ("non-integral used in condition");
3043 return x;
3044 }
3045 if (!is_gimple_condexpr (x))
3046 {
3047 error ("invalid conditional operand");
3048 return x;
3049 }
3050 break;
3051
3052 case NON_LVALUE_EXPR:
3053 gcc_unreachable ();
3054
3055 CASE_CONVERT:
3056 case FIX_TRUNC_EXPR:
3057 case FLOAT_EXPR:
3058 case NEGATE_EXPR:
3059 case ABS_EXPR:
3060 case BIT_NOT_EXPR:
3061 case TRUTH_NOT_EXPR:
3062 CHECK_OP (0, "invalid operand to unary operator");
3063 break;
3064
3065 case REALPART_EXPR:
3066 case IMAGPART_EXPR:
3067 case COMPONENT_REF:
3068 case ARRAY_REF:
3069 case ARRAY_RANGE_REF:
3070 case BIT_FIELD_REF:
3071 case VIEW_CONVERT_EXPR:
3072 /* We have a nest of references. Verify that each of the operands
3073 that determine where to reference is either a constant or a variable,
3074 verify that the base is valid, and then show we've already checked
3075 the subtrees. */
3076 while (handled_component_p (t))
3077 {
3078 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3079 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3080 else if (TREE_CODE (t) == ARRAY_REF
3081 || TREE_CODE (t) == ARRAY_RANGE_REF)
3082 {
3083 CHECK_OP (1, "invalid array index");
3084 if (TREE_OPERAND (t, 2))
3085 CHECK_OP (2, "invalid array lower bound");
3086 if (TREE_OPERAND (t, 3))
3087 CHECK_OP (3, "invalid array stride");
3088 }
3089 else if (TREE_CODE (t) == BIT_FIELD_REF)
3090 {
3091 if (!host_integerp (TREE_OPERAND (t, 1), 1)
3092 || !host_integerp (TREE_OPERAND (t, 2), 1))
3093 {
3094 error ("invalid position or size operand to BIT_FIELD_REF");
3095 return t;
3096 }
3097 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3098 && (TYPE_PRECISION (TREE_TYPE (t))
3099 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3100 {
3101 error ("integral result type precision does not match "
3102 "field size of BIT_FIELD_REF");
3103 return t;
3104 }
3105 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3106 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
3107 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
3108 {
3109 error ("mode precision of non-integral result does not "
3110 "match field size of BIT_FIELD_REF");
3111 return t;
3112 }
3113 }
3114
3115 t = TREE_OPERAND (t, 0);
3116 }
3117
3118 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3119 {
3120 error ("invalid reference prefix");
3121 return t;
3122 }
3123 *walk_subtrees = 0;
3124 break;
3125 case PLUS_EXPR:
3126 case MINUS_EXPR:
3127 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
3128 arithmetic should be done using POINTER_PLUS_EXPR. */
3129 if (POINTER_TYPE_P (TREE_TYPE (t)))
3130 {
3131 error ("invalid operand to plus/minus, type is a pointer");
3132 return t;
3133 }
3134 CHECK_OP (0, "invalid operand to binary operator");
3135 CHECK_OP (1, "invalid operand to binary operator");
3136 break;
3137
3138 case POINTER_PLUS_EXPR:
3139 /* Check that the first operand has a pointer or reference type. */
3140 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3141 {
3142 error ("invalid operand to pointer plus, first operand is not a pointer");
3143 return t;
3144 }
3145 /* Check to make sure the second operand is an integer of type
3146 sizetype. */
3147 if (!useless_type_conversion_p (sizetype,
3148 TREE_TYPE (TREE_OPERAND (t, 1))))
3149 {
3150 error ("invalid operand to pointer plus, second operand is not an "
3151 "integer with type of sizetype.");
3152 return t;
3153 }
3154 /* FALLTHROUGH */
3155 case LT_EXPR:
3156 case LE_EXPR:
3157 case GT_EXPR:
3158 case GE_EXPR:
3159 case EQ_EXPR:
3160 case NE_EXPR:
3161 case UNORDERED_EXPR:
3162 case ORDERED_EXPR:
3163 case UNLT_EXPR:
3164 case UNLE_EXPR:
3165 case UNGT_EXPR:
3166 case UNGE_EXPR:
3167 case UNEQ_EXPR:
3168 case LTGT_EXPR:
3169 case MULT_EXPR:
3170 case TRUNC_DIV_EXPR:
3171 case CEIL_DIV_EXPR:
3172 case FLOOR_DIV_EXPR:
3173 case ROUND_DIV_EXPR:
3174 case TRUNC_MOD_EXPR:
3175 case CEIL_MOD_EXPR:
3176 case FLOOR_MOD_EXPR:
3177 case ROUND_MOD_EXPR:
3178 case RDIV_EXPR:
3179 case EXACT_DIV_EXPR:
3180 case MIN_EXPR:
3181 case MAX_EXPR:
3182 case LSHIFT_EXPR:
3183 case RSHIFT_EXPR:
3184 case LROTATE_EXPR:
3185 case RROTATE_EXPR:
3186 case BIT_IOR_EXPR:
3187 case BIT_XOR_EXPR:
3188 case BIT_AND_EXPR:
3189 CHECK_OP (0, "invalid operand to binary operator");
3190 CHECK_OP (1, "invalid operand to binary operator");
3191 break;
3192
3193 case CONSTRUCTOR:
3194 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3195 *walk_subtrees = 0;
3196 break;
3197
3198 default:
3199 break;
3200 }
3201 return NULL;
3202
3203 #undef CHECK_OP
3204 }
3205
3206
3207 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3208 Returns true if there is an error, otherwise false. */
3209
3210 static bool
3211 verify_types_in_gimple_min_lval (tree expr)
3212 {
3213 tree op;
3214
3215 if (is_gimple_id (expr))
3216 return false;
3217
3218 if (!INDIRECT_REF_P (expr)
3219 && TREE_CODE (expr) != TARGET_MEM_REF)
3220 {
3221 error ("invalid expression for min lvalue");
3222 return true;
3223 }
3224
3225 /* TARGET_MEM_REFs are strange beasts. */
3226 if (TREE_CODE (expr) == TARGET_MEM_REF)
3227 return false;
3228
3229 op = TREE_OPERAND (expr, 0);
3230 if (!is_gimple_val (op))
3231 {
3232 error ("invalid operand in indirect reference");
3233 debug_generic_stmt (op);
3234 return true;
3235 }
3236 if (!useless_type_conversion_p (TREE_TYPE (expr),
3237 TREE_TYPE (TREE_TYPE (op))))
3238 {
3239 error ("type mismatch in indirect reference");
3240 debug_generic_stmt (TREE_TYPE (expr));
3241 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3242 return true;
3243 }
3244
3245 return false;
3246 }
3247
3248 /* Verify if EXPR is a valid GIMPLE reference expression. If
3249 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3250 if there is an error, otherwise false. */
3251
3252 static bool
3253 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3254 {
3255 while (handled_component_p (expr))
3256 {
3257 tree op = TREE_OPERAND (expr, 0);
3258
3259 if (TREE_CODE (expr) == ARRAY_REF
3260 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3261 {
3262 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3263 || (TREE_OPERAND (expr, 2)
3264 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3265 || (TREE_OPERAND (expr, 3)
3266 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3267 {
3268 error ("invalid operands to array reference");
3269 debug_generic_stmt (expr);
3270 return true;
3271 }
3272 }
3273
3274 /* Verify if the reference array element types are compatible. */
3275 if (TREE_CODE (expr) == ARRAY_REF
3276 && !useless_type_conversion_p (TREE_TYPE (expr),
3277 TREE_TYPE (TREE_TYPE (op))))
3278 {
3279 error ("type mismatch in array reference");
3280 debug_generic_stmt (TREE_TYPE (expr));
3281 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3282 return true;
3283 }
3284 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3285 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3286 TREE_TYPE (TREE_TYPE (op))))
3287 {
3288 error ("type mismatch in array range reference");
3289 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3290 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3291 return true;
3292 }
3293
3294 if ((TREE_CODE (expr) == REALPART_EXPR
3295 || TREE_CODE (expr) == IMAGPART_EXPR)
3296 && !useless_type_conversion_p (TREE_TYPE (expr),
3297 TREE_TYPE (TREE_TYPE (op))))
3298 {
3299 error ("type mismatch in real/imagpart reference");
3300 debug_generic_stmt (TREE_TYPE (expr));
3301 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3302 return true;
3303 }
3304
3305 if (TREE_CODE (expr) == COMPONENT_REF
3306 && !useless_type_conversion_p (TREE_TYPE (expr),
3307 TREE_TYPE (TREE_OPERAND (expr, 1))))
3308 {
3309 error ("type mismatch in component reference");
3310 debug_generic_stmt (TREE_TYPE (expr));
3311 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3312 return true;
3313 }
3314
3315 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3316 is nothing to verify. Gross mismatches at most invoke
3317 undefined behavior. */
3318 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3319 && !handled_component_p (op))
3320 return false;
3321
3322 expr = op;
3323 }
3324
3325 return ((require_lvalue || !is_gimple_min_invariant (expr))
3326 && verify_types_in_gimple_min_lval (expr));
3327 }
3328
3329 /* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3330 list of pointer-to types that is trivially convertible to DEST. */
3331
3332 static bool
3333 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3334 {
3335 tree src;
3336
3337 if (!TYPE_POINTER_TO (src_obj))
3338 return true;
3339
3340 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3341 if (useless_type_conversion_p (dest, src))
3342 return true;
3343
3344 return false;
3345 }
3346
3347 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3348 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3349
3350 static bool
3351 valid_fixed_convert_types_p (tree type1, tree type2)
3352 {
3353 return (FIXED_POINT_TYPE_P (type1)
3354 && (INTEGRAL_TYPE_P (type2)
3355 || SCALAR_FLOAT_TYPE_P (type2)
3356 || FIXED_POINT_TYPE_P (type2)));
3357 }
3358
3359 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3360 is a problem, otherwise false. */
3361
3362 static bool
3363 verify_gimple_call (gimple stmt)
3364 {
3365 tree fn = gimple_call_fn (stmt);
3366 tree fntype;
3367
3368 if (!POINTER_TYPE_P (TREE_TYPE (fn))
3369 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3370 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
3371 {
3372 error ("non-function in gimple call");
3373 return true;
3374 }
3375
3376 if (gimple_call_lhs (stmt)
3377 && !is_gimple_lvalue (gimple_call_lhs (stmt)))
3378 {
3379 error ("invalid LHS in gimple call");
3380 return true;
3381 }
3382
3383 fntype = TREE_TYPE (TREE_TYPE (fn));
3384 if (gimple_call_lhs (stmt)
3385 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3386 TREE_TYPE (fntype))
3387 /* ??? At least C++ misses conversions at assignments from
3388 void * call results.
3389 ??? Java is completely off. Especially with functions
3390 returning java.lang.Object.
3391 For now simply allow arbitrary pointer type conversions. */
3392 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3393 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3394 {
3395 error ("invalid conversion in gimple call");
3396 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3397 debug_generic_stmt (TREE_TYPE (fntype));
3398 return true;
3399 }
3400
3401 /* ??? The C frontend passes unpromoted arguments in case it
3402 didn't see a function declaration before the call. So for now
3403 leave the call arguments unverified. Once we gimplify
3404 unit-at-a-time we have a chance to fix this. */
3405
3406 return false;
3407 }
3408
3409 /* Verifies the gimple comparison with the result type TYPE and
3410 the operands OP0 and OP1. */
3411
3412 static bool
3413 verify_gimple_comparison (tree type, tree op0, tree op1)
3414 {
3415 tree op0_type = TREE_TYPE (op0);
3416 tree op1_type = TREE_TYPE (op1);
3417
3418 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3419 {
3420 error ("invalid operands in gimple comparison");
3421 return true;
3422 }
3423
3424 /* For comparisons we do not record the type in which the
3425 comparison is effectively carried out. Instead we require
3426 that either the first operand is trivially convertible
3427 into the second, or the other way around.
3428 The resulting type of a comparison may be any integral type.
3429 Because we special-case pointers to void we allow
3430 comparisons of pointers with the same mode as well. */
3431 if ((!useless_type_conversion_p (op0_type, op1_type)
3432 && !useless_type_conversion_p (op1_type, op0_type)
3433 && (!POINTER_TYPE_P (op0_type)
3434 || !POINTER_TYPE_P (op1_type)
3435 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3436 || !INTEGRAL_TYPE_P (type))
3437 {
3438 error ("type mismatch in comparison expression");
3439 debug_generic_expr (type);
3440 debug_generic_expr (op0_type);
3441 debug_generic_expr (op1_type);
3442 return true;
3443 }
3444
3445 return false;
3446 }
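/* Examples (illustrative): assuming _1 and _2 have boolean type,

       _1 = i_3 < u_4;   rejected when i_3 is int and u_4 is unsigned
                         int: neither operand type is trivially
                         convertible to the other.
       _2 = p_5 == q_6;  accepted for void* p_5 and int* q_6, since
                         both pointer types share the same mode.       */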
3447
3448 /* Verify a gimple assignment statement STMT with an unary rhs.
3449 Returns true if anything is wrong. */
3450
3451 static bool
3452 verify_gimple_assign_unary (gimple stmt)
3453 {
3454 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3455 tree lhs = gimple_assign_lhs (stmt);
3456 tree lhs_type = TREE_TYPE (lhs);
3457 tree rhs1 = gimple_assign_rhs1 (stmt);
3458 tree rhs1_type = TREE_TYPE (rhs1);
3459
3460 if (!is_gimple_reg (lhs)
3461 && !(optimize == 0
3462 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3463 {
3464 error ("non-register as LHS of unary operation");
3465 return true;
3466 }
3467
3468 if (!is_gimple_val (rhs1))
3469 {
3470 error ("invalid operand in unary operation");
3471 return true;
3472 }
3473
3474 /* First handle conversions. */
3475 switch (rhs_code)
3476 {
3477 CASE_CONVERT:
3478 {
3479 /* Allow conversions between integral types and pointers only if
3480 there is no sign or zero extension involved.
3481 For targets where the precision of sizetype doesn't match that
3482 of pointers we need to allow arbitrary conversions from and
3483 to sizetype. */
3484 if ((POINTER_TYPE_P (lhs_type)
3485 && INTEGRAL_TYPE_P (rhs1_type)
3486 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3487 || rhs1_type == sizetype))
3488 || (POINTER_TYPE_P (rhs1_type)
3489 && INTEGRAL_TYPE_P (lhs_type)
3490 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3491 || lhs_type == sizetype)))
3492 return false;
3493
3494 /* Allow conversion from integer to offset type and vice versa. */
3495 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3496 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3497 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3498 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3499 return false;
3500
3501 /* Otherwise assert we are converting between types of the
3502 same kind. */
3503 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3504 {
3505 error ("invalid types in nop conversion");
3506 debug_generic_expr (lhs_type);
3507 debug_generic_expr (rhs1_type);
3508 return true;
3509 }
3510
3511 return false;
3512 }
3513
3514 case FIXED_CONVERT_EXPR:
3515 {
3516 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3517 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3518 {
3519 error ("invalid types in fixed-point conversion");
3520 debug_generic_expr (lhs_type);
3521 debug_generic_expr (rhs1_type);
3522 return true;
3523 }
3524
3525 return false;
3526 }
3527
3528 case FLOAT_EXPR:
3529 {
3530 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3531 {
3532 error ("invalid types in conversion to floating point");
3533 debug_generic_expr (lhs_type);
3534 debug_generic_expr (rhs1_type);
3535 return true;
3536 }
3537
3538 return false;
3539 }
3540
3541 case FIX_TRUNC_EXPR:
3542 {
3543 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3544 {
3545 error ("invalid types in conversion to integer");
3546 debug_generic_expr (lhs_type);
3547 debug_generic_expr (rhs1_type);
3548 return true;
3549 }
3550
3551 return false;
3552 }
3553
3554 case VEC_UNPACK_HI_EXPR:
3555 case VEC_UNPACK_LO_EXPR:
3556 case REDUC_MAX_EXPR:
3557 case REDUC_MIN_EXPR:
3558 case REDUC_PLUS_EXPR:
3559 case VEC_UNPACK_FLOAT_HI_EXPR:
3560 case VEC_UNPACK_FLOAT_LO_EXPR:
3561 /* FIXME. */
3562 return false;
3563
3564 case TRUTH_NOT_EXPR:
3565 case NEGATE_EXPR:
3566 case ABS_EXPR:
3567 case BIT_NOT_EXPR:
3568 case PAREN_EXPR:
3569 case NON_LVALUE_EXPR:
3570 case CONJ_EXPR:
3571 break;
3572
3573 default:
3574 gcc_unreachable ();
3575 }
3576
3577 /* For the remaining codes assert there is no conversion involved. */
3578 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3579 {
3580 error ("non-trivial conversion in unary operation");
3581 debug_generic_expr (lhs_type);
3582 debug_generic_expr (rhs1_type);
3583 return true;
3584 }
3585
3586 return false;
3587 }
3588
3589 /* Verify a gimple assignment statement STMT with a binary rhs.
3590 Returns true if anything is wrong. */
3591
3592 static bool
3593 verify_gimple_assign_binary (gimple stmt)
3594 {
3595 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3596 tree lhs = gimple_assign_lhs (stmt);
3597 tree lhs_type = TREE_TYPE (lhs);
3598 tree rhs1 = gimple_assign_rhs1 (stmt);
3599 tree rhs1_type = TREE_TYPE (rhs1);
3600 tree rhs2 = gimple_assign_rhs2 (stmt);
3601 tree rhs2_type = TREE_TYPE (rhs2);
3602
3603 if (!is_gimple_reg (lhs)
3604 && !(optimize == 0
3605 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3606 {
3607 error ("non-register as LHS of binary operation");
3608 return true;
3609 }
3610
3611 if (!is_gimple_val (rhs1)
3612 || !is_gimple_val (rhs2))
3613 {
3614 error ("invalid operands in binary operation");
3615 return true;
3616 }
3617
3618 /* First handle operations that involve different types. */
3619 switch (rhs_code)
3620 {
3621 case COMPLEX_EXPR:
3622 {
3623 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3624 || !(INTEGRAL_TYPE_P (rhs1_type)
3625 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3626 || !(INTEGRAL_TYPE_P (rhs2_type)
3627 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3628 {
3629 error ("type mismatch in complex expression");
3630 debug_generic_expr (lhs_type);
3631 debug_generic_expr (rhs1_type);
3632 debug_generic_expr (rhs2_type);
3633 return true;
3634 }
3635
3636 return false;
3637 }
3638
3639 case LSHIFT_EXPR:
3640 case RSHIFT_EXPR:
3641 case LROTATE_EXPR:
3642 case RROTATE_EXPR:
3643 {
3644 /* Shifts and rotates are ok on integral types, fixed point
3645 types and integer vector types. */
3646 if ((!INTEGRAL_TYPE_P (rhs1_type)
3647 && !FIXED_POINT_TYPE_P (rhs1_type)
3648 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3649 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE))
3650 || (!INTEGRAL_TYPE_P (rhs2_type)
3651 /* Vector shifts of vectors are also ok. */
3652 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3653 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE
3654 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3655 && TREE_CODE (TREE_TYPE (rhs2_type)) == INTEGER_TYPE))
3656 || !useless_type_conversion_p (lhs_type, rhs1_type))
3657 {
3658 error ("type mismatch in shift expression");
3659 debug_generic_expr (lhs_type);
3660 debug_generic_expr (rhs1_type);
3661 debug_generic_expr (rhs2_type);
3662 return true;
3663 }
3664
3665 return false;
3666 }
3667
3668 case VEC_LSHIFT_EXPR:
3669 case VEC_RSHIFT_EXPR:
3670 {
3671 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3672 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3673 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3674 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3675 || (!INTEGRAL_TYPE_P (rhs2_type)
3676 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3677 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3678 || !useless_type_conversion_p (lhs_type, rhs1_type))
3679 {
3680 error ("type mismatch in vector shift expression");
3681 debug_generic_expr (lhs_type);
3682 debug_generic_expr (rhs1_type);
3683 debug_generic_expr (rhs2_type);
3684 return true;
3685 }
3686 /* For shifting a vector of floating point components we
3687 only allow shifting by a constant multiple of the element size. */
3688 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3689 && (TREE_CODE (rhs2) != INTEGER_CST
3690 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3691 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3692 {
3693 error ("non-element sized vector shift of floating point vector");
3694 return true;
3695 }
3696
3697 return false;
3698 }
3699
3700 case PLUS_EXPR:
3701 {
3702 /* We use regular PLUS_EXPR for vectors.
3703 ??? This just makes the checker happy and may not be what is
3704 intended. */
3705 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3706 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3707 {
3708 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3709 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3710 {
3711 error ("invalid non-vector operands to vector valued plus");
3712 return true;
3713 }
3714 lhs_type = TREE_TYPE (lhs_type);
3715 rhs1_type = TREE_TYPE (rhs1_type);
3716 rhs2_type = TREE_TYPE (rhs2_type);
3717 /* PLUS_EXPR is commutative, so canonicalization may have moved
3718 the pointer to the second operand. */
3719 if (POINTER_TYPE_P (rhs2_type))
3720 {
3721 tree tem = rhs1_type;
3722 rhs1_type = rhs2_type;
3723 rhs2_type = tem;
3724 }
3725 goto do_pointer_plus_expr_check;
3726 }
3727 }
3728 /* Fallthru. */
3729 case MINUS_EXPR:
3730 {
3731 if (POINTER_TYPE_P (lhs_type)
3732 || POINTER_TYPE_P (rhs1_type)
3733 || POINTER_TYPE_P (rhs2_type))
3734 {
3735 error ("invalid (pointer) operands to plus/minus");
3736 return true;
3737 }
3738
3739 /* Continue with generic binary expression handling. */
3740 break;
3741 }
3742
3743 case POINTER_PLUS_EXPR:
3744 {
3745 do_pointer_plus_expr_check:
3746 if (!POINTER_TYPE_P (rhs1_type)
3747 || !useless_type_conversion_p (lhs_type, rhs1_type)
3748 || !useless_type_conversion_p (sizetype, rhs2_type))
3749 {
3750 error ("type mismatch in pointer plus expression");
3751 debug_generic_stmt (lhs_type);
3752 debug_generic_stmt (rhs1_type);
3753 debug_generic_stmt (rhs2_type);
3754 return true;
3755 }
3756
3757 return false;
3758 }
3759
3760 case TRUTH_ANDIF_EXPR:
3761 case TRUTH_ORIF_EXPR:
3762 gcc_unreachable ();
3763
3764 case TRUTH_AND_EXPR:
3765 case TRUTH_OR_EXPR:
3766 case TRUTH_XOR_EXPR:
3767 {
3768 /* We allow any kind of integral typed argument and result. */
3769 if (!INTEGRAL_TYPE_P (rhs1_type)
3770 || !INTEGRAL_TYPE_P (rhs2_type)
3771 || !INTEGRAL_TYPE_P (lhs_type))
3772 {
3773 error ("type mismatch in binary truth expression");
3774 debug_generic_expr (lhs_type);
3775 debug_generic_expr (rhs1_type);
3776 debug_generic_expr (rhs2_type);
3777 return true;
3778 }
3779
3780 return false;
3781 }
3782
3783 case LT_EXPR:
3784 case LE_EXPR:
3785 case GT_EXPR:
3786 case GE_EXPR:
3787 case EQ_EXPR:
3788 case NE_EXPR:
3789 case UNORDERED_EXPR:
3790 case ORDERED_EXPR:
3791 case UNLT_EXPR:
3792 case UNLE_EXPR:
3793 case UNGT_EXPR:
3794 case UNGE_EXPR:
3795 case UNEQ_EXPR:
3796 case LTGT_EXPR:
3797 /* Comparisons are also binary, but the result type is not
3798 connected to the operand types. */
3799 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3800
3801 case WIDEN_SUM_EXPR:
3802 case WIDEN_MULT_EXPR:
3803 case VEC_WIDEN_MULT_HI_EXPR:
3804 case VEC_WIDEN_MULT_LO_EXPR:
3805 case VEC_PACK_TRUNC_EXPR:
3806 case VEC_PACK_SAT_EXPR:
3807 case VEC_PACK_FIX_TRUNC_EXPR:
3808 case VEC_EXTRACT_EVEN_EXPR:
3809 case VEC_EXTRACT_ODD_EXPR:
3810 case VEC_INTERLEAVE_HIGH_EXPR:
3811 case VEC_INTERLEAVE_LOW_EXPR:
3812 /* FIXME. */
3813 return false;
3814
3815 case MULT_EXPR:
3816 case TRUNC_DIV_EXPR:
3817 case CEIL_DIV_EXPR:
3818 case FLOOR_DIV_EXPR:
3819 case ROUND_DIV_EXPR:
3820 case TRUNC_MOD_EXPR:
3821 case CEIL_MOD_EXPR:
3822 case FLOOR_MOD_EXPR:
3823 case ROUND_MOD_EXPR:
3824 case RDIV_EXPR:
3825 case EXACT_DIV_EXPR:
3826 case MIN_EXPR:
3827 case MAX_EXPR:
3828 case BIT_IOR_EXPR:
3829 case BIT_XOR_EXPR:
3830 case BIT_AND_EXPR:
3831 /* Continue with generic binary expression handling. */
3832 break;
3833
3834 default:
3835 gcc_unreachable ();
3836 }
3837
3838 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3839 || !useless_type_conversion_p (lhs_type, rhs2_type))
3840 {
3841 error ("type mismatch in binary expression");
3842 debug_generic_stmt (lhs_type);
3843 debug_generic_stmt (rhs1_type);
3844 debug_generic_stmt (rhs2_type);
3845 return true;
3846 }
3847
3848 return false;
3849 }
3850
3851 /* Verify a gimple assignment statement STMT with a single rhs.
3852 Returns true if anything is wrong. */
3853
3854 static bool
3855 verify_gimple_assign_single (gimple stmt)
3856 {
3857 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3858 tree lhs = gimple_assign_lhs (stmt);
3859 tree lhs_type = TREE_TYPE (lhs);
3860 tree rhs1 = gimple_assign_rhs1 (stmt);
3861 tree rhs1_type = TREE_TYPE (rhs1);
3862 bool res = false;
3863
3864 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3865 {
3866 error ("non-trivial conversion at assignment");
3867 debug_generic_expr (lhs_type);
3868 debug_generic_expr (rhs1_type);
3869 return true;
3870 }
3871
3872 if (handled_component_p (lhs))
3873 res |= verify_types_in_gimple_reference (lhs, true);
3874
3875 /* Special codes we cannot handle via their class. */
3876 switch (rhs_code)
3877 {
3878 case ADDR_EXPR:
3879 {
3880 tree op = TREE_OPERAND (rhs1, 0);
3881 if (!is_gimple_addressable (op))
3882 {
3883 error ("invalid operand in unary expression");
3884 return true;
3885 }
3886
3887 if (!one_pointer_to_useless_type_conversion_p (lhs_type,
3888 TREE_TYPE (op)))
3889 {
3890 error ("type mismatch in address expression");
3891 debug_generic_stmt (lhs_type);
3892 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
3893 return true;
3894 }
3895
3896 return verify_types_in_gimple_reference (op, true);
3897 }
3898
3899 /* tcc_reference */
3900 case COMPONENT_REF:
3901 case BIT_FIELD_REF:
3902 case INDIRECT_REF:
3903 case ALIGN_INDIRECT_REF:
3904 case MISALIGNED_INDIRECT_REF:
3905 case ARRAY_REF:
3906 case ARRAY_RANGE_REF:
3907 case VIEW_CONVERT_EXPR:
3908 case REALPART_EXPR:
3909 case IMAGPART_EXPR:
3910 case TARGET_MEM_REF:
3911 if (!is_gimple_reg (lhs)
3912 && is_gimple_reg_type (TREE_TYPE (lhs)))
3913 {
3914 error ("invalid rhs for gimple memory store");
3915 debug_generic_stmt (lhs);
3916 debug_generic_stmt (rhs1);
3917 return true;
3918 }
3919 return res || verify_types_in_gimple_reference (rhs1, false);
3920
3921 /* tcc_constant */
3922 case SSA_NAME:
3923 case INTEGER_CST:
3924 case REAL_CST:
3925 case FIXED_CST:
3926 case COMPLEX_CST:
3927 case VECTOR_CST:
3928 case STRING_CST:
3929 return res;
3930
3931 /* tcc_declaration */
3932 case CONST_DECL:
3933 return res;
3934 case VAR_DECL:
3935 case PARM_DECL:
3936 if (!is_gimple_reg (lhs)
3937 && !is_gimple_reg (rhs1)
3938 && is_gimple_reg_type (TREE_TYPE (lhs)))
3939 {
3940 error ("invalid rhs for gimple memory store");
3941 debug_generic_stmt (lhs);
3942 debug_generic_stmt (rhs1);
3943 return true;
3944 }
3945 return res;
3946
3947 case COND_EXPR:
3948 case CONSTRUCTOR:
3949 case OBJ_TYPE_REF:
3950 case ASSERT_EXPR:
3951 case WITH_SIZE_EXPR:
3952 case EXC_PTR_EXPR:
3953 case FILTER_EXPR:
3954 case POLYNOMIAL_CHREC:
3955 case DOT_PROD_EXPR:
3956 case VEC_COND_EXPR:
3957 case REALIGN_LOAD_EXPR:
3958 /* FIXME. */
3959 return res;
3960
3961 default:;
3962 }
3963
3964 return res;
3965 }
3966
3967 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3968 is a problem, otherwise false. */
3969
3970 static bool
3971 verify_gimple_assign (gimple stmt)
3972 {
3973 switch (gimple_assign_rhs_class (stmt))
3974 {
3975 case GIMPLE_SINGLE_RHS:
3976 return verify_gimple_assign_single (stmt);
3977
3978 case GIMPLE_UNARY_RHS:
3979 return verify_gimple_assign_unary (stmt);
3980
3981 case GIMPLE_BINARY_RHS:
3982 return verify_gimple_assign_binary (stmt);
3983
3984 default:
3985 gcc_unreachable ();
3986 }
3987 }
3988
3989 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3990 is a problem, otherwise false. */
3991
3992 static bool
3993 verify_gimple_return (gimple stmt)
3994 {
3995 tree op = gimple_return_retval (stmt);
3996 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3997
3998 /* We cannot test for the presence of a return value, as we do not fix
3999 up missing return values from the original source. */
4000 if (op == NULL)
4001 return false;
4002
4003 if (!is_gimple_val (op)
4004 && TREE_CODE (op) != RESULT_DECL)
4005 {
4006 error ("invalid operand in return statement");
4007 debug_generic_stmt (op);
4008 return true;
4009 }
4010
4011 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
4012 /* ??? With C++ we can have the situation that the result
4013 decl is a reference type while the return type is an aggregate. */
4014 && !(TREE_CODE (op) == RESULT_DECL
4015 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
4016 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
4017 {
4018 error ("invalid conversion in return statement");
4019 debug_generic_stmt (restype);
4020 debug_generic_stmt (TREE_TYPE (op));
4021 return true;
4022 }
4023
4024 return false;
4025 }
4026
4027
4028 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4029 is a problem, otherwise false. */
4030
4031 static bool
4032 verify_gimple_goto (gimple stmt)
4033 {
4034 tree dest = gimple_goto_dest (stmt);
4035
4036 /* ??? We have two canonical forms of direct goto destinations, a
4037 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4038 if (TREE_CODE (dest) != LABEL_DECL
4039 && (!is_gimple_val (dest)
4040 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4041 {
4042 error ("goto destination is neither a label nor a pointer");
4043 return true;
4044 }
4045
4046 return false;
4047 }
4048
4049 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4050 is a problem, otherwise false. */
4051
4052 static bool
4053 verify_gimple_switch (gimple stmt)
4054 {
4055 if (!is_gimple_val (gimple_switch_index (stmt)))
4056 {
4057 error ("invalid operand to switch statement");
4058 debug_generic_stmt (gimple_switch_index (stmt));
4059 return true;
4060 }
4061
4062 return false;
4063 }
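
/* Illustrative sketch, not part of GCC: a pass that builds a switch
   from an arbitrary index expression can satisfy the check above by
   gimplifying the index into a gimple value first.  The iterator GSI
   and the expression IDX are hypothetical.  */
#if 0
static tree
example_gimplify_switch_index (gimple_stmt_iterator *gsi, tree idx)
{
  /* Force IDX into an SSA name or invariant so that
     is_gimple_val (gimple_switch_index (stmt)) holds.  */
  return force_gimple_operand_gsi (gsi, idx, true, NULL_TREE,
				   true, GSI_SAME_STMT);
}
#endif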
4064
4065
4066 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4067 and false otherwise. */
4068
4069 static bool
4070 verify_gimple_phi (gimple stmt)
4071 {
4072 tree type = TREE_TYPE (gimple_phi_result (stmt));
4073 unsigned i;
4074
4075 if (!is_gimple_variable (gimple_phi_result (stmt)))
4076 {
4077 error ("Invalid PHI result");
4078 return true;
4079 }
4080
4081 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4082 {
4083 tree arg = gimple_phi_arg_def (stmt, i);
4084 if ((is_gimple_reg (gimple_phi_result (stmt))
4085 && !is_gimple_val (arg))
4086 || (!is_gimple_reg (gimple_phi_result (stmt))
4087 && !is_gimple_addressable (arg)))
4088 {
4089 error ("Invalid PHI argument");
4090 debug_generic_stmt (arg);
4091 return true;
4092 }
4093 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
4094 {
4095 error ("Incompatible types in PHI argument %u", i);
4096 debug_generic_stmt (type);
4097 debug_generic_stmt (TREE_TYPE (arg));
4098 return true;
4099 }
4100 }
4101
4102 return false;
4103 }
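
/* Illustrative sketch, not part of GCC: building a PHI node that passes
   the checks above.  The result is the SSA name VAR of a register
   variable, and every argument must be a gimple value whose type is
   trivially convertible to VAR's type.  BB, VAR, the incoming edges
   E1/E2 and the values V1/V2 are hypothetical.  */
#if 0
static gimple
example_build_phi (basic_block bb, tree var,
		   edge e1, tree v1, edge e2, tree v2)
{
  gimple phi = create_phi_node (var, bb);
  /* V1 and V2 are assumed to satisfy is_gimple_val and to have types
     compatible with VAR, as verify_gimple_phi requires.  */
  add_phi_arg (phi, v1, e1);
  add_phi_arg (phi, v2, e2);
  return phi;
}
#endif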
4104
4105
4106 /* Verify the GIMPLE statement STMT. Returns true if there is an
4107 error, otherwise false. */
4108
4109 static bool
4110 verify_types_in_gimple_stmt (gimple stmt)
4111 {
4112 if (is_gimple_omp (stmt))
4113 {
4114 /* OpenMP directives are validated by the FE and never operated
4115 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4116 non-gimple expressions when the main index variable has had
4117 its address taken. This does not affect the loop itself
4118 because the header of a GIMPLE_OMP_FOR is merely used to determine
4119 how to set up the parallel iteration. */
4120 return false;
4121 }
4122
4123 switch (gimple_code (stmt))
4124 {
4125 case GIMPLE_ASSIGN:
4126 return verify_gimple_assign (stmt);
4127
4128 case GIMPLE_LABEL:
4129 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
4130
4131 case GIMPLE_CALL:
4132 return verify_gimple_call (stmt);
4133
4134 case GIMPLE_COND:
4135 return verify_gimple_comparison (boolean_type_node,
4136 gimple_cond_lhs (stmt),
4137 gimple_cond_rhs (stmt));
4138
4139 case GIMPLE_GOTO:
4140 return verify_gimple_goto (stmt);
4141
4142 case GIMPLE_SWITCH:
4143 return verify_gimple_switch (stmt);
4144
4145 case GIMPLE_RETURN:
4146 return verify_gimple_return (stmt);
4147
4148 case GIMPLE_ASM:
4149 return false;
4150
4151 case GIMPLE_PHI:
4152 return verify_gimple_phi (stmt);
4153
4154 /* Tuples that do not have tree operands. */
4155 case GIMPLE_NOP:
4156 case GIMPLE_RESX:
4157 case GIMPLE_PREDICT:
4158 return false;
4159
4160 default:
4161 gcc_unreachable ();
4162 }
4163 }
4164
4165 /* Verify the GIMPLE statements inside the sequence STMTS. */
4166
4167 static bool
4168 verify_types_in_gimple_seq_2 (gimple_seq stmts)
4169 {
4170 gimple_stmt_iterator ittr;
4171 bool err = false;
4172
4173 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4174 {
4175 gimple stmt = gsi_stmt (ittr);
4176
4177 switch (gimple_code (stmt))
4178 {
4179 case GIMPLE_BIND:
4180 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
4181 break;
4182
4183 case GIMPLE_TRY:
4184 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
4185 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
4186 break;
4187
4188 case GIMPLE_EH_FILTER:
4189 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
4190 break;
4191
4192 case GIMPLE_CATCH:
4193 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
4194 break;
4195
4196 default:
4197 {
4198 bool err2 = verify_types_in_gimple_stmt (stmt);
4199 if (err2)
4200 debug_gimple_stmt (stmt);
4201 err |= err2;
4202 }
4203 }
4204 }
4205
4206 return err;
4207 }
4208
4209
4210 /* Verify the GIMPLE statements inside the statement list STMTS. */
4211
4212 void
4213 verify_types_in_gimple_seq (gimple_seq stmts)
4214 {
4215 if (verify_types_in_gimple_seq_2 (stmts))
4216 internal_error ("verify_gimple failed");
4217 }
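
/* Illustrative sketch, not part of GCC: a front end or testing hook
   could run the type verifier over a freshly gimplified body like
   this; FNDECL is hypothetical.  */
#if 0
static void
example_check_body (tree fndecl)
{
  /* Aborts via internal_error if any statement has mismatched types.  */
  verify_types_in_gimple_seq (gimple_body (fndecl));
}
#endif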
4218
4219
4220 /* Verify STMT, return true if STMT is not in GIMPLE form.
4221 TODO: Implement type checking. */
4222
4223 static bool
4224 verify_stmt (gimple_stmt_iterator *gsi)
4225 {
4226 tree addr;
4227 struct walk_stmt_info wi;
4228 bool last_in_block = gsi_one_before_end_p (*gsi);
4229 gimple stmt = gsi_stmt (*gsi);
4230
4231 if (is_gimple_omp (stmt))
4232 {
4233 /* OpenMP directives are validated by the FE and never operated
4234 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4235 non-gimple expressions when the main index variable has had
4236 its address taken. This does not affect the loop itself
4237 because the header of a GIMPLE_OMP_FOR is merely used to determine
4238 how to set up the parallel iteration. */
4239 return false;
4240 }
4241
4242 /* FIXME. The C frontend passes unpromoted arguments in case it
4243 didn't see a function declaration before the call. */
4244 if (is_gimple_call (stmt))
4245 {
4246 tree decl;
4247
4248 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4249 {
4250 error ("invalid function in call statement");
4251 return true;
4252 }
4253
4254 decl = gimple_call_fndecl (stmt);
4255 if (decl
4256 && TREE_CODE (decl) == FUNCTION_DECL
4257 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4258 && (!DECL_PURE_P (decl))
4259 && (!TREE_READONLY (decl)))
4260 {
4261 error ("invalid pure const state for function");
4262 return true;
4263 }
4264 }
4265
4266 memset (&wi, 0, sizeof (wi));
4267 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
4268 if (addr)
4269 {
4270 debug_generic_expr (addr);
4271 inform (gimple_location (gsi_stmt (*gsi)), "in statement");
4272 debug_gimple_stmt (stmt);
4273 return true;
4274 }
4275
4276 /* If the statement is marked as part of an EH region, then it is
4277 expected that the statement could throw. Verify that when we
4278 have optimizations that simplify statements such that we prove
4279 that they cannot throw, that we update other data structures
4280 to match. */
4281 if (lookup_stmt_eh_region (stmt) >= 0)
4282 {
4283 /* During IPA passes, ipa-pure-const sets nothrow flags on calls
4284 and they are updated on statements only after fixup_cfg
4285 is executed at the beginning of the expansion stage. */
4286 if (!stmt_could_throw_p (stmt) && cgraph_state != CGRAPH_STATE_IPA_SSA)
4287 {
4288 error ("statement marked for throw, but doesn%'t");
4289 goto fail;
4290 }
4291 if (!last_in_block && stmt_can_throw_internal (stmt))
4292 {
4293 error ("statement marked for throw in middle of block");
4294 goto fail;
4295 }
4296 }
4297
4298 return false;
4299
4300 fail:
4301 debug_gimple_stmt (stmt);
4302 return true;
4303 }
4304
4305
4306 /* Return true when T can be shared. */
4307
4308 static bool
4309 tree_node_can_be_shared (tree t)
4310 {
4311 if (IS_TYPE_OR_DECL_P (t)
4312 || is_gimple_min_invariant (t)
4313 || TREE_CODE (t) == SSA_NAME
4314 || t == error_mark_node
4315 || TREE_CODE (t) == IDENTIFIER_NODE)
4316 return true;
4317
4318 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4319 return true;
4320
4321 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4322 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4323 || TREE_CODE (t) == COMPONENT_REF
4324 || TREE_CODE (t) == REALPART_EXPR
4325 || TREE_CODE (t) == IMAGPART_EXPR)
4326 t = TREE_OPERAND (t, 0);
4327
4328 if (DECL_P (t))
4329 return true;
4330
4331 return false;
4332 }
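
/* Illustrative sketch, not part of GCC: trees for which
   tree_node_can_be_shared returns false must be unshared before they
   are reused in another statement, or verify_node_sharing below will
   report "incorrect sharing of tree nodes".  COND_STMT is
   hypothetical; compare the copying of a GIMPLE_COND in
   gimple_duplicate_sese_tail further down.  */
#if 0
static gimple
example_copy_cond (gimple cond_stmt)
{
  gimple copy = gimple_copy (cond_stmt);
  /* The operands may be arbitrary expressions; give the copy its own.  */
  gimple_cond_set_lhs (copy, unshare_expr (gimple_cond_lhs (copy)));
  gimple_cond_set_rhs (copy, unshare_expr (gimple_cond_rhs (copy)));
  return copy;
}
#endif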
4333
4334
4335 /* Called via walk_gimple_stmt. Verify tree sharing. */
4336
4337 static tree
4338 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4339 {
4340 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4341 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4342
4343 if (tree_node_can_be_shared (*tp))
4344 {
4345 *walk_subtrees = false;
4346 return NULL;
4347 }
4348
4349 if (pointer_set_insert (visited, *tp))
4350 return *tp;
4351
4352 return NULL;
4353 }
4354
4355
4356 static bool eh_error_found;
4357 static int
4358 verify_eh_throw_stmt_node (void **slot, void *data)
4359 {
4360 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4361 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4362
4363 if (!pointer_set_contains (visited, node->stmt))
4364 {
4365 error ("Dead STMT in EH table");
4366 debug_gimple_stmt (node->stmt);
4367 eh_error_found = true;
4368 }
4369 return 1;
4370 }
4371
4372
4373 /* Verify the GIMPLE statements in every basic block. */
4374
4375 void
4376 verify_stmts (void)
4377 {
4378 basic_block bb;
4379 gimple_stmt_iterator gsi;
4380 bool err = false;
4381 struct pointer_set_t *visited, *visited_stmts;
4382 tree addr;
4383 struct walk_stmt_info wi;
4384
4385 timevar_push (TV_TREE_STMT_VERIFY);
4386 visited = pointer_set_create ();
4387 visited_stmts = pointer_set_create ();
4388
4389 memset (&wi, 0, sizeof (wi));
4390 wi.info = (void *) visited;
4391
4392 FOR_EACH_BB (bb)
4393 {
4394 gimple phi;
4395 size_t i;
4396
4397 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4398 {
4399 phi = gsi_stmt (gsi);
4400 pointer_set_insert (visited_stmts, phi);
4401 if (gimple_bb (phi) != bb)
4402 {
4403 error ("gimple_bb (phi) is set to a wrong basic block");
4404 err |= true;
4405 }
4406
4407 for (i = 0; i < gimple_phi_num_args (phi); i++)
4408 {
4409 tree t = gimple_phi_arg_def (phi, i);
4410 tree addr;
4411
4412 if (!t)
4413 {
4414 error ("missing PHI def");
4415 debug_gimple_stmt (phi);
4416 err |= true;
4417 continue;
4418 }
4419 /* Addressable variables do have SSA_NAMEs but they
4420 are not considered gimple values. */
4421 else if (TREE_CODE (t) != SSA_NAME
4422 && TREE_CODE (t) != FUNCTION_DECL
4423 && !is_gimple_min_invariant (t))
4424 {
4425 error ("PHI argument is not a GIMPLE value");
4426 debug_gimple_stmt (phi);
4427 debug_generic_expr (t);
4428 err |= true;
4429 }
4430
4431 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4432 if (addr)
4433 {
4434 error ("incorrect sharing of tree nodes");
4435 debug_gimple_stmt (phi);
4436 debug_generic_expr (addr);
4437 err |= true;
4438 }
4439 }
4440
4441 #ifdef ENABLE_TYPES_CHECKING
4442 if (verify_gimple_phi (phi))
4443 {
4444 debug_gimple_stmt (phi);
4445 err |= true;
4446 }
4447 #endif
4448 }
4449
4450 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4451 {
4452 gimple stmt = gsi_stmt (gsi);
4453
4454 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4455 || gimple_code (stmt) == GIMPLE_BIND)
4456 {
4457 error ("invalid GIMPLE statement");
4458 debug_gimple_stmt (stmt);
4459 err |= true;
4460 }
4461
4462 pointer_set_insert (visited_stmts, stmt);
4463
4464 if (gimple_bb (stmt) != bb)
4465 {
4466 error ("gimple_bb (stmt) is set to a wrong basic block");
4467 debug_gimple_stmt (stmt);
4468 err |= true;
4469 }
4470
4471 if (gimple_code (stmt) == GIMPLE_LABEL)
4472 {
4473 tree decl = gimple_label_label (stmt);
4474 int uid = LABEL_DECL_UID (decl);
4475
4476 if (uid == -1
4477 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4478 {
4479 error ("incorrect entry in label_to_block_map.\n");
4480 err |= true;
4481 }
4482 }
4483
4484 err |= verify_stmt (&gsi);
4485
4486 #ifdef ENABLE_TYPES_CHECKING
4487 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4488 {
4489 debug_gimple_stmt (stmt);
4490 err |= true;
4491 }
4492 #endif
4493 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4494 if (addr)
4495 {
4496 error ("incorrect sharing of tree nodes");
4497 debug_gimple_stmt (stmt);
4498 debug_generic_expr (addr);
4499 err |= true;
4500 }
4501 gsi_next (&gsi);
4502 }
4503 }
4504
4505 eh_error_found = false;
4506 if (get_eh_throw_stmt_table (cfun))
4507 htab_traverse (get_eh_throw_stmt_table (cfun),
4508 verify_eh_throw_stmt_node,
4509 visited_stmts);
4510
4511 if (err | eh_error_found)
4512 internal_error ("verify_stmts failed");
4513
4514 pointer_set_destroy (visited);
4515 pointer_set_destroy (visited_stmts);
4516 verify_histograms ();
4517 timevar_pop (TV_TREE_STMT_VERIFY);
4518 }
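
/* Illustrative sketch, not part of GCC: verify_stmts walks every
   statement of the function, so callers typically run it only in
   checking-enabled builds.  */
#if 0
static void
example_checking_hook (void)
{
#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}
#endif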
4519
4520
4521 /* Verifies that the flow information is OK. */
4522
4523 static int
4524 gimple_verify_flow_info (void)
4525 {
4526 int err = 0;
4527 basic_block bb;
4528 gimple_stmt_iterator gsi;
4529 gimple stmt;
4530 edge e;
4531 edge_iterator ei;
4532
4533 if (ENTRY_BLOCK_PTR->il.gimple)
4534 {
4535 error ("ENTRY_BLOCK has IL associated with it");
4536 err = 1;
4537 }
4538
4539 if (EXIT_BLOCK_PTR->il.gimple)
4540 {
4541 error ("EXIT_BLOCK has IL associated with it");
4542 err = 1;
4543 }
4544
4545 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4546 if (e->flags & EDGE_FALLTHRU)
4547 {
4548 error ("fallthru to exit from bb %d", e->src->index);
4549 err = 1;
4550 }
4551
4552 FOR_EACH_BB (bb)
4553 {
4554 bool found_ctrl_stmt = false;
4555
4556 stmt = NULL;
4557
4558 /* Skip labels on the start of basic block. */
4559 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4560 {
4561 tree label;
4562 gimple prev_stmt = stmt;
4563
4564 stmt = gsi_stmt (gsi);
4565
4566 if (gimple_code (stmt) != GIMPLE_LABEL)
4567 break;
4568
4569 label = gimple_label_label (stmt);
4570 if (prev_stmt && DECL_NONLOCAL (label))
4571 {
4572 error ("nonlocal label ");
4573 print_generic_expr (stderr, label, 0);
4574 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4575 bb->index);
4576 err = 1;
4577 }
4578
4579 if (label_to_block (label) != bb)
4580 {
4581 error ("label ");
4582 print_generic_expr (stderr, label, 0);
4583 fprintf (stderr, " to block does not match in bb %d",
4584 bb->index);
4585 err = 1;
4586 }
4587
4588 if (decl_function_context (label) != current_function_decl)
4589 {
4590 error ("label ");
4591 print_generic_expr (stderr, label, 0);
4592 fprintf (stderr, " has incorrect context in bb %d",
4593 bb->index);
4594 err = 1;
4595 }
4596 }
4597
4598 /* Verify that the body of basic block BB is free of control flow. */
4599 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4600 {
4601 gimple stmt = gsi_stmt (gsi);
4602
4603 if (found_ctrl_stmt)
4604 {
4605 error ("control flow in the middle of basic block %d",
4606 bb->index);
4607 err = 1;
4608 }
4609
4610 if (stmt_ends_bb_p (stmt))
4611 found_ctrl_stmt = true;
4612
4613 if (gimple_code (stmt) == GIMPLE_LABEL)
4614 {
4615 error ("label ");
4616 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4617 fprintf (stderr, " in the middle of basic block %d", bb->index);
4618 err = 1;
4619 }
4620 }
4621
4622 gsi = gsi_last_bb (bb);
4623 if (gsi_end_p (gsi))
4624 continue;
4625
4626 stmt = gsi_stmt (gsi);
4627
4628 err |= verify_eh_edges (stmt);
4629
4630 if (is_ctrl_stmt (stmt))
4631 {
4632 FOR_EACH_EDGE (e, ei, bb->succs)
4633 if (e->flags & EDGE_FALLTHRU)
4634 {
4635 error ("fallthru edge after a control statement in bb %d",
4636 bb->index);
4637 err = 1;
4638 }
4639 }
4640
4641 if (gimple_code (stmt) != GIMPLE_COND)
4642 {
4643 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4644 after anything other than a GIMPLE_COND statement. */
4645 FOR_EACH_EDGE (e, ei, bb->succs)
4646 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4647 {
4648 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4649 bb->index);
4650 err = 1;
4651 }
4652 }
4653
4654 switch (gimple_code (stmt))
4655 {
4656 case GIMPLE_COND:
4657 {
4658 edge true_edge;
4659 edge false_edge;
4660
4661 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4662
4663 if (!true_edge
4664 || !false_edge
4665 || !(true_edge->flags & EDGE_TRUE_VALUE)
4666 || !(false_edge->flags & EDGE_FALSE_VALUE)
4667 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4668 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4669 || EDGE_COUNT (bb->succs) >= 3)
4670 {
4671 error ("wrong outgoing edge flags at end of bb %d",
4672 bb->index);
4673 err = 1;
4674 }
4675 }
4676 break;
4677
4678 case GIMPLE_GOTO:
4679 if (simple_goto_p (stmt))
4680 {
4681 error ("explicit goto at end of bb %d", bb->index);
4682 err = 1;
4683 }
4684 else
4685 {
4686 /* FIXME. We should double check that the labels in the
4687 destination blocks have their address taken. */
4688 FOR_EACH_EDGE (e, ei, bb->succs)
4689 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4690 | EDGE_FALSE_VALUE))
4691 || !(e->flags & EDGE_ABNORMAL))
4692 {
4693 error ("wrong outgoing edge flags at end of bb %d",
4694 bb->index);
4695 err = 1;
4696 }
4697 }
4698 break;
4699
4700 case GIMPLE_RETURN:
4701 if (!single_succ_p (bb)
4702 || (single_succ_edge (bb)->flags
4703 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4704 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4705 {
4706 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4707 err = 1;
4708 }
4709 if (single_succ (bb) != EXIT_BLOCK_PTR)
4710 {
4711 error ("return edge does not point to exit in bb %d",
4712 bb->index);
4713 err = 1;
4714 }
4715 break;
4716
4717 case GIMPLE_SWITCH:
4718 {
4719 tree prev;
4720 edge e;
4721 size_t i, n;
4722
4723 n = gimple_switch_num_labels (stmt);
4724
4725 /* Mark all the destination basic blocks. */
4726 for (i = 0; i < n; ++i)
4727 {
4728 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4729 basic_block label_bb = label_to_block (lab);
4730 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4731 label_bb->aux = (void *)1;
4732 }
4733
4734 /* Verify that the case labels are sorted. */
4735 prev = gimple_switch_label (stmt, 0);
4736 for (i = 1; i < n; ++i)
4737 {
4738 tree c = gimple_switch_label (stmt, i);
4739 if (!CASE_LOW (c))
4740 {
4741 error ("found default case not at the start of "
4742 "case vector");
4743 err = 1;
4744 continue;
4745 }
4746 if (CASE_LOW (prev)
4747 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4748 {
4749 error ("case labels not sorted: ");
4750 print_generic_expr (stderr, prev, 0);
4751 fprintf (stderr," is greater than ");
4752 print_generic_expr (stderr, c, 0);
4753 fprintf (stderr," but comes before it.\n");
4754 err = 1;
4755 }
4756 prev = c;
4757 }
4758 /* VRP will remove the default case if it can prove it will
4759 never be executed. So do not verify there always exists
4760 a default case here. */
4761
4762 FOR_EACH_EDGE (e, ei, bb->succs)
4763 {
4764 if (!e->dest->aux)
4765 {
4766 error ("extra outgoing edge %d->%d",
4767 bb->index, e->dest->index);
4768 err = 1;
4769 }
4770
4771 e->dest->aux = (void *)2;
4772 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4773 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4774 {
4775 error ("wrong outgoing edge flags at end of bb %d",
4776 bb->index);
4777 err = 1;
4778 }
4779 }
4780
4781 /* Check that we have all of them. */
4782 for (i = 0; i < n; ++i)
4783 {
4784 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4785 basic_block label_bb = label_to_block (lab);
4786
4787 if (label_bb->aux != (void *)2)
4788 {
4789 error ("missing edge %i->%i", bb->index, label_bb->index);
4790 err = 1;
4791 }
4792 }
4793
4794 FOR_EACH_EDGE (e, ei, bb->succs)
4795 e->dest->aux = (void *)0;
4796 }
4797
4798 default: ;
4799 }
4800 }
4801
4802 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4803 verify_dominators (CDI_DOMINATORS);
4804
4805 return err;
4806 }
4807
4808
4809 /* Updates phi nodes after creating a forwarder block joined
4810 by edge FALLTHRU. */
4811
4812 static void
4813 gimple_make_forwarder_block (edge fallthru)
4814 {
4815 edge e;
4816 edge_iterator ei;
4817 basic_block dummy, bb;
4818 tree var;
4819 gimple_stmt_iterator gsi;
4820
4821 dummy = fallthru->src;
4822 bb = fallthru->dest;
4823
4824 if (single_pred_p (bb))
4825 return;
4826
4827 /* If we redirected a branch we must create new PHI nodes at the
4828 start of BB. */
4829 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4830 {
4831 gimple phi, new_phi;
4832
4833 phi = gsi_stmt (gsi);
4834 var = gimple_phi_result (phi);
4835 new_phi = create_phi_node (var, bb);
4836 SSA_NAME_DEF_STMT (var) = new_phi;
4837 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4838 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru);
4839 }
4840
4841 /* Add the arguments we have stored on edges. */
4842 FOR_EACH_EDGE (e, ei, bb->preds)
4843 {
4844 if (e == fallthru)
4845 continue;
4846
4847 flush_pending_stmts (e);
4848 }
4849 }
4850
4851
4852 /* Return a non-special label in the head of basic block BB.
4853 Create one if it doesn't exist. */
4854
4855 tree
4856 gimple_block_label (basic_block bb)
4857 {
4858 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4859 bool first = true;
4860 tree label;
4861 gimple stmt;
4862
4863 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4864 {
4865 stmt = gsi_stmt (i);
4866 if (gimple_code (stmt) != GIMPLE_LABEL)
4867 break;
4868 label = gimple_label_label (stmt);
4869 if (!DECL_NONLOCAL (label))
4870 {
4871 if (!first)
4872 gsi_move_before (&i, &s);
4873 return label;
4874 }
4875 }
4876
4877 label = create_artificial_label (UNKNOWN_LOCATION);
4878 stmt = gimple_build_label (label);
4879 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4880 return label;
4881 }
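
/* Illustrative sketch, not part of GCC: gimple_block_label is the
   usual way to obtain a jump target when a branch to DEST must be
   made explicit.  SRC and DEST are hypothetical.  */
#if 0
static void
example_explicit_goto (basic_block src, basic_block dest)
{
  tree label = gimple_block_label (dest);
  gimple goto_stmt = gimple_build_goto (label);
  gimple_stmt_iterator gsi = gsi_last_bb (src);

  gsi_insert_after (&gsi, goto_stmt, GSI_NEW_STMT);
}
#endif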
4882
4883
4884 /* Attempt to perform edge redirection by replacing a possibly complex
4885 jump instruction by a goto or by removing the jump completely.
4886 This can apply only if all edges now point to the same block. The
4887 parameters and return values are equivalent to
4888 redirect_edge_and_branch. */
4889
4890 static edge
4891 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4892 {
4893 basic_block src = e->src;
4894 gimple_stmt_iterator i;
4895 gimple stmt;
4896
4897 /* We can replace or remove a complex jump only when we have exactly
4898 two edges. */
4899 if (EDGE_COUNT (src->succs) != 2
4900 /* Verify that all targets will be TARGET. Specifically, the
4901 edge that is not E must also go to TARGET. */
4902 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4903 return NULL;
4904
4905 i = gsi_last_bb (src);
4906 if (gsi_end_p (i))
4907 return NULL;
4908
4909 stmt = gsi_stmt (i);
4910
4911 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4912 {
4913 gsi_remove (&i, true);
4914 e = ssa_redirect_edge (e, target);
4915 e->flags = EDGE_FALLTHRU;
4916 return e;
4917 }
4918
4919 return NULL;
4920 }
4921
4922
4923 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4924 edge representing the redirected branch. */
4925
4926 static edge
4927 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4928 {
4929 basic_block bb = e->src;
4930 gimple_stmt_iterator gsi;
4931 edge ret;
4932 gimple stmt;
4933
4934 if (e->flags & EDGE_ABNORMAL)
4935 return NULL;
4936
4937 if (e->src != ENTRY_BLOCK_PTR
4938 && (ret = gimple_try_redirect_by_replacing_jump (e, dest)))
4939 return ret;
4940
4941 if (e->dest == dest)
4942 return NULL;
4943
4944 if (e->flags & EDGE_EH)
4945 return redirect_eh_edge (e, dest);
4946
4947 gsi = gsi_last_bb (bb);
4948 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4949
4950 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
4951 {
4952 case GIMPLE_COND:
4953 /* For COND_EXPR, we only need to redirect the edge. */
4954 break;
4955
4956 case GIMPLE_GOTO:
4957 /* No non-abnormal edges should lead from a non-simple goto, and
4958 simple ones should be represented implicitly. */
4959 gcc_unreachable ();
4960
4961 case GIMPLE_SWITCH:
4962 {
4963 tree label = gimple_block_label (dest);
4964 tree cases = get_cases_for_edge (e, stmt);
4965
4966 /* If we have a list of cases associated with E, then use it
4967 as it's a lot faster than walking the entire case vector. */
4968 if (cases)
4969 {
4970 edge e2 = find_edge (e->src, dest);
4971 tree last, first;
4972
4973 first = cases;
4974 while (cases)
4975 {
4976 last = cases;
4977 CASE_LABEL (cases) = label;
4978 cases = TREE_CHAIN (cases);
4979 }
4980
4981 /* If there was already an edge in the CFG, then we need
4982 to move all the cases associated with E to E2. */
4983 if (e2)
4984 {
4985 tree cases2 = get_cases_for_edge (e2, stmt);
4986
4987 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4988 TREE_CHAIN (cases2) = first;
4989 }
4990 }
4991 else
4992 {
4993 size_t i, n = gimple_switch_num_labels (stmt);
4994
4995 for (i = 0; i < n; i++)
4996 {
4997 tree elt = gimple_switch_label (stmt, i);
4998 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4999 CASE_LABEL (elt) = label;
5000 }
5001 }
5002
5003 break;
5004 }
5005
5006 case GIMPLE_RETURN:
5007 gsi_remove (&gsi, true);
5008 e->flags |= EDGE_FALLTHRU;
5009 break;
5010
5011 case GIMPLE_OMP_RETURN:
5012 case GIMPLE_OMP_CONTINUE:
5013 case GIMPLE_OMP_SECTIONS_SWITCH:
5014 case GIMPLE_OMP_FOR:
5015 /* The edges from OMP constructs can be simply redirected. */
5016 break;
5017
5018 default:
5019 /* Otherwise it must be a fallthru edge, and we don't need to
5020 do anything besides redirecting it. */
5021 gcc_assert (e->flags & EDGE_FALLTHRU);
5022 break;
5023 }
5024
5025 /* Update/insert PHI nodes as necessary. */
5026
5027 /* Now update the edges in the CFG. */
5028 e = ssa_redirect_edge (e, dest);
5029
5030 return e;
5031 }
5032
5033 /* Returns true if it is possible to remove edge E by redirecting
5034 it to the destination of the other edge from E->src. */
5035
5036 static bool
5037 gimple_can_remove_branch_p (const_edge e)
5038 {
5039 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5040 return false;
5041
5042 return true;
5043 }
5044
5045 /* Simple wrapper, as we can always redirect fallthru edges. */
5046
5047 static basic_block
5048 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5049 {
5050 e = gimple_redirect_edge_and_branch (e, dest);
5051 gcc_assert (e);
5052
5053 return NULL;
5054 }
5055
5056
5057 /* Splits basic block BB after statement STMT (but at least after the
5058 labels). If STMT is NULL, BB is split just after the labels. */
5059
5060 static basic_block
5061 gimple_split_block (basic_block bb, void *stmt)
5062 {
5063 gimple_stmt_iterator gsi;
5064 gimple_stmt_iterator gsi_tgt;
5065 gimple act;
5066 gimple_seq list;
5067 basic_block new_bb;
5068 edge e;
5069 edge_iterator ei;
5070
5071 new_bb = create_empty_bb (bb);
5072
5073 /* Redirect the outgoing edges. */
5074 new_bb->succs = bb->succs;
5075 bb->succs = NULL;
5076 FOR_EACH_EDGE (e, ei, new_bb->succs)
5077 e->src = new_bb;
5078
5079 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5080 stmt = NULL;
5081
5082 /* Move everything from GSI to the new basic block. */
5083 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5084 {
5085 act = gsi_stmt (gsi);
5086 if (gimple_code (act) == GIMPLE_LABEL)
5087 continue;
5088
5089 if (!stmt)
5090 break;
5091
5092 if (stmt == act)
5093 {
5094 gsi_next (&gsi);
5095 break;
5096 }
5097 }
5098
5099 if (gsi_end_p (gsi))
5100 return new_bb;
5101
5102 /* Split the statement list; avoid re-creating containers, as this
5103 brings ugly quadratic memory consumption in the inliner.
5104 (We are still quadratic since we need to update stmt BB pointers,
5105 sadly.) */
5106 list = gsi_split_seq_before (&gsi);
5107 set_bb_seq (new_bb, list);
5108 for (gsi_tgt = gsi_start (list);
5109 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5110 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5111
5112 return new_bb;
5113 }
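
/* Illustrative sketch, not part of GCC: passes normally reach
   gimple_split_block through the CFG hook wrapper split_block, which
   also creates the fallthru edge to the new block.  BB and STMT are
   hypothetical.  */
#if 0
static basic_block
example_split_after (basic_block bb, gimple stmt)
{
  edge e = split_block (bb, stmt);
  return e->dest;  /* The newly created block.  */
}
#endif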
5114
5115
5116 /* Moves basic block BB after block AFTER. */
5117
5118 static bool
5119 gimple_move_block_after (basic_block bb, basic_block after)
5120 {
5121 if (bb->prev_bb == after)
5122 return true;
5123
5124 unlink_block (bb);
5125 link_block (bb, after);
5126
5127 return true;
5128 }
5129
5130
5131 /* Return true if basic block BB can be duplicated. */
5132
5133 static bool
5134 gimple_can_duplicate_bb_p (const_basic_block bb)
5135 {
5136 gimple_stmt_iterator gsi = gsi_last_bb (bb);
5137
5138 /* The RTL expander has a rather artificial limitation of at most one RESX
5139 instruction per region. It could be fixed by turning the 1-1 map into a
5140 1-many map, but since that code needs to be rewritten as gimple-level
5141 lowering anyway and there is little performance reason to duplicate RESX
5142 instructions, we just disallow it for the moment. */
5143 if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_RESX)
5144 return false;
5145 return true;
5146 }
5147
5148 /* Create a duplicate of the basic block BB. NOTE: This does not
5149 preserve SSA form. */
5150
5151 static basic_block
5152 gimple_duplicate_bb (basic_block bb)
5153 {
5154 basic_block new_bb;
5155 gimple_stmt_iterator gsi, gsi_tgt;
5156 gimple_seq phis = phi_nodes (bb);
5157 gimple phi, stmt, copy;
5158
5159 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5160
5161 /* Copy the PHI nodes. We ignore PHI node arguments here because
5162 the incoming edges have not been setup yet. */
5163 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5164 {
5165 phi = gsi_stmt (gsi);
5166 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5167 create_new_def_for (gimple_phi_result (copy), copy,
5168 gimple_phi_result_ptr (copy));
5169 }
5170
5171 gsi_tgt = gsi_start_bb (new_bb);
5172 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5173 {
5174 def_operand_p def_p;
5175 ssa_op_iter op_iter;
5176 int region;
5177
5178 stmt = gsi_stmt (gsi);
5179 if (gimple_code (stmt) == GIMPLE_LABEL)
5180 continue;
5181
5182 /* Create a new copy of STMT and duplicate STMT's virtual
5183 operands. */
5184 copy = gimple_copy (stmt);
5185 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5186 region = lookup_stmt_eh_region (stmt);
5187 if (region >= 0)
5188 add_stmt_to_eh_region (copy, region);
5189 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5190
5191 /* Create new names for all the definitions created by COPY and
5192 add replacement mappings for each new name. */
5193 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5194 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5195 }
5196
5197 return new_bb;
5198 }
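
/* Illustrative sketch, not part of GCC: gimple_duplicate_bb is reached
   through the CFG hook duplicate_block, which also records the
   original/copy mapping queried by get_bb_original and get_bb_copy.
   Since SSA form is not preserved, PHI arguments on the copy's edges
   still have to be filled in; BB is hypothetical.  */
#if 0
static basic_block
example_duplicate (basic_block bb)
{
  basic_block copy = duplicate_block (bb, NULL, NULL);
  /* Once the incoming edges of COPY exist, the PHI arguments can be
     completed, e.g. via add_phi_args_after_copy_bb below.  */
  return copy;
}
#endif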
5199
5200 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5201
5202 static void
5203 add_phi_args_after_copy_edge (edge e_copy)
5204 {
5205 basic_block bb, bb_copy = e_copy->src, dest;
5206 edge e;
5207 edge_iterator ei;
5208 gimple phi, phi_copy;
5209 tree def;
5210 gimple_stmt_iterator psi, psi_copy;
5211
5212 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5213 return;
5214
5215 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5216
5217 if (e_copy->dest->flags & BB_DUPLICATED)
5218 dest = get_bb_original (e_copy->dest);
5219 else
5220 dest = e_copy->dest;
5221
5222 e = find_edge (bb, dest);
5223 if (!e)
5224 {
5225 /* During loop unrolling the target of the latch edge is copied.
5226 In this case we are not looking for the edge to DEST, but for the
5227 edge to the duplicated block whose original was DEST. */
5228 FOR_EACH_EDGE (e, ei, bb->succs)
5229 {
5230 if ((e->dest->flags & BB_DUPLICATED)
5231 && get_bb_original (e->dest) == dest)
5232 break;
5233 }
5234
5235 gcc_assert (e != NULL);
5236 }
5237
5238 for (psi = gsi_start_phis (e->dest),
5239 psi_copy = gsi_start_phis (e_copy->dest);
5240 !gsi_end_p (psi);
5241 gsi_next (&psi), gsi_next (&psi_copy))
5242 {
5243 phi = gsi_stmt (psi);
5244 phi_copy = gsi_stmt (psi_copy);
5245 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5246 add_phi_arg (phi_copy, def, e_copy);
5247 }
5248 }
5249
5250
5251 /* Basic block BB_COPY was created by code duplication. Add phi node
5252 arguments for edges going out of BB_COPY. The blocks that were
5253 duplicated have BB_DUPLICATED set. */
5254
5255 void
5256 add_phi_args_after_copy_bb (basic_block bb_copy)
5257 {
5258 edge e_copy;
5259 edge_iterator ei;
5260
5261 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5262 {
5263 add_phi_args_after_copy_edge (e_copy);
5264 }
5265 }
5266
5267 /* Blocks in REGION_COPY array of length N_REGION were created by
5268 duplication of basic blocks. Add phi node arguments for edges
5269 going from these blocks. If E_COPY is not NULL, also add
5270 phi node arguments for its destination. */
5271
5272 void
5273 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5274 edge e_copy)
5275 {
5276 unsigned i;
5277
5278 for (i = 0; i < n_region; i++)
5279 region_copy[i]->flags |= BB_DUPLICATED;
5280
5281 for (i = 0; i < n_region; i++)
5282 add_phi_args_after_copy_bb (region_copy[i]);
5283 if (e_copy)
5284 add_phi_args_after_copy_edge (e_copy);
5285
5286 for (i = 0; i < n_region; i++)
5287 region_copy[i]->flags &= ~BB_DUPLICATED;
5288 }
5289
5290 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5291 important exit edge EXIT. By important we mean that no SSA name defined
5292 inside region is live over the other exit edges of the region. All entry
5293 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5294 to the duplicate of the region. SSA form, dominance and loop information
5295 is updated. The new basic blocks are stored to REGION_COPY in the same
5296 order as they had in REGION, provided that REGION_COPY is not NULL.
5297 The function returns false if it is unable to copy the region,
5298 true otherwise. */
5299
5300 bool
5301 gimple_duplicate_sese_region (edge entry, edge exit,
5302 basic_block *region, unsigned n_region,
5303 basic_block *region_copy)
5304 {
5305 unsigned i;
5306 bool free_region_copy = false, copying_header = false;
5307 struct loop *loop = entry->dest->loop_father;
5308 edge exit_copy;
5309 VEC (basic_block, heap) *doms;
5310 edge redirected;
5311 int total_freq = 0, entry_freq = 0;
5312 gcov_type total_count = 0, entry_count = 0;
5313
5314 if (!can_copy_bbs_p (region, n_region))
5315 return false;
5316
5317 /* Some sanity checking. Note that we do not check for all possible
5318 misuses of the function. That is, if you ask to copy something weird,
5319 it will work, but the state of the structures probably will not be
5320 correct. */
5321 for (i = 0; i < n_region; i++)
5322 {
5323 /* We do not handle subloops, i.e. all the blocks must belong to the
5324 same loop. */
5325 if (region[i]->loop_father != loop)
5326 return false;
5327
5328 if (region[i] != entry->dest
5329 && region[i] == loop->header)
5330 return false;
5331 }
5332
5333 set_loop_copy (loop, loop);
5334
5335 /* In case the function is used for loop header copying (which is the primary
5336 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5337 if (loop->header == entry->dest)
5338 {
5339 copying_header = true;
5340 set_loop_copy (loop, loop_outer (loop));
5341
5342 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5343 return false;
5344
5345 for (i = 0; i < n_region; i++)
5346 if (region[i] != exit->src
5347 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5348 return false;
5349 }
5350
5351 if (!region_copy)
5352 {
5353 region_copy = XNEWVEC (basic_block, n_region);
5354 free_region_copy = true;
5355 }
5356
5357 gcc_assert (!need_ssa_update_p (cfun));
5358
5359 /* Record blocks outside the region that are dominated by something
5360 inside. */
5361 doms = NULL;
5362 initialize_original_copy_tables ();
5363
5364 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5365
5366 if (entry->dest->count)
5367 {
5368 total_count = entry->dest->count;
5369 entry_count = entry->count;
5370 /* Fix up corner cases, to avoid division by zero or creation of negative
5371 frequencies. */
5372 if (entry_count > total_count)
5373 entry_count = total_count;
5374 }
5375 else
5376 {
5377 total_freq = entry->dest->frequency;
5378 entry_freq = EDGE_FREQUENCY (entry);
5379 /* Fix up corner cases, to avoid division by zero or creation of negative
5380 frequencies. */
5381 if (total_freq == 0)
5382 total_freq = 1;
5383 else if (entry_freq > total_freq)
5384 entry_freq = total_freq;
5385 }
5386
5387 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5388 split_edge_bb_loc (entry));
5389 if (total_count)
5390 {
5391 scale_bbs_frequencies_gcov_type (region, n_region,
5392 total_count - entry_count,
5393 total_count);
5394 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5395 total_count);
5396 }
5397 else
5398 {
5399 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5400 total_freq);
5401 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5402 }
5403
5404 if (copying_header)
5405 {
5406 loop->header = exit->dest;
5407 loop->latch = exit->src;
5408 }
5409
5410 /* Redirect the entry and add the phi node arguments. */
5411 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5412 gcc_assert (redirected != NULL);
5413 flush_pending_stmts (entry);
5414
5415 /* Concerning updating of dominators: we must recount dominators
5416 for the entry block and its copy. Anything outside the region
5417 that was dominated by something inside needs recounting as
5418 well. */
5419 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5420 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5421 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5422 VEC_free (basic_block, heap, doms);
5423
5424 /* Add the other PHI node arguments. */
5425 add_phi_args_after_copy (region_copy, n_region, NULL);
5426
5427 /* Update the SSA web. */
5428 update_ssa (TODO_update_ssa);
5429
5430 if (free_region_copy)
5431 free (region_copy);
5432
5433 free_original_copy_tables ();
5434 return true;
5435 }
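
/* Illustrative sketch, not part of GCC: the primary user of
   gimple_duplicate_sese_region is loop header copying, roughly as
   below.  LOOP, the block array BBS and the chosen EXIT edge are
   hypothetical; see tree-ssa-loop-ch.c for the real caller.  */
#if 0
static bool
example_copy_loop_header (struct loop *loop, basic_block *bbs,
			  unsigned n, edge exit)
{
  /* Duplicate the header blocks on the preheader edge, so the exit
     test is executed once before the loop is entered.  */
  return gimple_duplicate_sese_region (loop_preheader_edge (loop),
				       exit, bbs, n, NULL);
}
#endif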
5436
5437 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5438 are stored to REGION_COPY in the same order in that they appear
5439 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5440 the region, EXIT an exit from it. The condition guarding EXIT
5441 is moved to ENTRY. Returns true if duplication succeeds, false
5442 otherwise.
5443
5444 For example,
5445
5446 some_code;
5447 if (cond)
5448 A;
5449 else
5450 B;
5451
5452 is transformed to
5453
5454 if (cond)
5455 {
5456 some_code;
5457 A;
5458 }
5459 else
5460 {
5461 some_code;
5462 B;
5463 }
5464 */
5465
5466 bool
5467 gimple_duplicate_sese_tail (edge entry, edge exit,
5468 basic_block *region, unsigned n_region,
5469 basic_block *region_copy)
5470 {
5471 unsigned i;
5472 bool free_region_copy = false;
5473 struct loop *loop = exit->dest->loop_father;
5474 struct loop *orig_loop = entry->dest->loop_father;
5475 basic_block switch_bb, entry_bb, nentry_bb;
5476 VEC (basic_block, heap) *doms;
5477 int total_freq = 0, exit_freq = 0;
5478 gcov_type total_count = 0, exit_count = 0;
5479 edge exits[2], nexits[2], e;
5480 gimple_stmt_iterator gsi;
5481 gimple cond_stmt;
5482 edge sorig, snew;
5483
5484 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5485 exits[0] = exit;
5486 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5487
5488 if (!can_copy_bbs_p (region, n_region))
5489 return false;
5490
5491 /* Some sanity checking. Note that we do not check for all possible
5492 misuses of the function. That is, if you ask to copy something weird
5493 (e.g., in the example, if there is a jump from inside to the middle
5494 of some_code, or some_code defines some of the values used in cond),
5495 it will work, but the resulting code will not be correct. */
5496 for (i = 0; i < n_region; i++)
5497 {
5498 /* We do not handle subloops, i.e. all the blocks must belong to the
5499 same loop. */
5500 if (region[i]->loop_father != orig_loop)
5501 return false;
5502
5503 if (region[i] == orig_loop->latch)
5504 return false;
5505 }
5506
5507 initialize_original_copy_tables ();
5508 set_loop_copy (orig_loop, loop);
5509
5510 if (!region_copy)
5511 {
5512 region_copy = XNEWVEC (basic_block, n_region);
5513 free_region_copy = true;
5514 }
5515
5516 gcc_assert (!need_ssa_update_p (cfun));
5517
5518 /* Record blocks outside the region that are dominated by something
5519 inside. */
5520 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5521
5522 if (exit->src->count)
5523 {
5524 total_count = exit->src->count;
5525 exit_count = exit->count;
5526 /* Fix up corner cases, to avoid division by zero or creation of negative
5527 frequencies. */
5528 if (exit_count > total_count)
5529 exit_count = total_count;
5530 }
5531 else
5532 {
5533 total_freq = exit->src->frequency;
5534 exit_freq = EDGE_FREQUENCY (exit);
5535 /* Fix up corner cases, to avoid division by zero or creation of negative
5536 frequencies. */
5537 if (total_freq == 0)
5538 total_freq = 1;
5539 if (exit_freq > total_freq)
5540 exit_freq = total_freq;
5541 }
5542
5543 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5544 split_edge_bb_loc (exit));
5545 if (total_count)
5546 {
5547 scale_bbs_frequencies_gcov_type (region, n_region,
5548 total_count - exit_count,
5549 total_count);
5550 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5551 total_count);
5552 }
5553 else
5554 {
5555 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5556 total_freq);
5557 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5558 }
5559
5560 /* Create the switch block, and put the exit condition to it. */
5561 entry_bb = entry->dest;
5562 nentry_bb = get_bb_copy (entry_bb);
5563 if (!last_stmt (entry->src)
5564 || !stmt_ends_bb_p (last_stmt (entry->src)))
5565 switch_bb = entry->src;
5566 else
5567 switch_bb = split_edge (entry);
5568 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5569
5570 gsi = gsi_last_bb (switch_bb);
5571 cond_stmt = last_stmt (exit->src);
5572 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5573 cond_stmt = gimple_copy (cond_stmt);
5574 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5575 gimple_cond_set_rhs (cond_stmt, unshare_expr (gimple_cond_rhs (cond_stmt)));
5576 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5577
5578 sorig = single_succ_edge (switch_bb);
5579 sorig->flags = exits[1]->flags;
5580 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5581
5582 /* Register the new edge from SWITCH_BB in loop exit lists. */
5583 rescan_loop_exit (snew, true, false);
5584
5585 /* Add the PHI node arguments. */
5586 add_phi_args_after_copy (region_copy, n_region, snew);
5587
5588 /* Get rid of now superfluous conditions and associated edges (and phi node
5589 arguments). */
5590 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5591 PENDING_STMT (e) = NULL;
5592 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5593 PENDING_STMT (e) = NULL;
5594
5595 /* Anything outside the region that was dominated by something inside
5596 needs its dominance info updated. */
5597 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5598 VEC_free (basic_block, heap, doms);
5599
5600 /* Update the SSA web. */
5601 update_ssa (TODO_update_ssa);
5602
5603 if (free_region_copy)
5604 free (region_copy);
5605
5606 free_original_copy_tables ();
5607 return true;
5608 }
5609
5610 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5611 adding blocks when the dominator traversal reaches EXIT. This
5612 function silently assumes that ENTRY strictly dominates EXIT. */
5613
5614 void
5615 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5616 VEC(basic_block,heap) **bbs_p)
5617 {
5618 basic_block son;
5619
5620 for (son = first_dom_son (CDI_DOMINATORS, entry);
5621 son;
5622 son = next_dom_son (CDI_DOMINATORS, son))
5623 {
5624 VEC_safe_push (basic_block, heap, *bbs_p, son);
5625 if (son != exit)
5626 gather_blocks_in_sese_region (son, exit, bbs_p);
5627 }
5628 }
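
/* Illustrative sketch, not part of GCC: collecting all blocks of a
   single-entry single-exit region before operating on it.  Note that
   ENTRY_BB itself must be pushed by the caller, since the traversal
   starts with its dominator sons; ENTRY_BB and EXIT_BB are
   hypothetical and ENTRY_BB must strictly dominate EXIT_BB.  */
#if 0
static VEC (basic_block, heap) *
example_collect_region (basic_block entry_bb, basic_block exit_bb)
{
  VEC (basic_block, heap) *bbs = NULL;

  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
  return bbs;
}
#endif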
5629
5630 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5631 The duplicates are recorded in VARS_MAP. */
5632
5633 static void
5634 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5635 tree to_context)
5636 {
5637 tree t = *tp, new_t;
5638 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5639 void **loc;
5640
5641 if (DECL_CONTEXT (t) == to_context)
5642 return;
5643
5644 loc = pointer_map_contains (vars_map, t);
5645
5646 if (!loc)
5647 {
5648 loc = pointer_map_insert (vars_map, t);
5649
5650 if (SSA_VAR_P (t))
5651 {
5652 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5653 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
5654 }
5655 else
5656 {
5657 gcc_assert (TREE_CODE (t) == CONST_DECL);
5658 new_t = copy_node (t);
5659 }
5660 DECL_CONTEXT (new_t) = to_context;
5661
5662 *loc = new_t;
5663 }
5664 else
5665 new_t = (tree) *loc;
5666
5667 *tp = new_t;
5668 }
5669
5670
5671 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5672 VARS_MAP maps old ssa names and var_decls to the new ones. */
5673
5674 static tree
5675 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5676 tree to_context)
5677 {
5678 void **loc;
5679 tree new_name, decl = SSA_NAME_VAR (name);
5680
5681 gcc_assert (is_gimple_reg (name));
5682
5683 loc = pointer_map_contains (vars_map, name);
5684
5685 if (!loc)
5686 {
5687 replace_by_duplicate_decl (&decl, vars_map, to_context);
5688
5689 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5690 if (gimple_in_ssa_p (cfun))
5691 add_referenced_var (decl);
5692
5693 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5694 if (SSA_NAME_IS_DEFAULT_DEF (name))
5695 set_default_def (decl, new_name);
5696 pop_cfun ();
5697
5698 loc = pointer_map_insert (vars_map, name);
5699 *loc = new_name;
5700 }
5701 else
5702 new_name = (tree) *loc;
5703
5704 return new_name;
5705 }
5706
5707 struct move_stmt_d
5708 {
5709 tree orig_block;
5710 tree new_block;
5711 tree from_context;
5712 tree to_context;
5713 struct pointer_map_t *vars_map;
5714 htab_t new_label_map;
5715 bool remap_decls_p;
5716 };
5717
5718 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5719 contained in *TP if it was previously set to ORIG_BLOCK, and change
5720 the DECL_CONTEXT of every local variable referenced in *TP. */
5721
5722 static tree
5723 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5724 {
5725 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5726 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5727 tree t = *tp;
5728
5729 if (EXPR_P (t))
5730 /* We should never have TREE_BLOCK set on non-statements. */
5731 gcc_assert (!TREE_BLOCK (t));
5732
5733 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5734 {
5735 if (TREE_CODE (t) == SSA_NAME)
5736 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5737 else if (TREE_CODE (t) == LABEL_DECL)
5738 {
5739 if (p->new_label_map)
5740 {
5741 struct tree_map in, *out;
5742 in.base.from = t;
5743 out = (struct tree_map *)
5744 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5745 if (out)
5746 *tp = t = out->to;
5747 }
5748
5749 DECL_CONTEXT (t) = p->to_context;
5750 }
5751 else if (p->remap_decls_p)
5752 {
5753 /* Replace T with its duplicate. T should no longer appear in the
5754 parent function, so this looks wasteful; however, it may appear
5755 in referenced_vars, and more importantly, as virtual operands of
5756 statements, and in alias lists of other variables. It would be
5757 quite difficult to expunge it from all those places. ??? It might
5758 suffice to do this for addressable variables. */
5759 if ((TREE_CODE (t) == VAR_DECL
5760 && !is_global_var (t))
5761 || TREE_CODE (t) == CONST_DECL)
5762 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5763
5764 if (SSA_VAR_P (t)
5765 && gimple_in_ssa_p (cfun))
5766 {
5767 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5768 add_referenced_var (*tp);
5769 pop_cfun ();
5770 }
5771 }
5772 *walk_subtrees = 0;
5773 }
5774 else if (TYPE_P (t))
5775 *walk_subtrees = 0;
5776
5777 return NULL_TREE;
5778 }
5779
5780 /* Like move_stmt_op, but for gimple statements.
5781
5782 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5783 contained in the current statement in *GSI_P and change the
5784 DECL_CONTEXT of every local variable referenced in the current
5785 statement. */
5786
5787 static tree
5788 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5789 struct walk_stmt_info *wi)
5790 {
5791 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5792 gimple stmt = gsi_stmt (*gsi_p);
5793 tree block = gimple_block (stmt);
5794
5795 if (p->orig_block == NULL_TREE
5796 || block == p->orig_block
5797 || block == NULL_TREE)
5798 gimple_set_block (stmt, p->new_block);
5799 #ifdef ENABLE_CHECKING
5800 else if (block != p->new_block)
5801 {
5802 while (block && block != p->orig_block)
5803 block = BLOCK_SUPERCONTEXT (block);
5804 gcc_assert (block);
5805 }
5806 #endif
5807
5808 if (is_gimple_omp (stmt)
5809 && gimple_code (stmt) != GIMPLE_OMP_RETURN
5810 && gimple_code (stmt) != GIMPLE_OMP_CONTINUE)
5811 {
5812 /* Do not remap variables inside OMP directives. Variables
5813 referenced in clauses and directive header belong to the
5814 parent function and should not be moved into the child
5815 function. */
5816 bool save_remap_decls_p = p->remap_decls_p;
5817 p->remap_decls_p = false;
5818 *handled_ops_p = true;
5819
5820 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r, move_stmt_op, wi);
5821
5822 p->remap_decls_p = save_remap_decls_p;
5823 }
5824
5825 return NULL_TREE;
5826 }
5827
5828 /* Marks virtual operands of all statements in basic block BB for
5829 renaming. */
5830
5831 void
5832 mark_virtual_ops_in_bb (basic_block bb)
5833 {
5834 gimple_stmt_iterator gsi;
5835
5836 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5837 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5838
5839 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5840 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5841 }
5842
5843 /* Move basic block BB from function CFUN to function DEST_FN. The
5844 block is moved out of the original linked list and placed after
5845 block AFTER in the new list. Also, the block is removed from the
5846 original array of blocks and placed in DEST_FN's array of blocks.
5847 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
5848 updated to reflect the moved edges.
5849
5850 The local variables are remapped to new instances, VARS_MAP is used
5851 to record the mapping. */
5852
5853 static void
5854 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5855 basic_block after, bool update_edge_count_p,
5856 struct move_stmt_d *d, int eh_offset)
5857 {
5858 struct control_flow_graph *cfg;
5859 edge_iterator ei;
5860 edge e;
5861 gimple_stmt_iterator si;
5862 unsigned old_len, new_len;
5863
5864 /* Remove BB from dominance structures. */
5865 delete_from_dominance_info (CDI_DOMINATORS, bb);
5866 if (current_loops)
5867 remove_bb_from_loops (bb);
5868
5869 /* Link BB to the new linked list. */
5870 move_block_after (bb, after);
5871
5872 /* Update the edge count in the corresponding flowgraphs. */
5873 if (update_edge_count_p)
5874 FOR_EACH_EDGE (e, ei, bb->succs)
5875 {
5876 cfun->cfg->x_n_edges--;
5877 dest_cfun->cfg->x_n_edges++;
5878 }
5879
5880 /* Remove BB from the original basic block array. */
5881 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5882 cfun->cfg->x_n_basic_blocks--;
5883
5884 /* Grow DEST_CFUN's basic block array if needed. */
5885 cfg = dest_cfun->cfg;
5886 cfg->x_n_basic_blocks++;
5887 if (bb->index >= cfg->x_last_basic_block)
5888 cfg->x_last_basic_block = bb->index + 1;
5889
5890 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5891 if ((unsigned) cfg->x_last_basic_block >= old_len)
5892 {
5893 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5894 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5895 new_len);
5896 }
5897
5898 VEC_replace (basic_block, cfg->x_basic_block_info,
5899 bb->index, bb);
5900
5901 /* Remap the variables in phi nodes. */
5902 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5903 {
5904 gimple phi = gsi_stmt (si);
5905 use_operand_p use;
5906 tree op = PHI_RESULT (phi);
5907 ssa_op_iter oi;
5908
5909 if (!is_gimple_reg (op))
5910 {
5911 /* Remove the phi nodes for virtual operands (alias analysis will be
5912 run for the new function, anyway). */
5913 remove_phi_node (&si, true);
5914 continue;
5915 }
5916
5917 SET_PHI_RESULT (phi,
5918 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5919 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5920 {
5921 op = USE_FROM_PTR (use);
5922 if (TREE_CODE (op) == SSA_NAME)
5923 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5924 }
5925
5926 gsi_next (&si);
5927 }
5928
5929 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5930 {
5931 gimple stmt = gsi_stmt (si);
5932 int region;
5933 struct walk_stmt_info wi;
5934
5935 memset (&wi, 0, sizeof (wi));
5936 wi.info = d;
5937 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5938
5939 if (gimple_code (stmt) == GIMPLE_LABEL)
5940 {
5941 tree label = gimple_label_label (stmt);
5942 int uid = LABEL_DECL_UID (label);
5943
5944 gcc_assert (uid > -1);
5945
5946 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5947 if (old_len <= (unsigned) uid)
5948 {
5949 new_len = 3 * uid / 2 + 1;
5950 VEC_safe_grow_cleared (basic_block, gc,
5951 cfg->x_label_to_block_map, new_len);
5952 }
5953
5954 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5955 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5956
5957 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5958
5959 if (uid >= dest_cfun->cfg->last_label_uid)
5960 dest_cfun->cfg->last_label_uid = uid + 1;
5961 }
5962 else if (gimple_code (stmt) == GIMPLE_RESX && eh_offset != 0)
5963 gimple_resx_set_region (stmt, gimple_resx_region (stmt) + eh_offset);
5964
5965 region = lookup_stmt_eh_region (stmt);
5966 if (region >= 0)
5967 {
5968 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5969 remove_stmt_from_eh_region (stmt);
5970 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5971 gimple_remove_stmt_histograms (cfun, stmt);
5972 }
5973
5974 /* We cannot leave any operands allocated from the operand caches of
5975 the current function. */
5976 free_stmt_operands (stmt);
5977 push_cfun (dest_cfun);
5978 update_stmt (stmt);
5979 pop_cfun ();
5980 }
5981
5982 FOR_EACH_EDGE (e, ei, bb->succs)
5983 if (e->goto_locus)
5984 {
5985 tree block = e->goto_block;
5986 if (d->orig_block == NULL_TREE
5987 || block == d->orig_block)
5988 e->goto_block = d->new_block;
5989 #ifdef ENABLE_CHECKING
5990 else if (block != d->new_block)
5991 {
5992 while (block && block != d->orig_block)
5993 block = BLOCK_SUPERCONTEXT (block);
5994 gcc_assert (block);
5995 }
5996 #endif
5997 }
5998 }
5999
6000 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6001 the outermost EH region. Use REGION as the incoming base EH region. */
6002
6003 static int
6004 find_outermost_region_in_block (struct function *src_cfun,
6005 basic_block bb, int region)
6006 {
6007 gimple_stmt_iterator si;
6008
6009 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6010 {
6011 gimple stmt = gsi_stmt (si);
6012 int stmt_region;
6013
6014 if (gimple_code (stmt) == GIMPLE_RESX)
6015 stmt_region = gimple_resx_region (stmt);
6016 else
6017 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
6018 if (stmt_region > 0)
6019 {
6020 if (region < 0)
6021 region = stmt_region;
6022 else if (stmt_region != region)
6023 {
6024 region = eh_region_outermost (src_cfun, stmt_region, region);
6025 gcc_assert (region != -1);
6026 }
6027 }
6028 }
6029
6030 return region;
6031 }
6032
6033 static tree
6034 new_label_mapper (tree decl, void *data)
6035 {
6036 htab_t hash = (htab_t) data;
6037 struct tree_map *m;
6038 void **slot;
6039
6040 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6041
6042 m = XNEW (struct tree_map);
6043 m->hash = DECL_UID (decl);
6044 m->base.from = decl;
6045 m->to = create_artificial_label (UNKNOWN_LOCATION);
6046 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6047 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6048 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6049
6050 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6051 gcc_assert (*slot == NULL);
6052
6053 *slot = m;
6054
6055 return m->to;
6056 }
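/* A minimal sketch, assuming libiberty's hashtab.h API as used above,
   of how a consumer might translate a label through the map that
   new_label_mapper populates (DECL is a hypothetical LABEL_DECL):

     struct tree_map in, *out;
     tree new_label;

     in.base.from = decl;
     out = (struct tree_map *) htab_find_with_hash (hash, &in,
                                                    DECL_UID (decl));
     new_label = out ? out->to : decl;

   tree_map_eq compares only base.from, so filling that one field is
   enough for the lookup.  */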
6057
6058 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6059 subblocks. */
6060
6061 static void
6062 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6063 tree to_context)
6064 {
6065 tree *tp, t;
6066
6067 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
6068 {
6069 t = *tp;
6070 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6071 continue;
6072 replace_by_duplicate_decl (&t, vars_map, to_context);
6073 if (t != *tp)
6074 {
6075 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6076 {
6077 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6078 DECL_HAS_VALUE_EXPR_P (t) = 1;
6079 }
6080 TREE_CHAIN (t) = TREE_CHAIN (*tp);
6081 *tp = t;
6082 }
6083 }
6084
6085 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6086 replace_block_vars_by_duplicates (block, vars_map, to_context);
6087 }
6088
6089 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6090 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6091 single basic block in the original CFG and the new basic block is
6092 returned. DEST_CFUN must not have a CFG yet.
6093
6094 Note that the region need not be a pure SESE region. Blocks inside
6095 the region may contain calls to abort/exit. The only restriction
6096 is that ENTRY_BB should be the only entry point and it must
6097 dominate EXIT_BB.
6098
6099 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6100 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6101 to the new function.
6102
6103 All local variables referenced in the region are assumed to be in
6104 the corresponding BLOCK_VARS and unexpanded variable lists
6105 associated with DEST_CFUN. */
6106
6107 basic_block
6108 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6109 basic_block exit_bb, tree orig_block)
6110 {
6111 VEC(basic_block,heap) *bbs, *dom_bbs;
6112 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6113 basic_block after, bb, *entry_pred, *exit_succ, abb;
6114 struct function *saved_cfun = cfun;
6115 int *entry_flag, *exit_flag, eh_offset;
6116 unsigned *entry_prob, *exit_prob;
6117 unsigned i, num_entry_edges, num_exit_edges;
6118 edge e;
6119 edge_iterator ei;
6120 htab_t new_label_map;
6121 struct pointer_map_t *vars_map;
6122 struct loop *loop = entry_bb->loop_father;
6123 struct move_stmt_d d;
6124
6125 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6126 region. */
6127 gcc_assert (entry_bb != exit_bb
6128 && (!exit_bb
6129 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6130
6131 /* Collect all the blocks in the region. Manually add ENTRY_BB
6132 because it won't be added by dfs_enumerate_from. */
6133 bbs = NULL;
6134 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6135 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6136
6137 /* The blocks that used to be dominated by something in BBS will now be
6138 dominated by the new block. */
6139 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6140 VEC_address (basic_block, bbs),
6141 VEC_length (basic_block, bbs));
6142
6143 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6144 the predecessor edges to ENTRY_BB and the successor edges to
6145 EXIT_BB so that we can re-attach them to the new basic block that
6146 will replace the region. */
6147 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6148 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6149 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6150 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6151 i = 0;
6152 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6153 {
6154 entry_prob[i] = e->probability;
6155 entry_flag[i] = e->flags;
6156 entry_pred[i++] = e->src;
6157 remove_edge (e);
6158 }
6159
6160 if (exit_bb)
6161 {
6162 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6163 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6164 sizeof (basic_block));
6165 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6166 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6167 i = 0;
6168 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6169 {
6170 exit_prob[i] = e->probability;
6171 exit_flag[i] = e->flags;
6172 exit_succ[i++] = e->dest;
6173 remove_edge (e);
6174 }
6175 }
6176 else
6177 {
6178 num_exit_edges = 0;
6179 exit_succ = NULL;
6180 exit_flag = NULL;
6181 exit_prob = NULL;
6182 }
6183
6184 /* Switch context to the child function to initialize DEST_FN's CFG. */
6185 gcc_assert (dest_cfun->cfg == NULL);
6186 push_cfun (dest_cfun);
6187
6188 init_empty_tree_cfg ();
6189
6190 /* Initialize EH information for the new function. */
6191 eh_offset = 0;
6192 new_label_map = NULL;
6193 if (saved_cfun->eh)
6194 {
6195 int region = -1;
6196
6197 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6198 region = find_outermost_region_in_block (saved_cfun, bb, region);
6199
6200 init_eh_for_function ();
6201 if (region != -1)
6202 {
6203 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6204 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
6205 new_label_map, region, 0);
6206 }
6207 }
6208
6209 pop_cfun ();
6210
6211 /* Move blocks from BBS into DEST_CFUN. */
6212 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6213 after = dest_cfun->cfg->x_entry_block_ptr;
6214 vars_map = pointer_map_create ();
6215
6216 memset (&d, 0, sizeof (d));
6217 d.vars_map = vars_map;
6218 d.from_context = cfun->decl;
6219 d.to_context = dest_cfun->decl;
6220 d.new_label_map = new_label_map;
6221 d.remap_decls_p = true;
6222 d.orig_block = orig_block;
6223 d.new_block = DECL_INITIAL (dest_cfun->decl);
6224
6225 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6226 {
6227 /* No need to update edge counts on the last block. It has
6228 already been updated earlier when we detached the region from
6229 the original CFG. */
6230 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d, eh_offset);
6231 after = bb;
6232 }
6233
6234 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6235 if (orig_block)
6236 {
6237 tree block;
6238 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6239 == NULL_TREE);
6240 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6241 = BLOCK_SUBBLOCKS (orig_block);
6242 for (block = BLOCK_SUBBLOCKS (orig_block);
6243 block; block = BLOCK_CHAIN (block))
6244 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6245 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6246 }
6247
6248 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6249 vars_map, dest_cfun->decl);
6250
6251 if (new_label_map)
6252 htab_delete (new_label_map);
6253 pointer_map_destroy (vars_map);
6254
6255 /* Rewire the entry and exit blocks. The successor to the entry
6256 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6257 the child function. Similarly, the predecessor of DEST_FN's
6258 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6259 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6260 various CFG manipulation functions get to the right CFG.
6261
6262 FIXME, this is silly. The CFG ought to become a parameter to
6263 these helpers. */
6264 push_cfun (dest_cfun);
6265 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6266 if (exit_bb)
6267 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6268 pop_cfun ();
6269
6270 /* Back in the original function, the SESE region has disappeared,
6271 create a new basic block in its place. */
6272 bb = create_empty_bb (entry_pred[0]);
6273 if (current_loops)
6274 add_bb_to_loop (bb, loop);
6275 for (i = 0; i < num_entry_edges; i++)
6276 {
6277 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6278 e->probability = entry_prob[i];
6279 }
6280
6281 for (i = 0; i < num_exit_edges; i++)
6282 {
6283 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6284 e->probability = exit_prob[i];
6285 }
6286
6287 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6288 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6289 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6290 VEC_free (basic_block, heap, dom_bbs);
6291
6292 if (exit_bb)
6293 {
6294 free (exit_prob);
6295 free (exit_flag);
6296 free (exit_succ);
6297 }
6298 free (entry_prob);
6299 free (entry_flag);
6300 free (entry_pred);
6301 VEC_free (basic_block, heap, bbs);
6302
6303 return bb;
6304 }
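/* A sketch of a typical caller (hypothetical names, modeled loosely on
   the OMP outlining code, which is the main client of this routine):

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, region_entry_bb,
                                 region_exit_bb, region_block);

   On return the original function contains NEW_BB where the region
   used to be, and CHILD_CFUN owns the moved blocks, labels and EH
   regions.  */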
6305
6306
6307 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6308 tree-pass.h). */
6309
6310 void
6311 dump_function_to_file (tree fn, FILE *file, int flags)
6312 {
6313 tree arg, vars, var;
6314 struct function *dsf;
6315 bool ignore_topmost_bind = false, any_var = false;
6316 basic_block bb;
6317 tree chain;
6318
6319 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6320
6321 arg = DECL_ARGUMENTS (fn);
6322 while (arg)
6323 {
6324 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6325 fprintf (file, " ");
6326 print_generic_expr (file, arg, dump_flags);
6327 if (flags & TDF_VERBOSE)
6328 print_node (file, "", arg, 4);
6329 if (TREE_CHAIN (arg))
6330 fprintf (file, ", ");
6331 arg = TREE_CHAIN (arg);
6332 }
6333 fprintf (file, ")\n");
6334
6335 if (flags & TDF_VERBOSE)
6336 print_node (file, "", fn, 2);
6337
6338 dsf = DECL_STRUCT_FUNCTION (fn);
6339 if (dsf && (flags & TDF_DETAILS))
6340 dump_eh_tree (file, dsf);
6341
6342 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6343 {
6344 dump_node (fn, TDF_SLIM | flags, file);
6345 return;
6346 }
6347
6348 /* Switch CFUN to point to FN. */
6349 push_cfun (DECL_STRUCT_FUNCTION (fn));
6350
6351 /* When GIMPLE is lowered, the variables are no longer available in
6352 BIND_EXPRs, so display them separately. */
6353 if (cfun && cfun->decl == fn && cfun->local_decls)
6354 {
6355 ignore_topmost_bind = true;
6356
6357 fprintf (file, "{\n");
6358 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6359 {
6360 var = TREE_VALUE (vars);
6361
6362 print_generic_decl (file, var, flags);
6363 if (flags & TDF_VERBOSE)
6364 print_node (file, "", var, 4);
6365 fprintf (file, "\n");
6366
6367 any_var = true;
6368 }
6369 }
6370
6371 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6372 {
6373 /* If the CFG has been built, emit a CFG-based dump. */
6374 check_bb_profile (ENTRY_BLOCK_PTR, file);
6375 if (!ignore_topmost_bind)
6376 fprintf (file, "{\n");
6377
6378 if (any_var && n_basic_blocks)
6379 fprintf (file, "\n");
6380
6381 FOR_EACH_BB (bb)
6382 gimple_dump_bb (bb, file, 2, flags);
6383
6384 fprintf (file, "}\n");
6385 check_bb_profile (EXIT_BLOCK_PTR, file);
6386 }
6387 else if (DECL_SAVED_TREE (fn) == NULL)
6388 {
6389 /* The function is now in GIMPLE form but the CFG has not been
6390 built yet. Emit the single sequence of GIMPLE statements
6391 that make up its body. */
6392 gimple_seq body = gimple_body (fn);
6393
6394 if (gimple_seq_first_stmt (body)
6395 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6396 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6397 print_gimple_seq (file, body, 0, flags);
6398 else
6399 {
6400 if (!ignore_topmost_bind)
6401 fprintf (file, "{\n");
6402
6403 if (any_var)
6404 fprintf (file, "\n");
6405
6406 print_gimple_seq (file, body, 2, flags);
6407 fprintf (file, "}\n");
6408 }
6409 }
6410 else
6411 {
6412 int indent;
6413
6414 /* Make a tree based dump. */
6415 chain = DECL_SAVED_TREE (fn);
6416
6417 if (chain && TREE_CODE (chain) == BIND_EXPR)
6418 {
6419 if (ignore_topmost_bind)
6420 {
6421 chain = BIND_EXPR_BODY (chain);
6422 indent = 2;
6423 }
6424 else
6425 indent = 0;
6426 }
6427 else
6428 {
6429 if (!ignore_topmost_bind)
6430 fprintf (file, "{\n");
6431 indent = 2;
6432 }
6433
6434 if (any_var)
6435 fprintf (file, "\n");
6436
6437 print_generic_stmt_indented (file, chain, flags, indent);
6438 if (ignore_topmost_bind)
6439 fprintf (file, "}\n");
6440 }
6441
6442 fprintf (file, "\n\n");
6443
6444 /* Restore CFUN. */
6445 pop_cfun ();
6446 }
6447
6448
6449 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree-pass.h). */
6450
6451 void
6452 debug_function (tree fn, int flags)
6453 {
6454 dump_function_to_file (fn, stderr, flags);
6455 }
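/* debug_function is meant to be invoked from a debugger; for example,
   from a gdb session inside cc1 one might use:

     (gdb) call debug_function (current_function_decl, 0)

   passing TDF_* flags such as TDF_VOPS or TDF_DETAILS for more
   detail.  */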
6456
6457
6458 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6459
6460 static void
6461 print_pred_bbs (FILE *file, basic_block bb)
6462 {
6463 edge e;
6464 edge_iterator ei;
6465
6466 FOR_EACH_EDGE (e, ei, bb->preds)
6467 fprintf (file, "bb_%d ", e->src->index);
6468 }
6469
6470
6471 /* Print on FILE the indexes for the successors of basic_block BB. */
6472
6473 static void
6474 print_succ_bbs (FILE *file, basic_block bb)
6475 {
6476 edge e;
6477 edge_iterator ei;
6478
6479 FOR_EACH_EDGE (e, ei, bb->succs)
6480 fprintf (file, "bb_%d ", e->dest->index);
6481 }
6482
6483 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
6484
6485 void
6486 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6487 {
6488 char *s_indent = (char *) alloca ((size_t) indent + 1);
6489 memset ((void *) s_indent, ' ', (size_t) indent);
6490 s_indent[indent] = '\0';
6491
6492 /* Print basic_block's header. */
6493 if (verbosity >= 2)
6494 {
6495 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6496 print_pred_bbs (file, bb);
6497 fprintf (file, "}, succs = {");
6498 print_succ_bbs (file, bb);
6499 fprintf (file, "})\n");
6500 }
6501
6502 /* Print basic_block's body. */
6503 if (verbosity >= 3)
6504 {
6505 fprintf (file, "%s {\n", s_indent);
6506 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6507 fprintf (file, "%s }\n", s_indent);
6508 }
6509 }
6510
6511 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6512
6513 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
6514 the VERBOSITY level, this outputs either the contents of the loop
6515 or just its structure.
6516
6517 static void
6518 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6519 {
6520 char *s_indent;
6521 basic_block bb;
6522
6523 if (loop == NULL)
6524 return;
6525
6526 s_indent = (char *) alloca ((size_t) indent + 1);
6527 memset ((void *) s_indent, ' ', (size_t) indent);
6528 s_indent[indent] = '\0';
6529
6530 /* Print loop's header. */
6531 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6532 loop->num, loop->header->index, loop->latch->index);
6533 fprintf (file, ", niter = ");
6534 print_generic_expr (file, loop->nb_iterations, 0);
6535
6536 if (loop->any_upper_bound)
6537 {
6538 fprintf (file, ", upper_bound = ");
6539 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6540 }
6541
6542 if (loop->any_estimate)
6543 {
6544 fprintf (file, ", estimate = ");
6545 dump_double_int (file, loop->nb_iterations_estimate, true);
6546 }
6547 fprintf (file, ")\n");
6548
6549 /* Print loop's body. */
6550 if (verbosity >= 1)
6551 {
6552 fprintf (file, "%s{\n", s_indent);
6553 FOR_EACH_BB (bb)
6554 if (bb->loop_father == loop)
6555 print_loops_bb (file, bb, indent, verbosity);
6556
6557 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6558 fprintf (file, "%s}\n", s_indent);
6559 }
6560 }
6561
6562 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6563 spaces. Depending on the VERBOSITY level, this outputs either the
6564 contents of the loop or just its structure.
6565
6566 static void
6567 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6568 {
6569 if (loop == NULL)
6570 return;
6571
6572 print_loop (file, loop, indent, verbosity);
6573 print_loop_and_siblings (file, loop->next, indent, verbosity);
6574 }
6575
6576 /* Starting from the entry block of the current function, pretty
6577 print the loop hierarchy on FILE. */
6578
6579 void
6580 print_loops (FILE *file, int verbosity)
6581 {
6582 basic_block bb;
6583
6584 bb = ENTRY_BLOCK_PTR;
6585 if (bb && bb->loop_father)
6586 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6587 }
6588
6589
6590 /* Debug the loop structure at the tree level, at some VERBOSITY level. */
6591
6592 void
6593 debug_loops (int verbosity)
6594 {
6595 print_loops (stderr, verbosity);
6596 }
6597
6598 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6599
6600 void
6601 debug_loop (struct loop *loop, int verbosity)
6602 {
6603 print_loop (stderr, loop, 0, verbosity);
6604 }
6605
6606 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6607 level. */
6608
6609 void
6610 debug_loop_num (unsigned num, int verbosity)
6611 {
6612 debug_loop (get_loop (num), verbosity);
6613 }
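/* Summarizing the printers above, the VERBOSITY levels decode as:
   0 prints just the header line of the outermost loop(s); 1
   additionally recurses into nested loops; 2 also lists each loop's
   basic blocks with their predecessor/successor indexes; 3 further
   dumps the GIMPLE body of every block.  */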
6614
6615 /* Return true if BB ends with a call, possibly followed by some
6616 instructions that must stay with the call. Return false
6617 otherwise. */
6618
6619 static bool
6620 gimple_block_ends_with_call_p (basic_block bb)
6621 {
6622 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6623 return is_gimple_call (gsi_stmt (gsi));
6624 }
6625
6626
6627 /* Return true if BB ends with a conditional branch. Return false
6628 otherwise. */
6629
6630 static bool
6631 gimple_block_ends_with_condjump_p (const_basic_block bb)
6632 {
6633 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6634 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6635 }
6636
6637
6638 /* Return true if we need to add fake edge to exit at statement T.
6639 Helper function for gimple_flow_call_edges_add. */
6640
6641 static bool
6642 need_fake_edge_p (gimple t)
6643 {
6644 tree fndecl = NULL_TREE;
6645 int call_flags = 0;
6646
6647 /* NORETURN and LONGJMP calls already have an edge to exit.
6648 CONST and PURE calls do not need one.
6649 We don't currently check for CONST and PURE here, although
6650 it would be a good idea, because those attributes are
6651 figured out from the RTL in mark_constant_function, and
6652 the counter incrementation code from -fprofile-arcs
6653 leads to different results from -fbranch-probabilities. */
6654 if (is_gimple_call (t))
6655 {
6656 fndecl = gimple_call_fndecl (t);
6657 call_flags = gimple_call_flags (t);
6658 }
6659
6660 if (is_gimple_call (t)
6661 && fndecl
6662 && DECL_BUILT_IN (fndecl)
6663 && (call_flags & ECF_NOTHROW)
6664 && !(call_flags & ECF_RETURNS_TWICE)
6665 /* fork() doesn't really return twice, but the effect of
6666 wrapping it in __gcov_fork() which calls __gcov_flush()
6667 and clears the counters before forking has the same
6668 effect as returning twice. Force a fake edge. */
6669 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6670 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6671 return false;
6672
6673 if (is_gimple_call (t)
6674 && !(call_flags & ECF_NORETURN))
6675 return true;
6676
6677 if (gimple_code (t) == GIMPLE_ASM
6678 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6679 return true;
6680
6681 return false;
6682 }
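/* For example: a plain call 'foo ();' with no ECF_NORETURN flag needs
   a fake edge, since foo may call exit or longjmp; a nothrow builtin
   such as __builtin_memcpy does not; __builtin_fork always does, for
   the __gcov_fork reason given above.  */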
6683
6684
6685 /* Add fake edges to the function exit for any non-constant and
6686 non-noreturn calls, and for volatile inline assembly, in the blocks
6687 specified by the bitmap BLOCKS, or in the whole CFG if BLOCKS is zero. Return
6688 the number of blocks that were split.
6689
6690 The goal is to expose cases in which entering a basic block does
6691 not imply that all subsequent instructions must be executed. */
6692
6693 static int
6694 gimple_flow_call_edges_add (sbitmap blocks)
6695 {
6696 int i;
6697 int blocks_split = 0;
6698 int last_bb = last_basic_block;
6699 bool check_last_block = false;
6700
6701 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6702 return 0;
6703
6704 if (! blocks)
6705 check_last_block = true;
6706 else
6707 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6708
6709 /* In the last basic block, before epilogue generation, there will be
6710 a fallthru edge to EXIT. Special care is required if the last insn
6711 of the last basic block is a call because make_edge folds duplicate
6712 edges, which would result in the fallthru edge also being marked
6713 fake, which would result in the fallthru edge being removed by
6714 remove_fake_edges, which would result in an invalid CFG.
6715
6716 Moreover, we can't elide the outgoing fake edge, since the block
6717 profiler needs to take this into account in order to solve the minimal
6718 spanning tree in the case that the call doesn't return.
6719
6720 Handle this by adding a dummy instruction in a new last basic block. */
6721 if (check_last_block)
6722 {
6723 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6724 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6725 gimple t = NULL;
6726
6727 if (!gsi_end_p (gsi))
6728 t = gsi_stmt (gsi);
6729
6730 if (t && need_fake_edge_p (t))
6731 {
6732 edge e;
6733
6734 e = find_edge (bb, EXIT_BLOCK_PTR);
6735 if (e)
6736 {
6737 gsi_insert_on_edge (e, gimple_build_nop ());
6738 gsi_commit_edge_inserts ();
6739 }
6740 }
6741 }
6742
6743 /* Now add fake edges to the function exit for any non-constant
6744 calls, since there is no way to determine whether they will
6745 return. */
6746 for (i = 0; i < last_bb; i++)
6747 {
6748 basic_block bb = BASIC_BLOCK (i);
6749 gimple_stmt_iterator gsi;
6750 gimple stmt, last_stmt;
6751
6752 if (!bb)
6753 continue;
6754
6755 if (blocks && !TEST_BIT (blocks, i))
6756 continue;
6757
6758 gsi = gsi_last_bb (bb);
6759 if (!gsi_end_p (gsi))
6760 {
6761 last_stmt = gsi_stmt (gsi);
6762 do
6763 {
6764 stmt = gsi_stmt (gsi);
6765 if (need_fake_edge_p (stmt))
6766 {
6767 edge e;
6768
6769 /* The handling above of the final block before the
6770 epilogue should be enough to verify that there is
6771 no edge to the exit block in CFG already.
6772 Calling make_edge in such case would cause us to
6773 mark that edge as fake and remove it later. */
6774 #ifdef ENABLE_CHECKING
6775 if (stmt == last_stmt)
6776 {
6777 e = find_edge (bb, EXIT_BLOCK_PTR);
6778 gcc_assert (e == NULL);
6779 }
6780 #endif
6781
6782 /* Note that the following may create a new basic block
6783 and renumber the existing basic blocks. */
6784 if (stmt != last_stmt)
6785 {
6786 e = split_block (bb, stmt);
6787 if (e)
6788 blocks_split++;
6789 }
6790 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6791 }
6792 gsi_prev (&gsi);
6793 }
6794 while (!gsi_end_p (gsi));
6795 }
6796 }
6797
6798 if (blocks_split)
6799 verify_flow_info ();
6800
6801 return blocks_split;
6802 }
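/* The net effect on a block 'x = foo (); y = x + 1;' is to split it
   right after the call and attach a fake edge to EXIT:

     before:  [ x = foo (); y = x + 1; ] --> succs
     after:   [ x = foo (); ] --fallthru--> [ y = x + 1; ] --> succs
                    \
                     +--EDGE_FAKE--> EXIT

   which is what lets the block profiler account for calls that never
   return.  */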
6803
6804 /* Purge dead abnormal call edges from basic block BB. */
6805
6806 bool
6807 gimple_purge_dead_abnormal_call_edges (basic_block bb)
6808 {
6809 bool changed = gimple_purge_dead_eh_edges (bb);
6810
6811 if (cfun->has_nonlocal_label)
6812 {
6813 gimple stmt = last_stmt (bb);
6814 edge_iterator ei;
6815 edge e;
6816
6817 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
6818 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6819 {
6820 if (e->flags & EDGE_ABNORMAL)
6821 {
6822 remove_edge (e);
6823 changed = true;
6824 }
6825 else
6826 ei_next (&ei);
6827 }
6828
6829 /* See gimple_purge_dead_eh_edges below. */
6830 if (changed)
6831 free_dominance_info (CDI_DOMINATORS);
6832 }
6833
6834 return changed;
6835 }
6836
6837 /* Removes edge E and all the blocks dominated by it, and updates dominance
6838 information. The IL in E->src needs to be updated separately.
6839 If dominance info is not available, only the edge E is removed. */
6840
6841 void
6842 remove_edge_and_dominated_blocks (edge e)
6843 {
6844 VEC (basic_block, heap) *bbs_to_remove = NULL;
6845 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6846 bitmap df, df_idom;
6847 edge f;
6848 edge_iterator ei;
6849 bool none_removed = false;
6850 unsigned i;
6851 basic_block bb, dbb;
6852 bitmap_iterator bi;
6853
6854 if (!dom_info_available_p (CDI_DOMINATORS))
6855 {
6856 remove_edge (e);
6857 return;
6858 }
6859
6860 /* No updating is needed for edges to exit. */
6861 if (e->dest == EXIT_BLOCK_PTR)
6862 {
6863 if (cfgcleanup_altered_bbs)
6864 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6865 remove_edge (e);
6866 return;
6867 }
6868
6869 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6870 that is not dominated by E->dest, then this set is empty. Otherwise,
6871 all the basic blocks dominated by E->dest are removed.
6872
6873 Also, to DF_IDOM we store the immediate dominators of the blocks in
6874 the dominance frontier of E (i.e., of the successors of the
6875 removed blocks, if there are any, and of E->dest otherwise). */
6876 FOR_EACH_EDGE (f, ei, e->dest->preds)
6877 {
6878 if (f == e)
6879 continue;
6880
6881 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6882 {
6883 none_removed = true;
6884 break;
6885 }
6886 }
6887
6888 df = BITMAP_ALLOC (NULL);
6889 df_idom = BITMAP_ALLOC (NULL);
6890
6891 if (none_removed)
6892 bitmap_set_bit (df_idom,
6893 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6894 else
6895 {
6896 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
6897 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6898 {
6899 FOR_EACH_EDGE (f, ei, bb->succs)
6900 {
6901 if (f->dest != EXIT_BLOCK_PTR)
6902 bitmap_set_bit (df, f->dest->index);
6903 }
6904 }
6905 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6906 bitmap_clear_bit (df, bb->index);
6907
6908 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6909 {
6910 bb = BASIC_BLOCK (i);
6911 bitmap_set_bit (df_idom,
6912 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6913 }
6914 }
6915
6916 if (cfgcleanup_altered_bbs)
6917 {
6918 /* Record the set of the altered basic blocks. */
6919 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6920 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6921 }
6922
6923 /* Remove E and the cancelled blocks. */
6924 if (none_removed)
6925 remove_edge (e);
6926 else
6927 {
6928 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6929 delete_basic_block (bb);
6930 }
6931
6932 /* Update the dominance information. The immediate dominator may change only
6933 for blocks whose immediate dominator belongs to DF_IDOM:
6934
6935 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6936 removal. Let Z be the block such that idom(Z) = Y and
6937 Z dominates X after the removal. Before removal, there exists a path P
6938 from Y to X that avoids Z. Let F be the last edge on P that is
6939 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6940 dominates W, and because of P, Z does not dominate W), and W belongs to
6941 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
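/* A concrete instance of the argument above: in a diamond A->{B,C},
   B->D, C->D, removing edge B->D deletes no block (C, a predecessor
   of D, is not dominated by D), DF_IDOM becomes {A} (the old
   immediate dominator of D), and iterate_fix_dominators then
   recomputes idom(D) = C among A's dominator sons.  */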
6942 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6943 {
6944 bb = BASIC_BLOCK (i);
6945 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6946 dbb;
6947 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6948 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6949 }
6950
6951 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6952
6953 BITMAP_FREE (df);
6954 BITMAP_FREE (df_idom);
6955 VEC_free (basic_block, heap, bbs_to_remove);
6956 VEC_free (basic_block, heap, bbs_to_fix_dom);
6957 }
6958
6959 /* Purge dead EH edges from basic block BB. */
6960
6961 bool
6962 gimple_purge_dead_eh_edges (basic_block bb)
6963 {
6964 bool changed = false;
6965 edge e;
6966 edge_iterator ei;
6967 gimple stmt = last_stmt (bb);
6968
6969 if (stmt && stmt_can_throw_internal (stmt))
6970 return false;
6971
6972 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6973 {
6974 if (e->flags & EDGE_EH)
6975 {
6976 remove_edge_and_dominated_blocks (e);
6977 changed = true;
6978 }
6979 else
6980 ei_next (&ei);
6981 }
6982
6983 return changed;
6984 }
6985
6986 bool
6987 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
6988 {
6989 bool changed = false;
6990 unsigned i;
6991 bitmap_iterator bi;
6992
6993 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6994 {
6995 basic_block bb = BASIC_BLOCK (i);
6996
6997 /* Earlier gimple_purge_dead_eh_edges could have removed
6998 this basic block already. */
6999 gcc_assert (bb || changed);
7000 if (bb != NULL)
7001 changed |= gimple_purge_dead_eh_edges (bb);
7002 }
7003
7004 return changed;
7005 }
7006
7007 /* This function is called whenever a new edge is created or
7008 redirected. */
7009
7010 static void
7011 gimple_execute_on_growing_pred (edge e)
7012 {
7013 basic_block bb = e->dest;
7014
7015 if (phi_nodes (bb))
7016 reserve_phi_args_for_new_edge (bb);
7017 }
7018
7019 /* This function is called immediately before edge E is removed from
7020 the edge vector E->dest->preds. */
7021
7022 static void
7023 gimple_execute_on_shrinking_pred (edge e)
7024 {
7025 if (phi_nodes (e->dest))
7026 remove_phi_args (e);
7027 }
7028
7029 /*---------------------------------------------------------------------------
7030 Helper functions for Loop versioning
7031 ---------------------------------------------------------------------------*/
7032
7033 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7034 of 'first'. Both of them are dominated by 'new_head' basic block. When
7035 'new_head' was created by splitting 'second's incoming edge, the edge
7036 from 'new_head' to 'second' received phi arguments from split_edge ().
7037 Later, an additional edge 'e' was created to connect 'new_head' and
7038 'first'. This routine now adds, on edge 'e', the same phi args that
7039 the 'new_head' to 'second' edge received as part of the edge splitting. */
7040
7041 static void
7042 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7043 basic_block new_head, edge e)
7044 {
7045 gimple phi1, phi2;
7046 gimple_stmt_iterator psi1, psi2;
7047 tree def;
7048 edge e2 = find_edge (new_head, second);
7049
7050 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7051 edge, we should always have an edge from NEW_HEAD to SECOND. */
7052 gcc_assert (e2 != NULL);
7053
7054 /* Walk all the phi nodes of basic block 'second' and add phi args to
7055 edge 'e' for 'first' head. PHI args are always in correct order. */
7056
7057 for (psi2 = gsi_start_phis (second),
7058 psi1 = gsi_start_phis (first);
7059 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7060 gsi_next (&psi2), gsi_next (&psi1))
7061 {
7062 phi1 = gsi_stmt (psi1);
7063 phi2 = gsi_stmt (psi2);
7064 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7065 add_phi_arg (phi1, def, e);
7066 }
7067 }
7068
7069
7070 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7071 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7072 the destination of the ELSE part. */
7073
7074 static void
7075 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7076 basic_block second_head ATTRIBUTE_UNUSED,
7077 basic_block cond_bb, void *cond_e)
7078 {
7079 gimple_stmt_iterator gsi;
7080 gimple new_cond_expr;
7081 tree cond_expr = (tree) cond_e;
7082 edge e0;
7083
7084 /* Build the new conditional expression. */
7085 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7086 NULL_TREE, NULL_TREE);
7087
7088 /* Add new cond in cond_bb. */
7089 gsi = gsi_last_bb (cond_bb);
7090 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7091
7092 /* Adjust edges appropriately to connect new head with first head
7093 as well as second head. */
7094 e0 = single_succ_edge (cond_bb);
7095 e0->flags &= ~EDGE_FALLTHRU;
7096 e0->flags |= EDGE_FALSE_VALUE;
7097 }
7098
7099 struct cfg_hooks gimple_cfg_hooks = {
7100 "gimple",
7101 gimple_verify_flow_info,
7102 gimple_dump_bb, /* dump_bb */
7103 create_bb, /* create_basic_block */
7104 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7105 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7106 gimple_can_remove_branch_p, /* can_remove_branch_p */
7107 remove_bb, /* delete_basic_block */
7108 gimple_split_block, /* split_block */
7109 gimple_move_block_after, /* move_block_after */
7110 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7111 gimple_merge_blocks, /* merge_blocks */
7112 gimple_predict_edge, /* predict_edge */
7113 gimple_predicted_by_p, /* predicted_by_p */
7114 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7115 gimple_duplicate_bb, /* duplicate_block */
7116 gimple_split_edge, /* split_edge */
7117 gimple_make_forwarder_block, /* make_forwarder_block */
7118 NULL, /* tidy_fallthru_edge */
7119 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7120 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7121 gimple_flow_call_edges_add, /* flow_call_edges_add */
7122 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7123 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7124 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7125 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7126 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
7127 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7128 flush_pending_stmts /* flush_pending_stmts */
7129 };
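/* Generic CFG routines in cfghooks.c dispatch through this table, so
   e.g. a call to split_edge (e) on a GIMPLE body lands in
   gimple_split_edge via the split_edge slot above; the RTL passes
   install the analogous rtl_cfg_hooks table.  */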
7130
7131
7132 /* Split all critical edges. */
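/* An edge is critical (EDGE_CRITICAL_P) when its source block has more
   than one successor and its destination block has more than one
   predecessor; statements cannot be inserted on such an edge without
   affecting some other path, so a new block must be interposed
   first.  */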
7133
7134 static unsigned int
7135 split_critical_edges (void)
7136 {
7137 basic_block bb;
7138 edge e;
7139 edge_iterator ei;
7140
7141 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7142 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7143 mappings around the calls to split_edge. */
7144 start_recording_case_labels ();
7145 FOR_ALL_BB (bb)
7146 {
7147 FOR_EACH_EDGE (e, ei, bb->succs)
7148 {
7149 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7150 split_edge (e);
7151 /* PRE inserts statements on edges and expects that, because
7152 split_critical_edges was run beforehand, committing edge
7153 insertions will not split more edges. In addition to critical
7154 edges we must also split edges whose source block has multiple
7155 successors and ends in a control flow statement, such as RESX.
7156 Go ahead and split them too. This matches the logic in
7157 gimple_find_edge_insert_loc. */
7158 else if ((!single_pred_p (e->dest)
7159 || phi_nodes (e->dest)
7160 || e->dest == EXIT_BLOCK_PTR)
7161 && e->src != ENTRY_BLOCK_PTR
7162 && !(e->flags & EDGE_ABNORMAL))
7163 {
7164 gimple_stmt_iterator gsi;
7165
7166 gsi = gsi_last_bb (e->src);
7167 if (!gsi_end_p (gsi)
7168 && stmt_ends_bb_p (gsi_stmt (gsi))
7169 && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
7170 split_edge (e);
7171 }
7172 }
7173 }
7174 end_recording_case_labels ();
7175 return 0;
7176 }
7177
7178 struct gimple_opt_pass pass_split_crit_edges =
7179 {
7180 {
7181 GIMPLE_PASS,
7182 "crited", /* name */
7183 NULL, /* gate */
7184 split_critical_edges, /* execute */
7185 NULL, /* sub */
7186 NULL, /* next */
7187 0, /* static_pass_number */
7188 TV_TREE_SPLIT_EDGES, /* tv_id */
7189 PROP_cfg, /* properties_required */
7190 PROP_no_crit_edges, /* properties_provided */
7191 0, /* properties_destroyed */
7192 0, /* todo_flags_start */
7193 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
7194 }
7195 };
7196
7197
7198 /* Build a ternary operation and gimplify it. Emit code before GSI.
7199 Return the gimple_val holding the result. */
7200
7201 tree
7202 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7203 tree type, tree a, tree b, tree c)
7204 {
7205 tree ret;
7206
7207 ret = fold_build3 (code, type, a, b, c);
7208 STRIP_NOPS (ret);
7209
7210 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7211 GSI_SAME_STMT);
7212 }
7213
7214 /* Build a binary operation and gimplify it. Emit code before GSI.
7215 Return the gimple_val holding the result. */
7216
7217 tree
7218 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7219 tree type, tree a, tree b)
7220 {
7221 tree ret;
7222
7223 ret = fold_build2 (code, type, a, b);
7224 STRIP_NOPS (ret);
7225
7226 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7227 GSI_SAME_STMT);
7228 }
7229
7230 /* Build a unary operation and gimplify it. Emit code before GSI.
7231 Return the gimple_val holding the result. */
7232
7233 tree
7234 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7235 tree a)
7236 {
7237 tree ret;
7238
7239 ret = fold_build1 (code, type, a);
7240 STRIP_NOPS (ret);
7241
7242 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7243 GSI_SAME_STMT);
7244 }
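/* A minimal usage sketch (hypothetical caller): to compute a*b + c in
   front of the statement at *GSI using the helpers above:

     tree t1 = gimplify_build2 (gsi, MULT_EXPR, type, a, b);
     tree t2 = gimplify_build2 (gsi, PLUS_EXPR, type, t1, c);

   Each call folds, strips nops, and forces the result into a fresh
   gimple value inserted before *GSI.  */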
7245
7246
7247 \f
7248 /* Warn about %<noreturn%> functions that return, and about control reaching the end of a non-void function. */
7249
7250 static unsigned int
7251 execute_warn_function_return (void)
7252 {
7253 source_location location;
7254 gimple last;
7255 edge e;
7256 edge_iterator ei;
7257
7258 /* If we have a path to EXIT, then we do return. */
7259 if (TREE_THIS_VOLATILE (cfun->decl)
7260 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7261 {
7262 location = UNKNOWN_LOCATION;
7263 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7264 {
7265 last = last_stmt (e->src);
7266 if (gimple_code (last) == GIMPLE_RETURN
7267 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7268 break;
7269 }
7270 if (location == UNKNOWN_LOCATION)
7271 location = cfun->function_end_locus;
7272 warning (0, "%H%<noreturn%> function does return", &location);
7273 }
7274
7275 /* If we see "return;" in some basic block, then we do reach the end
7276 without returning a value. */
7277 else if (warn_return_type
7278 && !TREE_NO_WARNING (cfun->decl)
7279 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7280 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7281 {
7282 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7283 {
7284 gimple last = last_stmt (e->src);
7285 if (gimple_code (last) == GIMPLE_RETURN
7286 && gimple_return_retval (last) == NULL
7287 && !gimple_no_warning_p (last))
7288 {
7289 location = gimple_location (last);
7290 if (location == UNKNOWN_LOCATION)
7291 location = cfun->function_end_locus;
7292 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7293 TREE_NO_WARNING (cfun->decl) = 1;
7294 break;
7295 }
7296 }
7297 }
7298 return 0;
7299 }
7300
7301
7302 /* Given a basic block B which ends with a conditional and has
7303 precisely two successors, determine which of the edges is taken if
7304 the conditional is true and which is taken if the conditional is
7305 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7306
7307 void
7308 extract_true_false_edges_from_block (basic_block b,
7309 edge *true_edge,
7310 edge *false_edge)
7311 {
7312 edge e = EDGE_SUCC (b, 0);
7313
7314 if (e->flags & EDGE_TRUE_VALUE)
7315 {
7316 *true_edge = e;
7317 *false_edge = EDGE_SUCC (b, 1);
7318 }
7319 else
7320 {
7321 *false_edge = e;
7322 *true_edge = EDGE_SUCC (b, 1);
7323 }
7324 }
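/* Typical use, for a block B known to end in a GIMPLE_COND:

     edge true_e, false_e;
     extract_true_false_edges_from_block (b, &true_e, &false_e);

   after which true_e->dest is the block reached when the condition
   evaluates to true.  */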
7325
7326 struct gimple_opt_pass pass_warn_function_return =
7327 {
7328 {
7329 GIMPLE_PASS,
7330 NULL, /* name */
7331 NULL, /* gate */
7332 execute_warn_function_return, /* execute */
7333 NULL, /* sub */
7334 NULL, /* next */
7335 0, /* static_pass_number */
7336 TV_NONE, /* tv_id */
7337 PROP_cfg, /* properties_required */
7338 0, /* properties_provided */
7339 0, /* properties_destroyed */
7340 0, /* todo_flags_start */
7341 0 /* todo_flags_finish */
7342 }
7343 };
7344
7345 /* Warn about functions that might be candidates for attribute %<noreturn%>. */
7346
7347 static unsigned int
7348 execute_warn_function_noreturn (void)
7349 {
7350 if (warn_missing_noreturn
7351 && !TREE_THIS_VOLATILE (cfun->decl)
7352 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
7353 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
7354 warning (OPT_Wmissing_noreturn, "%Jfunction might be a candidate "
7355 "for attribute %<noreturn%>",
7356 cfun->decl);
7357 return 0;
7358 }
7359
7360 struct gimple_opt_pass pass_warn_function_noreturn =
7361 {
7362 {
7363 GIMPLE_PASS,
7364 NULL, /* name */
7365 NULL, /* gate */
7366 execute_warn_function_noreturn, /* execute */
7367 NULL, /* sub */
7368 NULL, /* next */
7369 0, /* static_pass_number */
7370 TV_NONE, /* tv_id */
7371 PROP_cfg, /* properties_required */
7372 0, /* properties_provided */
7373 0, /* properties_destroyed */
7374 0, /* todo_flags_start */
7375 0 /* todo_flags_finish */
7376 }
7377 };