/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
static void group_case_labels_stmt (gimple);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",                                /* name */
  NULL,                                 /* gate */
  execute_build_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CFG,                          /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func                      /* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
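
/* Illustrative aside (not part of the original file): at the source level
   a computed goto is GNU C's indirect jump through a label value, e.g.

       static void *targets[] = { &&lab_a, &&lab_b };
       goto *targets[i];    // becomes a GIMPLE_GOTO whose destination
                            // is not a LABEL_DECL
     lab_a: ;
     lab_b: ;

   whereas a plain "goto lab_a;" keeps a LABEL_DECL destination and is
   therefore rejected by the predicate above.  */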


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}
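
/* Illustrative sketch of the factoring transformation (hand-written
   GIMPLE-style pseudocode with assumed block and variable names; this is
   not output copied from the compiler):

     before factoring:              after factoring:
       bb1: goto *p1_1;               bb1: gotovar = p1_1; goto <L>;
       bb2: goto *p2_2;               bb2: gotovar = p2_2; goto <L>;
                                      bbF: <L>: goto *gotovar;

   Every original computed goto degenerates into an assignment plus a
   simple goto, so only the single factored block bbF has to fan out to
   all potential destinations when the edges are created.  */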


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            seq = gsi_split_seq_before (&i);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
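
/* Illustrative sketch (GIMPLE-style pseudocode, assumed SSA names) of the
   LHS-temporary trick above, for a setjmp-like call that can make an
   abnormal goto:

     x_1 = setjmp (env);     becomes     tmp_3 = setjmp (env);
                                         x_1 = tmp_3;

   The copy lands on the fallthru path after the call, so on the abnormal
   edge the previous value of x is still the live one and the SSA names
   for x do not acquire overlapping life ranges.  */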


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = ggc_alloc_cleared_gimple_bb_info ();
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
                                  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                              (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
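
/* Worked example (assumed values, for illustration only): the counter for
   a locus starts at zero and is pre-incremented, so

     int d1 = next_discriminator_for_locus (loc);   // 1, for, say, foo.c:42
     int d2 = next_discriminator_for_locus (loc);   // 2, for the next block

   letting a sample-based profiler tell apart several blocks that all map
   to foo.c:42 by their discriminator in the line table.  */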

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && strcmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = TREE_CHAIN (t);
      TREE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return false;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}
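
/* Illustrative usage sketch (hypothetical caller, not code from this
   file): a pass that redirects switch edges brackets its work like

     start_recording_case_labels ();
     ...
     tree cases = get_cases_for_edge (e, switch_stmt);
     ... walk the TREE_CHAIN of CASES, updating each CASE_LABEL for E ...
     end_recording_case_labels ();

   so the CASE_LABEL_EXPRs reaching a given edge come straight from the
   map instead of a rescan of the whole case vector per redirection.  */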

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  location_t stmt_loc = gimple_location (stmt);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
      assign_discriminator (stmt_loc, label_bb);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user-defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            tree true_label = gimple_cond_true_label (stmt);
            tree false_label = gimple_cond_false_label (stmt);

            if (true_label)
              gimple_cond_set_true_label (stmt, main_block_label (true_label));
            if (false_label)
              gimple_cond_set_false_label (stmt, main_block_label (false_label));
            break;
          }

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                tree label = main_block_label (CASE_LABEL (case_label));
                CASE_LABEL (case_label) = label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              tree new_dest = main_block_label (gimple_goto_dest (stmt));
              gimple_goto_set_dest (stmt, new_dest);
            }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g., three separate entries 1: 2: 3: become one entry 1..3:  */

static void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  tree default_case = NULL_TREE;
  tree default_label = NULL_TREE;
  bool has_default;

  /* The default label is always the first case in a switch
     statement after gimplification if it was not optimized
     away.  */
  if (!CASE_LOW (gimple_switch_default_label (stmt))
      && !CASE_HIGH (gimple_switch_default_label (stmt)))
    {
      default_case = gimple_switch_default_label (stmt);
      default_label = CASE_LABEL (default_case);
      has_default = true;
    }
  else
    has_default = false;

  /* Look for possible opportunities to merge cases.  */
  if (has_default)
    i = 1;
  else
    i = 0;
  while (i < old_size)
    {
      tree base_case, base_label, base_high;
      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_label = CASE_LABEL (base_case);

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_label == default_label)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          tree merge_label = CASE_LABEL (merge_case);
          tree t = int_const_binop (PLUS_EXPR, base_high,
                                    integer_one_node, 1);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_label == base_label
              && tree_int_cst_equal (CASE_LOW (merge_case), t))
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
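
/* Illustrative before/after (source-level view of what the grouping above
   achieves; assumed labels):

     switch (x) {                    switch (x) {
       case 1: goto L;                 case 1 ... 3: goto L;
       case 2: goto L;                 default: goto D;
       case 3: goto L;               }
       default: goto D;
     }

   Consecutive cases jumping to the same label collapse into one range,
   and cases whose destination equals the default's are dropped.  */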

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user labels.  */
      if (!DECL_ARTIFICIAL (lab))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Return true if the var whose chain of uses starts at HEAD has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}

/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
                  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
        if (single_use)
          {
            single_use = NULL;
            break;
          }
        single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return !!single_use;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          size_t i;

          fold_stmt_inplace (stmt);
          if (cfgcleanup_altered_bbs)
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  This should go in update_stmt.  */
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              tree op = gimple_op (stmt, i);
              /* Operands may be empty here.  For example, the labels
                 of a GIMPLE_COND are nulled out following the creation
                 of the corresponding CFG edges.  */
              if (op && TREE_CODE (op) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (op);
            }

          maybe_clean_or_replace_eh_stmt (stmt, stmt);
          update_stmt (stmt);
        }
    }

  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
                              || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a CALL_EXPR.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (bb, dump_file, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
         not a label then we cannot construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}
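
/* Illustrative usage (hypothetical caller): when propagation folds the
   controlling predicate of BB to a constant, a cleanup pass can ask

     edge taken = find_taken_edge (bb, boolean_true_node);

   and, for a GIMPLE_COND, receives the EDGE_TRUE_VALUE successor; the
   other outgoing edge is then known to be unreachable.  */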
1918
1919 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1920 statement, determine which of the outgoing edges will be taken out of the
1921 block. Return NULL if either edge may be taken. */
1922
1923 static edge
1924 find_taken_edge_computed_goto (basic_block bb, tree val)
1925 {
1926 basic_block dest;
1927 edge e = NULL;
1928
1929 dest = label_to_block (val);
1930 if (dest)
1931 {
1932 e = find_edge (bb, dest);
1933 gcc_assert (e != NULL);
1934 }
1935
1936 return e;
1937 }
1938
1939 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1940 statement, determine which of the two edges will be taken out of the
1941 block. Return NULL if either edge may be taken. */
1942
1943 static edge
1944 find_taken_edge_cond_expr (basic_block bb, tree val)
1945 {
1946 edge true_edge, false_edge;
1947
1948 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1949
1950 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1951 return (integer_zerop (val) ? false_edge : true_edge);
1952 }
1953
1954 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1955 statement, determine which edge will be taken out of the block. Return
1956 NULL if any edge may be taken. */
1957
1958 static edge
1959 find_taken_edge_switch_expr (basic_block bb, tree val)
1960 {
1961 basic_block dest_bb;
1962 edge e;
1963 gimple switch_stmt;
1964 tree taken_case;
1965
1966 switch_stmt = last_stmt (bb);
1967 taken_case = find_case_label_for_value (switch_stmt, val);
1968 dest_bb = label_to_block (CASE_LABEL (taken_case));
1969
1970 e = find_edge (bb, dest_bb);
1971 gcc_assert (e);
1972 return e;
1973 }
1974
1975
1976 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
1977 We can make optimal use here of the fact that the case labels are
1978 sorted: We can do a binary search for a case matching VAL. */
1979
1980 static tree
1981 find_case_label_for_value (gimple switch_stmt, tree val)
1982 {
1983 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
1984 tree default_case = gimple_switch_default_label (switch_stmt);
1985
1986 for (low = 0, high = n; high - low > 1; )
1987 {
1988 size_t i = (high + low) / 2;
1989 tree t = gimple_switch_label (switch_stmt, i);
1990 int cmp;
1991
1992 /* Cache the result of comparing CASE_LOW and VAL. */
1993 cmp = tree_int_cst_compare (CASE_LOW (t), val);
1994
1995 if (cmp > 0)
1996 high = i;
1997 else
1998 low = i;
1999
2000 if (CASE_HIGH (t) == NULL)
2001 {
2002 /* A single-valued case label. */
2003 if (cmp == 0)
2004 return t;
2005 }
2006 else
2007 {
2008 /* A case range. We can only handle integer ranges. */
2009 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2010 return t;
2011 }
2012 }
2013
2014 return default_case;
2015 }
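
/* Worked example with illustrative values: for a switch whose sorted
   case labels are 1, 5 and the range 10 ... 20, plus the default, a
   query for VAL == 12 narrows [LOW, HIGH) until it probes the range
   label, sees CASE_LOW <= 12 <= CASE_HIGH and returns that
   CASE_LABEL_EXPR; a query for VAL == 7 exhausts the loop without a
   match and falls back to the default case.  */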
2016
2017
2018 /* Dump a basic block on stderr. */
2019
2020 void
2021 gimple_debug_bb (basic_block bb)
2022 {
2023 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2024 }
2025
2026
2027 /* Dump basic block with index N on stderr. */
2028
2029 basic_block
2030 gimple_debug_bb_n (int n)
2031 {
2032 gimple_debug_bb (BASIC_BLOCK (n));
2033 return BASIC_BLOCK (n);
2034 }
2035
2036
2037 /* Dump the CFG on stderr.
2038
2039 FLAGS are the same as those used by the tree dumping functions
2040 (see TDF_* in tree-pass.h). */
2041
2042 void
2043 gimple_debug_cfg (int flags)
2044 {
2045 gimple_dump_cfg (stderr, flags);
2046 }
2047
2048
2049 /* Dump the program showing basic block boundaries on the given FILE.
2050
2051 FLAGS are the same as those used by the tree dumping functions (see TDF_* in
2052 tree.h). */
2053
2054 void
2055 gimple_dump_cfg (FILE *file, int flags)
2056 {
2057 if (flags & TDF_DETAILS)
2058 {
2059 const char *funcname
2060 = lang_hooks.decl_printable_name (current_function_decl, 2);
2061
2062 fputc ('\n', file);
2063 fprintf (file, ";; Function %s\n\n", funcname);
2064 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2065 n_basic_blocks, n_edges, last_basic_block);
2066
2067 brief_dump_cfg (file);
2068 fprintf (file, "\n");
2069 }
2070
2071 if (flags & TDF_STATS)
2072 dump_cfg_stats (file);
2073
2074 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2075 }
2076
2077
2078 /* Dump CFG statistics on FILE. */
2079
2080 void
2081 dump_cfg_stats (FILE *file)
2082 {
2083 static long max_num_merged_labels = 0;
2084 unsigned long size, total = 0;
2085 long num_edges;
2086 basic_block bb;
2087 const char * const fmt_str = "%-30s%-13s%12s\n";
2088 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2089 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2090 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2091 const char *funcname
2092 = lang_hooks.decl_printable_name (current_function_decl, 2);
2093
2094
2095 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2096
2097 fprintf (file, "---------------------------------------------------------\n");
2098 fprintf (file, fmt_str, "", " Number of ", "Memory");
2099 fprintf (file, fmt_str, "", " instances ", "used ");
2100 fprintf (file, "---------------------------------------------------------\n");
2101
2102 size = n_basic_blocks * sizeof (struct basic_block_def);
2103 total += size;
2104 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2105 SCALE (size), LABEL (size));
2106
2107 num_edges = 0;
2108 FOR_EACH_BB (bb)
2109 num_edges += EDGE_COUNT (bb->succs);
2110 size = num_edges * sizeof (struct edge_def);
2111 total += size;
2112 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2113
2114 fprintf (file, "---------------------------------------------------------\n");
2115 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2116 LABEL (total));
2117 fprintf (file, "---------------------------------------------------------\n");
2118 fprintf (file, "\n");
2119
2120 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2121 max_num_merged_labels = cfg_stats.num_merged_labels;
2122
2123 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2124 cfg_stats.num_merged_labels, max_num_merged_labels);
2125
2126 fprintf (file, "\n");
2127 }
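
/* For orientation, the table printed above has roughly the shape
   below; the figures are illustrative and the size suffixes come from
   the SCALE and LABEL macros:

     CFG Statistics for foo

     ---------------------------------------------------------
                                    Number of     Memory
                                    instances     used
     ---------------------------------------------------------
     Basic blocks                           12         1k
     Edges                                  16         1k
     ---------------------------------------------------------
     Total memory used by CFG data                     2k
     ---------------------------------------------------------  */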
2128
2129
2130 /* Dump CFG statistics on stderr. Keep extern so that it's always
2131 linked in the final executable. */
2132
2133 DEBUG_FUNCTION void
2134 debug_cfg_stats (void)
2135 {
2136 dump_cfg_stats (stderr);
2137 }
2138
2139
2140 /* Dump the flowgraph to a .vcg FILE. */
2141
2142 static void
2143 gimple_cfg2vcg (FILE *file)
2144 {
2145 edge e;
2146 edge_iterator ei;
2147 basic_block bb;
2148 const char *funcname
2149 = lang_hooks.decl_printable_name (current_function_decl, 2);
2150
2151 /* Write the file header. */
2152 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2153 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2154 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2155
2156 /* Write blocks and edges. */
2157 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2158 {
2159 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2160 e->dest->index);
2161
2162 if (e->flags & EDGE_FAKE)
2163 fprintf (file, " linestyle: dotted priority: 10");
2164 else
2165 fprintf (file, " linestyle: solid priority: 100");
2166
2167 fprintf (file, " }\n");
2168 }
2169 fputc ('\n', file);
2170
2171 FOR_EACH_BB (bb)
2172 {
2173 enum gimple_code head_code, end_code;
2174 const char *head_name, *end_name;
2175 int head_line = 0;
2176 int end_line = 0;
2177 gimple first = first_stmt (bb);
2178 gimple last = last_stmt (bb);
2179
2180 if (first)
2181 {
2182 head_code = gimple_code (first);
2183 head_name = gimple_code_name[head_code];
2184 head_line = get_lineno (first);
2185 }
2186 else
2187 head_name = "no-statement";
2188
2189 if (last)
2190 {
2191 end_code = gimple_code (last);
2192 end_name = gimple_code_name[end_code];
2193 end_line = get_lineno (last);
2194 }
2195 else
2196 end_name = "no-statement";
2197
2198 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2199 bb->index, bb->index, head_name, head_line, end_name,
2200 end_line);
2201
2202 FOR_EACH_EDGE (e, ei, bb->succs)
2203 {
2204 if (e->dest == EXIT_BLOCK_PTR)
2205 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2206 else
2207 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2208
2209 if (e->flags & EDGE_FAKE)
2210 fprintf (file, " priority: 10 linestyle: dotted");
2211 else
2212 fprintf (file, " priority: 100 linestyle: solid");
2213
2214 fprintf (file, " }\n");
2215 }
2216
2217 if (bb->next_bb != EXIT_BLOCK_PTR)
2218 fputc ('\n', file);
2219 }
2220
2221 fputs ("}\n\n", file);
2222 }
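
/* Pieced together from the fprintf calls above, the emitted VCG text
   for a trivial function looks roughly like

     graph: { title: "foo"
     node: { title: "ENTRY" label: "ENTRY" }
     node: { title: "EXIT" label: "EXIT" }
     edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }

     node: { title: "2" label: "#2\nGIMPLE_LABEL (3)\nGIMPLE_RETURN (4)"}
     edge: { sourcename: "2" targetname: "EXIT" priority: 100 linestyle: solid }
     }

   where the block indices and line numbers are illustrative.  */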
2223
2224
2225
2226 /*---------------------------------------------------------------------------
2227 Miscellaneous helpers
2228 ---------------------------------------------------------------------------*/
2229
2230 /* Return true if T represents a stmt that always transfers control. */
2231
2232 bool
2233 is_ctrl_stmt (gimple t)
2234 {
2235 switch (gimple_code (t))
2236 {
2237 case GIMPLE_COND:
2238 case GIMPLE_SWITCH:
2239 case GIMPLE_GOTO:
2240 case GIMPLE_RETURN:
2241 case GIMPLE_RESX:
2242 return true;
2243 default:
2244 return false;
2245 }
2246 }
2247
2248
2249 /* Return true if T is a statement that may alter the flow of control
2250 (e.g., a call to a non-returning function). */
2251
2252 bool
2253 is_ctrl_altering_stmt (gimple t)
2254 {
2255 gcc_assert (t);
2256
2257 switch (gimple_code (t))
2258 {
2259 case GIMPLE_CALL:
2260 {
2261 int flags = gimple_call_flags (t);
2262
2263 /* A non-pure/const call alters control flow if the current
2264 function has nonlocal labels. */
2265 if (!(flags & (ECF_CONST | ECF_PURE)) && cfun->has_nonlocal_label)
2266 return true;
2267
2268 /* A call also alters control flow if it does not return. */
2269 if (flags & ECF_NORETURN)
2270 return true;
2271
2272 /* A BUILT_IN_RETURN call is the same as a return statement. */
2273 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2274 return true;
2275 }
2276 break;
2277
2278 case GIMPLE_EH_DISPATCH:
2279 /* EH_DISPATCH branches to the individual catch handlers at
2280 this level of a try or allowed-exceptions region. It can
2281 fallthru to the next statement as well. */
2282 return true;
2283
2284 case GIMPLE_ASM:
2285 if (gimple_asm_nlabels (t) > 0)
2286 return true;
2287 break;
2288
2289 CASE_GIMPLE_OMP:
2290 /* OpenMP directives alter control flow. */
2291 return true;
2292
2293 default:
2294 break;
2295 }
2296
2297 /* If a statement can throw, it alters control flow. */
2298 return stmt_can_throw_internal (t);
2299 }
2300
2301
2302 /* Return true if T is a simple local goto. */
2303
2304 bool
2305 simple_goto_p (gimple t)
2306 {
2307 return (gimple_code (t) == GIMPLE_GOTO
2308 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2309 }
2310
2311
2312 /* Return true if T can make an abnormal transfer of control flow.
2313 Transfers of control flow associated with EH are excluded. */
2314
2315 bool
2316 stmt_can_make_abnormal_goto (gimple t)
2317 {
2318 if (computed_goto_p (t))
2319 return true;
2320 if (is_gimple_call (t))
2321 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2322 return false;
2323 }
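
/* Example: a computed "goto *p_1;" may transfer to any FORCED_LABEL
   in the function, and a call with side effects may return through a
   nonlocal label when cfun->has_nonlocal_label is set, e.g. when a
   nested function performs a nonlocal goto into its parent.  */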
2324
2325
2326 /* Return true if STMT should start a new basic block. PREV_STMT is
2327 the statement preceding STMT. It is used when STMT is a label or a
2328 case label. Labels should only start a new basic block if the
2329 preceding statement wasn't a label. Otherwise, a sequence of labels
2330 would generate unnecessary basic blocks that only contain a single
2331 label. */
2332
2333 static inline bool
2334 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2335 {
2336 if (stmt == NULL)
2337 return false;
2338
2339 /* Labels start a new basic block only if the preceding statement
2340 wasn't a label of the same type. This prevents the creation of
2341 consecutive blocks that have nothing but a single label. */
2342 if (gimple_code (stmt) == GIMPLE_LABEL)
2343 {
2344 /* Nonlocal and computed GOTO targets always start a new block. */
2345 if (DECL_NONLOCAL (gimple_label_label (stmt))
2346 || FORCED_LABEL (gimple_label_label (stmt)))
2347 return true;
2348
2349 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2350 {
2351 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2352 return true;
2353
2354 cfg_stats.num_merged_labels++;
2355 return false;
2356 }
2357 else
2358 return true;
2359 }
2360
2361 return false;
2362 }
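
/* Example: in the GIMPLE sequence

     L1:
     L2:
     x_1 = a_2 + 1;

   only L1 starts a new basic block; L2 is merged into the same block
   and counted in cfg_stats.num_merged_labels.  Were either label
   nonlocal or forced, it would start a block of its own instead.  */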
2363
2364
2365 /* Return true if T should end a basic block. */
2366
2367 bool
2368 stmt_ends_bb_p (gimple t)
2369 {
2370 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2371 }
2372
2373 /* Remove block annotations and other data structures. */
2374
2375 void
2376 delete_tree_cfg_annotations (void)
2377 {
2378 label_to_block_map = NULL;
2379 }
2380
2381
2382 /* Return the first statement in basic block BB. */
2383
2384 gimple
2385 first_stmt (basic_block bb)
2386 {
2387 gimple_stmt_iterator i = gsi_start_bb (bb);
2388 gimple stmt = NULL;
2389
2390 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2391 {
2392 gsi_next (&i);
2393 stmt = NULL;
2394 }
2395 return stmt;
2396 }
2397
2398 /* Return the first non-label statement in basic block BB. */
2399
2400 static gimple
2401 first_non_label_stmt (basic_block bb)
2402 {
2403 gimple_stmt_iterator i = gsi_start_bb (bb);
2404 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2405 gsi_next (&i);
2406 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2407 }
2408
2409 /* Return the last statement in basic block BB. */
2410
2411 gimple
2412 last_stmt (basic_block bb)
2413 {
2414 gimple_stmt_iterator i = gsi_last_bb (bb);
2415 gimple stmt = NULL;
2416
2417 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2418 {
2419 gsi_prev (&i);
2420 stmt = NULL;
2421 }
2422 return stmt;
2423 }
2424
2425 /* Return the last statement of an otherwise empty block. Return NULL
2426 if the block is totally empty, or if it contains more than one
2427 statement. */
2428
2429 gimple
2430 last_and_only_stmt (basic_block bb)
2431 {
2432 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2433 gimple last, prev;
2434
2435 if (gsi_end_p (i))
2436 return NULL;
2437
2438 last = gsi_stmt (i);
2439 gsi_prev_nondebug (&i);
2440 if (gsi_end_p (i))
2441 return last;
2442
2443 /* Empty statements should no longer appear in the instruction stream.
2444 Everything that might have appeared before should be deleted by
2445 remove_useless_stmts, and the optimizers should just gsi_remove
2446 instead of smashing with build_empty_stmt.
2447
2448 Thus the only thing that should appear here in a block containing
2449 one executable statement is a label. */
2450 prev = gsi_stmt (i);
2451 if (gimple_code (prev) == GIMPLE_LABEL)
2452 return last;
2453 else
2454 return NULL;
2455 }
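
/* Example: for a block containing only

     L3:
     return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN, since the only other
   statement is a label.  It returns NULL for an empty block and for a
   block with two executable statements; debug statements are skipped
   at both ends.  */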
2456
2457 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2458
2459 static void
2460 reinstall_phi_args (edge new_edge, edge old_edge)
2461 {
2462 edge_var_map_vector v;
2463 edge_var_map *vm;
2464 int i;
2465 gimple_stmt_iterator phis;
2466
2467 v = redirect_edge_var_map_vector (old_edge);
2468 if (!v)
2469 return;
2470
2471 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2472 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2473 i++, gsi_next (&phis))
2474 {
2475 gimple phi = gsi_stmt (phis);
2476 tree result = redirect_edge_var_map_result (vm);
2477 tree arg = redirect_edge_var_map_def (vm);
2478
2479 gcc_assert (result == gimple_phi_result (phi));
2480
2481 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2482 }
2483
2484 redirect_edge_var_map_clear (old_edge);
2485 }
2486
2487 /* Returns the basic block after which the new basic block created
2488 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2489 near its "logical" location. This is of most help to humans looking
2490 at debugging dumps. */
2491
2492 static basic_block
2493 split_edge_bb_loc (edge edge_in)
2494 {
2495 basic_block dest = edge_in->dest;
2496 basic_block dest_prev = dest->prev_bb;
2497
2498 if (dest_prev)
2499 {
2500 edge e = find_edge (dest_prev, dest);
2501 if (e && !(e->flags & EDGE_COMPLEX))
2502 return edge_in->src;
2503 }
2504 return dest_prev;
2505 }
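
/* Example: when DEST's previous block already reaches DEST through a
   simple (non-complex) edge, the new block is placed after
   EDGE_IN->src so that the fallthru pair stays adjacent in the dumps;
   otherwise the new block slots in directly before DEST.  */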
2506
2507 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2508 Abort on abnormal edges. */
2509
2510 static basic_block
2511 gimple_split_edge (edge edge_in)
2512 {
2513 basic_block new_bb, after_bb, dest;
2514 edge new_edge, e;
2515
2516 /* Abnormal edges cannot be split. */
2517 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2518
2519 dest = edge_in->dest;
2520
2521 after_bb = split_edge_bb_loc (edge_in);
2522
2523 new_bb = create_empty_bb (after_bb);
2524 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2525 new_bb->count = edge_in->count;
2526 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2527 new_edge->probability = REG_BR_PROB_BASE;
2528 new_edge->count = edge_in->count;
2529
2530 e = redirect_edge_and_branch (edge_in, new_bb);
2531 gcc_assert (e == edge_in);
2532 reinstall_phi_args (new_edge, e);
2533
2534 return new_bb;
2535 }
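
/* Sketch of the effect, with arbitrary block names:

     before:  A ---EDGE_IN---> B
     after:   A ------> N ---EDGE_FALLTHRU---> B

   The new block N inherits the frequency and count of the split edge,
   and the PHI arguments queued on the redirected edge are reinstalled
   on the new fallthru edge by reinstall_phi_args.  */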
2536
2537
2538 /* Verify properties of the address expression T with base object BASE. */
2539
2540 static tree
2541 verify_address (tree t, tree base)
2542 {
2543 bool old_constant;
2544 bool old_side_effects;
2545 bool new_constant;
2546 bool new_side_effects;
2547
2548 old_constant = TREE_CONSTANT (t);
2549 old_side_effects = TREE_SIDE_EFFECTS (t);
2550
2551 recompute_tree_invariant_for_addr_expr (t);
2552 new_side_effects = TREE_SIDE_EFFECTS (t);
2553 new_constant = TREE_CONSTANT (t);
2554
2555 if (old_constant != new_constant)
2556 {
2557 error ("constant not recomputed when ADDR_EXPR changed");
2558 return t;
2559 }
2560 if (old_side_effects != new_side_effects)
2561 {
2562 error ("side effects not recomputed when ADDR_EXPR changed");
2563 return t;
2564 }
2565
2566 if (!(TREE_CODE (base) == VAR_DECL
2567 || TREE_CODE (base) == PARM_DECL
2568 || TREE_CODE (base) == RESULT_DECL))
2569 return NULL_TREE;
2570
2571 if (DECL_GIMPLE_REG_P (base))
2572 {
2573 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2574 return base;
2575 }
2576
2577 return NULL_TREE;
2578 }
2579
2580 /* Callback for walk_tree; check that all elements with address
2581 taken are properly noticed as such. DATA is unused and present
2582 only to match the walk_tree callback signature. */
2583
2584 static tree
2585 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2586 {
2587 tree t = *tp, x;
2588
2589 if (TYPE_P (t))
2590 *walk_subtrees = 0;
2591
2592 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2593 #define CHECK_OP(N, MSG) \
2594 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2595 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2596
2597 switch (TREE_CODE (t))
2598 {
2599 case SSA_NAME:
2600 if (SSA_NAME_IN_FREE_LIST (t))
2601 {
2602 error ("SSA name in freelist but still referenced");
2603 return *tp;
2604 }
2605 break;
2606
2607 case INDIRECT_REF:
2608 error ("INDIRECT_REF in gimple IL");
2609 return t;
2610
2611 case MEM_REF:
2612 x = TREE_OPERAND (t, 0);
2613 if (!POINTER_TYPE_P (TREE_TYPE (x))
2614 || !is_gimple_mem_ref_addr (x))
2615 {
2616 error ("Invalid first operand of MEM_REF.");
2617 return x;
2618 }
2619 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2620 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2621 {
2622 error ("Invalid offset operand of MEM_REF.");
2623 return TREE_OPERAND (t, 1);
2624 }
2625 if (TREE_CODE (x) == ADDR_EXPR
2626 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2627 return x;
2628 *walk_subtrees = 0;
2629 break;
2630
2631 case ASSERT_EXPR:
2632 x = fold (ASSERT_EXPR_COND (t));
2633 if (x == boolean_false_node)
2634 {
2635 error ("ASSERT_EXPR with an always-false condition");
2636 return *tp;
2637 }
2638 break;
2639
2640 case MODIFY_EXPR:
2641 error ("MODIFY_EXPR not expected while having tuples.");
2642 return *tp;
2643
2644 case ADDR_EXPR:
2645 {
2646 tree tem;
2647
2648 gcc_assert (is_gimple_address (t));
2649
2650 /* Skip any references (they will be checked when we recurse down the
2651 tree) and ensure that any variable used as a prefix is marked
2652 addressable. */
2653 for (x = TREE_OPERAND (t, 0);
2654 handled_component_p (x);
2655 x = TREE_OPERAND (x, 0))
2656 ;
2657
2658 if ((tem = verify_address (t, x)))
2659 return tem;
2660
2661 if (!(TREE_CODE (x) == VAR_DECL
2662 || TREE_CODE (x) == PARM_DECL
2663 || TREE_CODE (x) == RESULT_DECL))
2664 return NULL;
2665
2666 if (!TREE_ADDRESSABLE (x))
2667 {
2668 error ("address taken, but ADDRESSABLE bit not set");
2669 return x;
2670 }
2671
2672 break;
2673 }
2674
2675 case COND_EXPR:
2676 x = COND_EXPR_COND (t);
2677 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2678 {
2679 error ("non-integral used in condition");
2680 return x;
2681 }
2682 if (!is_gimple_condexpr (x))
2683 {
2684 error ("invalid conditional operand");
2685 return x;
2686 }
2687 break;
2688
2689 case NON_LVALUE_EXPR:
2690 gcc_unreachable ();
2691
2692 CASE_CONVERT:
2693 case FIX_TRUNC_EXPR:
2694 case FLOAT_EXPR:
2695 case NEGATE_EXPR:
2696 case ABS_EXPR:
2697 case BIT_NOT_EXPR:
2698 case TRUTH_NOT_EXPR:
2699 CHECK_OP (0, "invalid operand to unary operator");
2700 break;
2701
2702 case REALPART_EXPR:
2703 case IMAGPART_EXPR:
2704 case COMPONENT_REF:
2705 case ARRAY_REF:
2706 case ARRAY_RANGE_REF:
2707 case BIT_FIELD_REF:
2708 case VIEW_CONVERT_EXPR:
2709 /* We have a nest of references. Verify that each of the operands
2710 that determine where to reference is either a constant or a variable,
2711 verify that the base is valid, and then show we've already checked
2712 the subtrees. */
2713 while (handled_component_p (t))
2714 {
2715 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2716 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2717 else if (TREE_CODE (t) == ARRAY_REF
2718 || TREE_CODE (t) == ARRAY_RANGE_REF)
2719 {
2720 CHECK_OP (1, "invalid array index");
2721 if (TREE_OPERAND (t, 2))
2722 CHECK_OP (2, "invalid array lower bound");
2723 if (TREE_OPERAND (t, 3))
2724 CHECK_OP (3, "invalid array stride");
2725 }
2726 else if (TREE_CODE (t) == BIT_FIELD_REF)
2727 {
2728 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2729 || !host_integerp (TREE_OPERAND (t, 2), 1))
2730 {
2731 error ("invalid position or size operand to BIT_FIELD_REF");
2732 return t;
2733 }
2734 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2735 && (TYPE_PRECISION (TREE_TYPE (t))
2736 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2737 {
2738 error ("integral result type precision does not match "
2739 "field size of BIT_FIELD_REF");
2740 return t;
2741 }
2742 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2743 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2744 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2745 {
2746 error ("mode precision of non-integral result does not "
2747 "match field size of BIT_FIELD_REF");
2748 return t;
2749 }
2750 }
2751
2752 t = TREE_OPERAND (t, 0);
2753 }
2754
2755 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2756 {
2757 error ("invalid reference prefix");
2758 return t;
2759 }
2760 *walk_subtrees = 0;
2761 break;
2762 case PLUS_EXPR:
2763 case MINUS_EXPR:
2764 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2765 should be done using POINTER_PLUS_EXPR. */
2766 if (POINTER_TYPE_P (TREE_TYPE (t)))
2767 {
2768 error ("invalid operand to plus/minus, type is a pointer");
2769 return t;
2770 }
2771 CHECK_OP (0, "invalid operand to binary operator");
2772 CHECK_OP (1, "invalid operand to binary operator");
2773 break;
2774
2775 case POINTER_PLUS_EXPR:
2776 /* Check to make sure the first operand is a pointer or reference type. */
2777 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2778 {
2779 error ("invalid operand to pointer plus, first operand is not a pointer");
2780 return t;
2781 }
2782 /* Check to make sure the second operand is an integer of type
2783 sizetype. */
2784 if (!useless_type_conversion_p (sizetype,
2785 TREE_TYPE (TREE_OPERAND (t, 1))))
2786 {
2787 error ("invalid operand to pointer plus, second operand is not an "
2788 "integer with type of sizetype.");
2789 return t;
2790 }
2791 /* FALLTHROUGH */
2792 case LT_EXPR:
2793 case LE_EXPR:
2794 case GT_EXPR:
2795 case GE_EXPR:
2796 case EQ_EXPR:
2797 case NE_EXPR:
2798 case UNORDERED_EXPR:
2799 case ORDERED_EXPR:
2800 case UNLT_EXPR:
2801 case UNLE_EXPR:
2802 case UNGT_EXPR:
2803 case UNGE_EXPR:
2804 case UNEQ_EXPR:
2805 case LTGT_EXPR:
2806 case MULT_EXPR:
2807 case TRUNC_DIV_EXPR:
2808 case CEIL_DIV_EXPR:
2809 case FLOOR_DIV_EXPR:
2810 case ROUND_DIV_EXPR:
2811 case TRUNC_MOD_EXPR:
2812 case CEIL_MOD_EXPR:
2813 case FLOOR_MOD_EXPR:
2814 case ROUND_MOD_EXPR:
2815 case RDIV_EXPR:
2816 case EXACT_DIV_EXPR:
2817 case MIN_EXPR:
2818 case MAX_EXPR:
2819 case LSHIFT_EXPR:
2820 case RSHIFT_EXPR:
2821 case LROTATE_EXPR:
2822 case RROTATE_EXPR:
2823 case BIT_IOR_EXPR:
2824 case BIT_XOR_EXPR:
2825 case BIT_AND_EXPR:
2826 CHECK_OP (0, "invalid operand to binary operator");
2827 CHECK_OP (1, "invalid operand to binary operator");
2828 break;
2829
2830 case CONSTRUCTOR:
2831 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2832 *walk_subtrees = 0;
2833 break;
2834
2835 default:
2836 break;
2837 }
2838 return NULL;
2839
2840 #undef CHECK_OP
2841 }
2842
2843
2844 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2845 Returns true if there is an error, otherwise false. */
2846
2847 static bool
2848 verify_types_in_gimple_min_lval (tree expr)
2849 {
2850 tree op;
2851
2852 if (is_gimple_id (expr))
2853 return false;
2854
2855 if (TREE_CODE (expr) != MISALIGNED_INDIRECT_REF
2856 && TREE_CODE (expr) != TARGET_MEM_REF
2857 && TREE_CODE (expr) != MEM_REF)
2858 {
2859 error ("invalid expression for min lvalue");
2860 return true;
2861 }
2862
2863 /* TARGET_MEM_REFs are strange beasts. */
2864 if (TREE_CODE (expr) == TARGET_MEM_REF)
2865 return false;
2866
2867 op = TREE_OPERAND (expr, 0);
2868 if (!is_gimple_val (op))
2869 {
2870 error ("invalid operand in indirect reference");
2871 debug_generic_stmt (op);
2872 return true;
2873 }
2874 /* Memory references now generally can involve a value conversion. */
2875
2876 return false;
2877 }
2878
2879 /* Verify if EXPR is a valid GIMPLE reference expression. If
2880 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2881 if there is an error, otherwise false. */
2882
2883 static bool
2884 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2885 {
2886 while (handled_component_p (expr))
2887 {
2888 tree op = TREE_OPERAND (expr, 0);
2889
2890 if (TREE_CODE (expr) == ARRAY_REF
2891 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2892 {
2893 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2894 || (TREE_OPERAND (expr, 2)
2895 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2896 || (TREE_OPERAND (expr, 3)
2897 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2898 {
2899 error ("invalid operands to array reference");
2900 debug_generic_stmt (expr);
2901 return true;
2902 }
2903 }
2904
2905 /* Verify if the reference array element types are compatible. */
2906 if (TREE_CODE (expr) == ARRAY_REF
2907 && !useless_type_conversion_p (TREE_TYPE (expr),
2908 TREE_TYPE (TREE_TYPE (op))))
2909 {
2910 error ("type mismatch in array reference");
2911 debug_generic_stmt (TREE_TYPE (expr));
2912 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2913 return true;
2914 }
2915 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2916 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2917 TREE_TYPE (TREE_TYPE (op))))
2918 {
2919 error ("type mismatch in array range reference");
2920 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2921 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2922 return true;
2923 }
2924
2925 if ((TREE_CODE (expr) == REALPART_EXPR
2926 || TREE_CODE (expr) == IMAGPART_EXPR)
2927 && !useless_type_conversion_p (TREE_TYPE (expr),
2928 TREE_TYPE (TREE_TYPE (op))))
2929 {
2930 error ("type mismatch in real/imagpart reference");
2931 debug_generic_stmt (TREE_TYPE (expr));
2932 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2933 return true;
2934 }
2935
2936 if (TREE_CODE (expr) == COMPONENT_REF
2937 && !useless_type_conversion_p (TREE_TYPE (expr),
2938 TREE_TYPE (TREE_OPERAND (expr, 1))))
2939 {
2940 error ("type mismatch in component reference");
2941 debug_generic_stmt (TREE_TYPE (expr));
2942 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2943 return true;
2944 }
2945
2946 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2947 {
2948 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2949 that their operand is not an SSA name or an invariant when
2950 requiring an lvalue (this usually means there is an SRA or IPA-SRA
2951 bug). Otherwise there is nothing to verify, gross mismatches at
2952 most invoke undefined behavior. */
2953 if (require_lvalue
2954 && (TREE_CODE (op) == SSA_NAME
2955 || is_gimple_min_invariant (op)))
2956 {
2957 error ("Conversion of an SSA_NAME on the left hand side.");
2958 debug_generic_stmt (expr);
2959 return true;
2960 }
2961 else if (TREE_CODE (op) == SSA_NAME
2962 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2963 {
2964 error ("Conversion of register to a different size.");
2965 debug_generic_stmt (expr);
2966 return true;
2967 }
2968 else if (!handled_component_p (op))
2969 return false;
2970 }
2971
2972 expr = op;
2973 }
2974
2975 if (TREE_CODE (expr) == MEM_REF)
2976 {
2977 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
2978 {
2979 error ("Invalid address operand in MEM_REF.");
2980 debug_generic_stmt (expr);
2981 return true;
2982 }
2983 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
2984 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
2985 {
2986 error ("Invalid offset operand in MEM_REF.");
2987 debug_generic_stmt (expr);
2988 return true;
2989 }
2990 }
2991 else if (TREE_CODE (expr) == TARGET_MEM_REF)
2992 {
2993 if (TMR_SYMBOL (expr)
2994 && TMR_BASE (expr)
2995 && !useless_type_conversion_p (sizetype, TREE_TYPE (TMR_BASE (expr))))
2996 {
2997 error ("Non-sizetype base in TARGET_MEM_REF with symbol");
2998 return true;
2999 }
3000 if (!TMR_OFFSET (expr)
3001 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3002 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3003 {
3004 error ("Invalid offset operand in TARGET_MEM_REF.");
3005 debug_generic_stmt (expr);
3006 return true;
3007 }
3008 }
3009
3010 return ((require_lvalue || !is_gimple_min_invariant (expr))
3011 && verify_types_in_gimple_min_lval (expr));
3012 }
3013
3014 /* Returns true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3015 list of pointer-to types is trivially convertible to DEST. */
3016
3017 static bool
3018 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3019 {
3020 tree src;
3021
3022 if (!TYPE_POINTER_TO (src_obj))
3023 return true;
3024
3025 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3026 if (useless_type_conversion_p (dest, src))
3027 return true;
3028
3029 return false;
3030 }
3031
3032 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3033 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3034
3035 static bool
3036 valid_fixed_convert_types_p (tree type1, tree type2)
3037 {
3038 return (FIXED_POINT_TYPE_P (type1)
3039 && (INTEGRAL_TYPE_P (type2)
3040 || SCALAR_FLOAT_TYPE_P (type2)
3041 || FIXED_POINT_TYPE_P (type2)));
3042 }
3043
3044 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3045 is a problem, otherwise false. */
3046
3047 static bool
3048 verify_gimple_call (gimple stmt)
3049 {
3050 tree fn = gimple_call_fn (stmt);
3051 tree fntype;
3052 unsigned i;
3053
3054 if (TREE_CODE (fn) != OBJ_TYPE_REF
3055 && !is_gimple_val (fn))
3056 {
3057 error ("invalid function in gimple call");
3058 debug_generic_stmt (fn);
3059 return true;
3060 }
3061
3062 if (!POINTER_TYPE_P (TREE_TYPE (fn))
3063 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3064 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
3065 {
3066 error ("non-function in gimple call");
3067 return true;
3068 }
3069
3070 if (gimple_call_lhs (stmt)
3071 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3072 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3073 {
3074 error ("invalid LHS in gimple call");
3075 return true;
3076 }
3077
3078 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3079 {
3080 error ("LHS in noreturn call");
3081 return true;
3082 }
3083
3084 fntype = TREE_TYPE (TREE_TYPE (fn));
3085 if (gimple_call_lhs (stmt)
3086 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3087 TREE_TYPE (fntype))
3088 /* ??? At least C++ misses conversions at assignments from
3089 void * call results.
3090 ??? Java is completely off. Especially with functions
3091 returning java.lang.Object.
3092 For now simply allow arbitrary pointer type conversions. */
3093 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3094 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3095 {
3096 error ("invalid conversion in gimple call");
3097 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3098 debug_generic_stmt (TREE_TYPE (fntype));
3099 return true;
3100 }
3101
3102 if (gimple_call_chain (stmt)
3103 && !is_gimple_val (gimple_call_chain (stmt)))
3104 {
3105 error ("invalid static chain in gimple call");
3106 debug_generic_stmt (gimple_call_chain (stmt));
3107 return true;
3108 }
3109
3110 /* If there is a static chain argument, this should not be an indirect
3111 call, and the decl should have DECL_STATIC_CHAIN set. */
3112 if (gimple_call_chain (stmt))
3113 {
3114 if (TREE_CODE (fn) != ADDR_EXPR
3115 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
3116 {
3117 error ("static chain in indirect gimple call");
3118 return true;
3119 }
3120 fn = TREE_OPERAND (fn, 0);
3121
3122 if (!DECL_STATIC_CHAIN (fn))
3123 {
3124 error ("static chain with function that doesn't use one");
3125 return true;
3126 }
3127 }
3128
3129 /* ??? The C frontend passes unpromoted arguments in case it
3130 didn't see a function declaration before the call. So for now
3131 leave the call arguments mostly unverified. Once we gimplify
3132 unit-at-a-time we have a chance to fix this. */
3133
3134 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3135 {
3136 tree arg = gimple_call_arg (stmt, i);
3137 if ((is_gimple_reg_type (TREE_TYPE (arg))
3138 && !is_gimple_val (arg))
3139 || (!is_gimple_reg_type (TREE_TYPE (arg))
3140 && !is_gimple_lvalue (arg)))
3141 {
3142 error ("invalid argument to gimple call");
3143 debug_generic_expr (arg);
3144 }
3145 }
3146
3147 return false;
3148 }
3149
3150 /* Verify the gimple comparison with the result type TYPE and
3151 the operands OP0 and OP1. Returns true if anything is wrong. */
3152
3153 static bool
3154 verify_gimple_comparison (tree type, tree op0, tree op1)
3155 {
3156 tree op0_type = TREE_TYPE (op0);
3157 tree op1_type = TREE_TYPE (op1);
3158
3159 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3160 {
3161 error ("invalid operands in gimple comparison");
3162 return true;
3163 }
3164
3165 /* For comparisons we do not record the effective type the
3166 comparison is carried out in. Instead
3167 we require that either the first operand is trivially
3168 convertible into the second, or the other way around.
3169 The resulting type of a comparison may be any integral type.
3170 Because we special-case pointers to void we allow
3171 comparisons of pointers with the same mode as well. */
3172 if ((!useless_type_conversion_p (op0_type, op1_type)
3173 && !useless_type_conversion_p (op1_type, op0_type)
3174 && (!POINTER_TYPE_P (op0_type)
3175 || !POINTER_TYPE_P (op1_type)
3176 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3177 || !INTEGRAL_TYPE_P (type))
3178 {
3179 error ("type mismatch in comparison expression");
3180 debug_generic_expr (type);
3181 debug_generic_expr (op0_type);
3182 debug_generic_expr (op1_type);
3183 return true;
3184 }
3185
3186 return false;
3187 }
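
/* Example: "_1 = p_2 < q_3" is accepted when p_2 and q_3 are pointers
   of the same mode even if neither pointer type is trivially
   convertible to the other; comparing an int with a double without an
   explicit conversion is diagnosed as a type mismatch.  */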
3188
3189 /* Verify a gimple assignment statement STMT with a unary rhs.
3190 Returns true if anything is wrong. */
3191
3192 static bool
3193 verify_gimple_assign_unary (gimple stmt)
3194 {
3195 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3196 tree lhs = gimple_assign_lhs (stmt);
3197 tree lhs_type = TREE_TYPE (lhs);
3198 tree rhs1 = gimple_assign_rhs1 (stmt);
3199 tree rhs1_type = TREE_TYPE (rhs1);
3200
3201 if (!is_gimple_reg (lhs)
3202 && !(optimize == 0
3203 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3204 {
3205 error ("non-register as LHS of unary operation");
3206 return true;
3207 }
3208
3209 if (!is_gimple_val (rhs1))
3210 {
3211 error ("invalid operand in unary operation");
3212 return true;
3213 }
3214
3215 /* First handle conversions. */
3216 switch (rhs_code)
3217 {
3218 CASE_CONVERT:
3219 {
3220 /* Allow conversions between integral types and pointers only if
3221 there is no sign or zero extension involved.
3222 For targets where the precision of sizetype doesn't match that
3223 of pointers we need to allow arbitrary conversions from and
3224 to sizetype. */
3225 if ((POINTER_TYPE_P (lhs_type)
3226 && INTEGRAL_TYPE_P (rhs1_type)
3227 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3228 || rhs1_type == sizetype))
3229 || (POINTER_TYPE_P (rhs1_type)
3230 && INTEGRAL_TYPE_P (lhs_type)
3231 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3232 || lhs_type == sizetype)))
3233 return false;
3234
3235 /* Allow conversion from integer to offset type and vice versa. */
3236 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3237 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3238 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3239 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3240 return false;
3241
3242 /* Otherwise assert we are converting between types of the
3243 same kind. */
3244 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3245 {
3246 error ("invalid types in nop conversion");
3247 debug_generic_expr (lhs_type);
3248 debug_generic_expr (rhs1_type);
3249 return true;
3250 }
3251
3252 return false;
3253 }
3254
3255 case ADDR_SPACE_CONVERT_EXPR:
3256 {
3257 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3258 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3259 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3260 {
3261 error ("invalid types in address space conversion");
3262 debug_generic_expr (lhs_type);
3263 debug_generic_expr (rhs1_type);
3264 return true;
3265 }
3266
3267 return false;
3268 }
3269
3270 case FIXED_CONVERT_EXPR:
3271 {
3272 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3273 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3274 {
3275 error ("invalid types in fixed-point conversion");
3276 debug_generic_expr (lhs_type);
3277 debug_generic_expr (rhs1_type);
3278 return true;
3279 }
3280
3281 return false;
3282 }
3283
3284 case FLOAT_EXPR:
3285 {
3286 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3287 {
3288 error ("invalid types in conversion to floating point");
3289 debug_generic_expr (lhs_type);
3290 debug_generic_expr (rhs1_type);
3291 return true;
3292 }
3293
3294 return false;
3295 }
3296
3297 case FIX_TRUNC_EXPR:
3298 {
3299 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3300 {
3301 error ("invalid types in conversion to integer");
3302 debug_generic_expr (lhs_type);
3303 debug_generic_expr (rhs1_type);
3304 return true;
3305 }
3306
3307 return false;
3308 }
3309
3310 case VEC_UNPACK_HI_EXPR:
3311 case VEC_UNPACK_LO_EXPR:
3312 case REDUC_MAX_EXPR:
3313 case REDUC_MIN_EXPR:
3314 case REDUC_PLUS_EXPR:
3315 case VEC_UNPACK_FLOAT_HI_EXPR:
3316 case VEC_UNPACK_FLOAT_LO_EXPR:
3317 /* FIXME. */
3318 return false;
3319
3320 case TRUTH_NOT_EXPR:
3321 case NEGATE_EXPR:
3322 case ABS_EXPR:
3323 case BIT_NOT_EXPR:
3324 case PAREN_EXPR:
3325 case NON_LVALUE_EXPR:
3326 case CONJ_EXPR:
3327 break;
3328
3329 default:
3330 gcc_unreachable ();
3331 }
3332
3333 /* For the remaining codes assert there is no conversion involved. */
3334 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3335 {
3336 error ("non-trivial conversion in unary operation");
3337 debug_generic_expr (lhs_type);
3338 debug_generic_expr (rhs1_type);
3339 return true;
3340 }
3341
3342 return false;
3343 }
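
/* Example: the nop-conversion rules above accept "i_1 = (int) p_2"
   when 'int' is no wider than the pointer, since no sign or zero
   extension is involved; widening a 32-bit pointer into a 64-bit
   integer is accepted only when the destination type is sizetype.  */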
3344
3345 /* Verify a gimple assignment statement STMT with a binary rhs.
3346 Returns true if anything is wrong. */
3347
3348 static bool
3349 verify_gimple_assign_binary (gimple stmt)
3350 {
3351 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3352 tree lhs = gimple_assign_lhs (stmt);
3353 tree lhs_type = TREE_TYPE (lhs);
3354 tree rhs1 = gimple_assign_rhs1 (stmt);
3355 tree rhs1_type = TREE_TYPE (rhs1);
3356 tree rhs2 = gimple_assign_rhs2 (stmt);
3357 tree rhs2_type = TREE_TYPE (rhs2);
3358
3359 if (!is_gimple_reg (lhs)
3360 && !(optimize == 0
3361 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3362 {
3363 error ("non-register as LHS of binary operation");
3364 return true;
3365 }
3366
3367 if (!is_gimple_val (rhs1)
3368 || !is_gimple_val (rhs2))
3369 {
3370 error ("invalid operands in binary operation");
3371 return true;
3372 }
3373
3374 /* First handle operations that involve different types. */
3375 switch (rhs_code)
3376 {
3377 case COMPLEX_EXPR:
3378 {
3379 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3380 || !(INTEGRAL_TYPE_P (rhs1_type)
3381 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3382 || !(INTEGRAL_TYPE_P (rhs2_type)
3383 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3384 {
3385 error ("type mismatch in complex expression");
3386 debug_generic_expr (lhs_type);
3387 debug_generic_expr (rhs1_type);
3388 debug_generic_expr (rhs2_type);
3389 return true;
3390 }
3391
3392 return false;
3393 }
3394
3395 case LSHIFT_EXPR:
3396 case RSHIFT_EXPR:
3397 case LROTATE_EXPR:
3398 case RROTATE_EXPR:
3399 {
3400 /* Shifts and rotates are ok on integral types, fixed point
3401 types and integer vector types. */
3402 if ((!INTEGRAL_TYPE_P (rhs1_type)
3403 && !FIXED_POINT_TYPE_P (rhs1_type)
3404 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3405 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3406 || (!INTEGRAL_TYPE_P (rhs2_type)
3407 /* Vector shifts of vectors are also ok. */
3408 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3409 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3410 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3411 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3412 || !useless_type_conversion_p (lhs_type, rhs1_type))
3413 {
3414 error ("type mismatch in shift expression");
3415 debug_generic_expr (lhs_type);
3416 debug_generic_expr (rhs1_type);
3417 debug_generic_expr (rhs2_type);
3418 return true;
3419 }
3420
3421 return false;
3422 }
3423
3424 case VEC_LSHIFT_EXPR:
3425 case VEC_RSHIFT_EXPR:
3426 {
3427 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3428 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3429 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3430 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3431 || (!INTEGRAL_TYPE_P (rhs2_type)
3432 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3433 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3434 || !useless_type_conversion_p (lhs_type, rhs1_type))
3435 {
3436 error ("type mismatch in vector shift expression");
3437 debug_generic_expr (lhs_type);
3438 debug_generic_expr (rhs1_type);
3439 debug_generic_expr (rhs2_type);
3440 return true;
3441 }
3442 /* For shifting a vector of floating point components we
3443 only allow shifting by a constant multiple of the element size. */
3444 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3445 && (TREE_CODE (rhs2) != INTEGER_CST
3446 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3447 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3448 {
3449 error ("non-element sized vector shift of floating point vector");
3450 return true;
3451 }
3452
3453 return false;
3454 }
3455
3456 case PLUS_EXPR:
3457 {
3458 /* We use regular PLUS_EXPR for vectors.
3459 ??? This just makes the checker happy and may not be what is
3460 intended. */
3461 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3462 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3463 {
3464 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3465 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3466 {
3467 error ("invalid non-vector operands to vector valued plus");
3468 return true;
3469 }
3470 lhs_type = TREE_TYPE (lhs_type);
3471 rhs1_type = TREE_TYPE (rhs1_type);
3472 rhs2_type = TREE_TYPE (rhs2_type);
3473 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3474 the pointer into the second operand. */
3475 if (POINTER_TYPE_P (rhs2_type))
3476 {
3477 tree tem = rhs1_type;
3478 rhs1_type = rhs2_type;
3479 rhs2_type = tem;
3480 }
3481 goto do_pointer_plus_expr_check;
3482 }
3483 }
3484 /* Fallthru. */
3485 case MINUS_EXPR:
3486 {
3487 if (POINTER_TYPE_P (lhs_type)
3488 || POINTER_TYPE_P (rhs1_type)
3489 || POINTER_TYPE_P (rhs2_type))
3490 {
3491 error ("invalid (pointer) operands to plus/minus");
3492 return true;
3493 }
3494
3495 /* Continue with generic binary expression handling. */
3496 break;
3497 }
3498
3499 case POINTER_PLUS_EXPR:
3500 {
3501 do_pointer_plus_expr_check:
3502 if (!POINTER_TYPE_P (rhs1_type)
3503 || !useless_type_conversion_p (lhs_type, rhs1_type)
3504 || !useless_type_conversion_p (sizetype, rhs2_type))
3505 {
3506 error ("type mismatch in pointer plus expression");
3507 debug_generic_stmt (lhs_type);
3508 debug_generic_stmt (rhs1_type);
3509 debug_generic_stmt (rhs2_type);
3510 return true;
3511 }
3512
3513 return false;
3514 }
3515
3516 case TRUTH_ANDIF_EXPR:
3517 case TRUTH_ORIF_EXPR:
3518 gcc_unreachable ();
3519
3520 case TRUTH_AND_EXPR:
3521 case TRUTH_OR_EXPR:
3522 case TRUTH_XOR_EXPR:
3523 {
3524 /* We allow any kind of integral typed argument and result. */
3525 if (!INTEGRAL_TYPE_P (rhs1_type)
3526 || !INTEGRAL_TYPE_P (rhs2_type)
3527 || !INTEGRAL_TYPE_P (lhs_type))
3528 {
3529 error ("type mismatch in binary truth expression");
3530 debug_generic_expr (lhs_type);
3531 debug_generic_expr (rhs1_type);
3532 debug_generic_expr (rhs2_type);
3533 return true;
3534 }
3535
3536 return false;
3537 }
3538
3539 case LT_EXPR:
3540 case LE_EXPR:
3541 case GT_EXPR:
3542 case GE_EXPR:
3543 case EQ_EXPR:
3544 case NE_EXPR:
3545 case UNORDERED_EXPR:
3546 case ORDERED_EXPR:
3547 case UNLT_EXPR:
3548 case UNLE_EXPR:
3549 case UNGT_EXPR:
3550 case UNGE_EXPR:
3551 case UNEQ_EXPR:
3552 case LTGT_EXPR:
3553 /* Comparisons are also binary, but the result type is not
3554 connected to the operand types. */
3555 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3556
3557 case WIDEN_MULT_EXPR:
3558 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3559 return true;
3560 return ((2 * TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (lhs_type))
3561 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3562
3563 case WIDEN_SUM_EXPR:
3564 case VEC_WIDEN_MULT_HI_EXPR:
3565 case VEC_WIDEN_MULT_LO_EXPR:
3566 case VEC_PACK_TRUNC_EXPR:
3567 case VEC_PACK_SAT_EXPR:
3568 case VEC_PACK_FIX_TRUNC_EXPR:
3569 case VEC_EXTRACT_EVEN_EXPR:
3570 case VEC_EXTRACT_ODD_EXPR:
3571 case VEC_INTERLEAVE_HIGH_EXPR:
3572 case VEC_INTERLEAVE_LOW_EXPR:
3573 /* FIXME. */
3574 return false;
3575
3576 case MULT_EXPR:
3577 case TRUNC_DIV_EXPR:
3578 case CEIL_DIV_EXPR:
3579 case FLOOR_DIV_EXPR:
3580 case ROUND_DIV_EXPR:
3581 case TRUNC_MOD_EXPR:
3582 case CEIL_MOD_EXPR:
3583 case FLOOR_MOD_EXPR:
3584 case ROUND_MOD_EXPR:
3585 case RDIV_EXPR:
3586 case EXACT_DIV_EXPR:
3587 case MIN_EXPR:
3588 case MAX_EXPR:
3589 case BIT_IOR_EXPR:
3590 case BIT_XOR_EXPR:
3591 case BIT_AND_EXPR:
3592 /* Continue with generic binary expression handling. */
3593 break;
3594
3595 default:
3596 gcc_unreachable ();
3597 }
3598
3599 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3600 || !useless_type_conversion_p (lhs_type, rhs2_type))
3601 {
3602 error ("type mismatch in binary expression");
3603 debug_generic_stmt (lhs_type);
3604 debug_generic_stmt (rhs1_type);
3605 debug_generic_stmt (rhs2_type);
3606 return true;
3607 }
3608
3609 return false;
3610 }
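
/* Example: "p_1 = p_2 + 4" must be expressed as a POINTER_PLUS_EXPR
   with a sizetype offset; a plain PLUS_EXPR with pointer operands is
   diagnosed above, as is a shift whose result type is not trivially
   convertible from the type of the first operand.  */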
3611
3612 /* Verify a gimple assignment statement STMT with a ternary rhs.
3613 Returns true if anything is wrong. */
3614
3615 static bool
3616 verify_gimple_assign_ternary (gimple stmt)
3617 {
3618 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3619 tree lhs = gimple_assign_lhs (stmt);
3620 tree lhs_type = TREE_TYPE (lhs);
3621 tree rhs1 = gimple_assign_rhs1 (stmt);
3622 tree rhs1_type = TREE_TYPE (rhs1);
3623 tree rhs2 = gimple_assign_rhs2 (stmt);
3624 tree rhs2_type = TREE_TYPE (rhs2);
3625 tree rhs3 = gimple_assign_rhs3 (stmt);
3626 tree rhs3_type = TREE_TYPE (rhs3);
3627
3628 if (!is_gimple_reg (lhs)
3629 && !(optimize == 0
3630 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3631 {
3632 error ("non-register as LHS of ternary operation");
3633 return true;
3634 }
3635
3636 if (!is_gimple_val (rhs1)
3637 || !is_gimple_val (rhs2)
3638 || !is_gimple_val (rhs3))
3639 {
3640 error ("invalid operands in ternary operation");
3641 return true;
3642 }
3643
3644 /* First handle operations that involve different types. */
3645 switch (rhs_code)
3646 {
3647 case WIDEN_MULT_PLUS_EXPR:
3648 case WIDEN_MULT_MINUS_EXPR:
3649 if ((!INTEGRAL_TYPE_P (rhs1_type)
3650 && !FIXED_POINT_TYPE_P (rhs1_type))
3651 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3652 || !useless_type_conversion_p (lhs_type, rhs3_type)
3653 || 2 * TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (lhs_type)
3654 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3655 {
3656 error ("type mismatch in widening multiply-accumulate expression");
3657 debug_generic_expr (lhs_type);
3658 debug_generic_expr (rhs1_type);
3659 debug_generic_expr (rhs2_type);
3660 debug_generic_expr (rhs3_type);
3661 return true;
3662 }
3663 break;
3664
3665 default:
3666 gcc_unreachable ();
3667 }
3668 return false;
3669 }
3670
3671 /* Verify a gimple assignment statement STMT with a single rhs.
3672 Returns true if anything is wrong. */
3673
3674 static bool
3675 verify_gimple_assign_single (gimple stmt)
3676 {
3677 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3678 tree lhs = gimple_assign_lhs (stmt);
3679 tree lhs_type = TREE_TYPE (lhs);
3680 tree rhs1 = gimple_assign_rhs1 (stmt);
3681 tree rhs1_type = TREE_TYPE (rhs1);
3682 bool res = false;
3683
3684 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3685 {
3686 error ("non-trivial conversion at assignment");
3687 debug_generic_expr (lhs_type);
3688 debug_generic_expr (rhs1_type);
3689 return true;
3690 }
3691
3692 if (handled_component_p (lhs))
3693 res |= verify_types_in_gimple_reference (lhs, true);
3694
3695 /* Special codes we cannot handle via their class. */
3696 switch (rhs_code)
3697 {
3698 case ADDR_EXPR:
3699 {
3700 tree op = TREE_OPERAND (rhs1, 0);
3701 if (!is_gimple_addressable (op))
3702 {
3703 error ("invalid operand in unary expression");
3704 return true;
3705 }
3706
3707 if (!types_compatible_p (TREE_TYPE (op), TREE_TYPE (TREE_TYPE (rhs1)))
3708 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3709 TREE_TYPE (op)))
3710 {
3711 error ("type mismatch in address expression");
3712 debug_generic_stmt (TREE_TYPE (rhs1));
3713 debug_generic_stmt (TREE_TYPE (op));
3714 return true;
3715 }
3716
3717 return verify_types_in_gimple_reference (op, true);
3718 }
3719
3720 /* tcc_reference */
3721 case INDIRECT_REF:
3722 error ("INDIRECT_REF in gimple IL");
3723 return true;
3724
3725 case COMPONENT_REF:
3726 case BIT_FIELD_REF:
3727 case MISALIGNED_INDIRECT_REF:
3728 case ARRAY_REF:
3729 case ARRAY_RANGE_REF:
3730 case VIEW_CONVERT_EXPR:
3731 case REALPART_EXPR:
3732 case IMAGPART_EXPR:
3733 case TARGET_MEM_REF:
3734 case MEM_REF:
3735 if (!is_gimple_reg (lhs)
3736 && is_gimple_reg_type (TREE_TYPE (lhs)))
3737 {
3738 error ("invalid rhs for gimple memory store");
3739 debug_generic_stmt (lhs);
3740 debug_generic_stmt (rhs1);
3741 return true;
3742 }
3743 return res || verify_types_in_gimple_reference (rhs1, false);
3744
3745 /* tcc_constant */
3746 case SSA_NAME:
3747 case INTEGER_CST:
3748 case REAL_CST:
3749 case FIXED_CST:
3750 case COMPLEX_CST:
3751 case VECTOR_CST:
3752 case STRING_CST:
3753 return res;
3754
3755 /* tcc_declaration */
3756 case CONST_DECL:
3757 return res;
3758 case VAR_DECL:
3759 case PARM_DECL:
3760 if (!is_gimple_reg (lhs)
3761 && !is_gimple_reg (rhs1)
3762 && is_gimple_reg_type (TREE_TYPE (lhs)))
3763 {
3764 error ("invalid rhs for gimple memory store");
3765 debug_generic_stmt (lhs);
3766 debug_generic_stmt (rhs1);
3767 return true;
3768 }
3769 return res;
3770
3771 case COND_EXPR:
3772 if (!is_gimple_reg (lhs)
3773 || (!is_gimple_reg (TREE_OPERAND (rhs1, 0))
3774 && !COMPARISON_CLASS_P (TREE_OPERAND (rhs1, 0)))
3775 || (!is_gimple_reg (TREE_OPERAND (rhs1, 1))
3776 && !is_gimple_min_invariant (TREE_OPERAND (rhs1, 1)))
3777 || (!is_gimple_reg (TREE_OPERAND (rhs1, 2))
3778 && !is_gimple_min_invariant (TREE_OPERAND (rhs1, 2))))
3779 {
3780 error ("invalid COND_EXPR in gimple assignment");
3781 debug_generic_stmt (rhs1);
3782 return true;
3783 }
3784 return res;
3785
3786 case CONSTRUCTOR:
3787 case OBJ_TYPE_REF:
3788 case ASSERT_EXPR:
3789 case WITH_SIZE_EXPR:
3790 case POLYNOMIAL_CHREC:
3791 case DOT_PROD_EXPR:
3792 case VEC_COND_EXPR:
3793 case REALIGN_LOAD_EXPR:
3794 /* FIXME. */
3795 return res;
3796
3797 default:;
3798 }
3799
3800 return res;
3801 }
3802
3803 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3804 is a problem, otherwise false. */
3805
3806 static bool
3807 verify_gimple_assign (gimple stmt)
3808 {
3809 switch (gimple_assign_rhs_class (stmt))
3810 {
3811 case GIMPLE_SINGLE_RHS:
3812 return verify_gimple_assign_single (stmt);
3813
3814 case GIMPLE_UNARY_RHS:
3815 return verify_gimple_assign_unary (stmt);
3816
3817 case GIMPLE_BINARY_RHS:
3818 return verify_gimple_assign_binary (stmt);
3819
3820 case GIMPLE_TERNARY_RHS:
3821 return verify_gimple_assign_ternary (stmt);
3822
3823 default:
3824 gcc_unreachable ();
3825 }
3826 }
3827
3828 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3829 is a problem, otherwise false. */
3830
3831 static bool
3832 verify_gimple_return (gimple stmt)
3833 {
3834 tree op = gimple_return_retval (stmt);
3835 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3836
3837 /* We cannot test for present return values as we do not fix up missing
3838 return values from the original source. */
3839 if (op == NULL)
3840 return false;
3841
3842 if (!is_gimple_val (op)
3843 && TREE_CODE (op) != RESULT_DECL)
3844 {
3845 error ("invalid operand in return statement");
3846 debug_generic_stmt (op);
3847 return true;
3848 }
3849
3850 if ((TREE_CODE (op) == RESULT_DECL
3851 && DECL_BY_REFERENCE (op))
3852 || (TREE_CODE (op) == SSA_NAME
3853 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
3854 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
3855 op = TREE_TYPE (op);
3856
3857 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
3858 {
3859 error ("invalid conversion in return statement");
3860 debug_generic_stmt (restype);
3861 debug_generic_stmt (TREE_TYPE (op));
3862 return true;
3863 }
3864
3865 return false;
3866 }
3867
3868
3869 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3870 is a problem, otherwise false. */
3871
3872 static bool
3873 verify_gimple_goto (gimple stmt)
3874 {
3875 tree dest = gimple_goto_dest (stmt);
3876
3877 /* ??? We have two canonical forms of direct goto destinations, a
3878 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3879 if (TREE_CODE (dest) != LABEL_DECL
3880 && (!is_gimple_val (dest)
3881 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3882 {
3883 error ("goto destination is neither a label nor a pointer");
3884 return true;
3885 }
3886
3887 return false;
3888 }
3889
3890 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3891 is a problem, otherwise false. */
3892
3893 static bool
3894 verify_gimple_switch (gimple stmt)
3895 {
3896 if (!is_gimple_val (gimple_switch_index (stmt)))
3897 {
3898 error ("invalid operand to switch statement");
3899 debug_generic_stmt (gimple_switch_index (stmt));
3900 return true;
3901 }
3902
3903 return false;
3904 }
3905
3906
3907 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
3908 and false otherwise. */
3909
3910 static bool
3911 verify_gimple_phi (gimple stmt)
3912 {
3913 tree type = TREE_TYPE (gimple_phi_result (stmt));
3914 unsigned i;
3915
3916 if (TREE_CODE (gimple_phi_result (stmt)) != SSA_NAME)
3917 {
3918 error ("Invalid PHI result");
3919 return true;
3920 }
3921
3922 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3923 {
3924 tree arg = gimple_phi_arg_def (stmt, i);
3925 if ((is_gimple_reg (gimple_phi_result (stmt))
3926 && !is_gimple_val (arg))
3927 || (!is_gimple_reg (gimple_phi_result (stmt))
3928 && !is_gimple_addressable (arg)))
3929 {
3930 error ("Invalid PHI argument");
3931 debug_generic_stmt (arg);
3932 return true;
3933 }
3934 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3935 {
3936 error ("Incompatible types in PHI argument %u", i);
3937 debug_generic_stmt (type);
3938 debug_generic_stmt (TREE_TYPE (arg));
3939 return true;
3940 }
3941 }
3942
3943 return false;
3944 }
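
/* Example: for a register result, as in "x_3 = PHI <x_1(2), x_2(4)>",
   every argument must be a GIMPLE value whose type is trivially
   convertible to the type of x_3; an argument of a
   different-precision integer type triggers the incompatible-types
   diagnostic above.  */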
3945
3946
3947 /* Verify a gimple debug statement STMT.
3948 Returns true if anything is wrong. */
3949
3950 static bool
3951 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
3952 {
3953 /* There isn't much that could be wrong in a gimple debug stmt. A
3954 gimple debug bind stmt, for example, maps a tree (usually a
3955 VAR_DECL or a PARM_DECL, but possibly a scalarized component or
3956 member of an aggregate type) to another tree that can be an
3957 arbitrary expression. These stmts expand into debug insns and
3958 are converted to debug notes by var-tracking.c. */
3959 return false;
3960 }
3961
3962
3963 /* Verify the GIMPLE statement STMT. Returns true if there is an
3964 error, otherwise false. */
3965
3966 static bool
3967 verify_types_in_gimple_stmt (gimple stmt)
3968 {
3969 switch (gimple_code (stmt))
3970 {
3971 case GIMPLE_ASSIGN:
3972 return verify_gimple_assign (stmt);
3973
3974 case GIMPLE_LABEL:
3975 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
3976
3977 case GIMPLE_CALL:
3978 return verify_gimple_call (stmt);
3979
3980 case GIMPLE_COND:
3981 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
3982 {
3983 error ("invalid comparison code in gimple cond");
3984 return true;
3985 }
3986 if (!(!gimple_cond_true_label (stmt)
3987 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
3988 || !(!gimple_cond_false_label (stmt)
3989 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
3990 {
3991 error ("invalid labels in gimple cond");
3992 return true;
3993 }
3994
3995 return verify_gimple_comparison (boolean_type_node,
3996 gimple_cond_lhs (stmt),
3997 gimple_cond_rhs (stmt));
3998
3999 case GIMPLE_GOTO:
4000 return verify_gimple_goto (stmt);
4001
4002 case GIMPLE_SWITCH:
4003 return verify_gimple_switch (stmt);
4004
4005 case GIMPLE_RETURN:
4006 return verify_gimple_return (stmt);
4007
4008 case GIMPLE_ASM:
4009 return false;
4010
4011 case GIMPLE_PHI:
4012 return verify_gimple_phi (stmt);
4013
4014 /* Tuples that do not have tree operands. */
4015 case GIMPLE_NOP:
4016 case GIMPLE_PREDICT:
4017 case GIMPLE_RESX:
4018 case GIMPLE_EH_DISPATCH:
4019 case GIMPLE_EH_MUST_NOT_THROW:
4020 return false;
4021
4022 CASE_GIMPLE_OMP:
4023 /* OpenMP directives are validated by the FE and never operated
4024 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4025 non-gimple expressions when the main index variable has had
4026 its address taken. This does not affect the loop itself
4027 	 because the header of a GIMPLE_OMP_FOR is merely used to determine
4028 	 how to set up the parallel iteration.  */
4029 return false;
4030
4031 case GIMPLE_DEBUG:
4032 return verify_gimple_debug (stmt);
4033
4034 default:
4035 gcc_unreachable ();
4036 }
4037 }
4038
4039 /* Verify the GIMPLE statements inside the sequence STMTS. */
4040
4041 static bool
4042 verify_types_in_gimple_seq_2 (gimple_seq stmts)
4043 {
4044 gimple_stmt_iterator ittr;
4045 bool err = false;
4046
4047 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4048 {
4049 gimple stmt = gsi_stmt (ittr);
4050
4051 switch (gimple_code (stmt))
4052 {
4053 case GIMPLE_BIND:
4054 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
4055 break;
4056
4057 case GIMPLE_TRY:
4058 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
4059 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
4060 break;
4061
4062 case GIMPLE_EH_FILTER:
4063 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
4064 break;
4065
4066 case GIMPLE_CATCH:
4067 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
4068 break;
4069
4070 default:
4071 {
4072 bool err2 = verify_types_in_gimple_stmt (stmt);
4073 if (err2)
4074 debug_gimple_stmt (stmt);
4075 err |= err2;
4076 }
4077 }
4078 }
4079
4080 return err;
4081 }
4082
4083
4084 /* Verify the GIMPLE statements inside the statement list STMTS. */
4085
4086 void
4087 verify_types_in_gimple_seq (gimple_seq stmts)
4088 {
4089 if (verify_types_in_gimple_seq_2 (stmts))
4090 internal_error ("verify_gimple failed");
4091 }
4092
4093
4094 /* Verify STMT, return true if STMT is not in GIMPLE form.
4095 TODO: Implement type checking. */
4096
4097 static bool
4098 verify_stmt (gimple_stmt_iterator *gsi)
4099 {
4100 tree addr;
4101 struct walk_stmt_info wi;
4102 bool last_in_block = gsi_one_before_end_p (*gsi);
4103 gimple stmt = gsi_stmt (*gsi);
4104 int lp_nr;
4105
4106 if (is_gimple_omp (stmt))
4107 {
4108 /* OpenMP directives are validated by the FE and never operated
4109 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4110 non-gimple expressions when the main index variable has had
4111 its address taken. This does not affect the loop itself
4112 	 because the header of a GIMPLE_OMP_FOR is merely used to determine
4113 	 how to set up the parallel iteration.  */
4114 return false;
4115 }
4116
4117 /* FIXME. The C frontend passes unpromoted arguments in case it
4118 didn't see a function declaration before the call. */
4119 if (is_gimple_call (stmt))
4120 {
4121 tree decl;
4122
4123 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4124 {
4125 error ("invalid function in call statement");
4126 return true;
4127 }
4128
4129 decl = gimple_call_fndecl (stmt);
4130 if (decl
4131 && TREE_CODE (decl) == FUNCTION_DECL
4132 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4133 && (!DECL_PURE_P (decl))
4134 && (!TREE_READONLY (decl)))
4135 {
4136 error ("invalid pure const state for function");
4137 return true;
4138 }
4139 }
4140
4141 if (is_gimple_debug (stmt))
4142 return false;
4143
4144 memset (&wi, 0, sizeof (wi));
4145 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
4146 if (addr)
4147 {
4148 debug_generic_expr (addr);
4149 inform (gimple_location (gsi_stmt (*gsi)), "in statement");
4150 debug_gimple_stmt (stmt);
4151 return true;
4152 }
4153
4154 /* If the statement is marked as part of an EH region, then it is
4155      expected that the statement could throw.  Verify that when an
4156      optimization simplifies a statement such that we can prove it
4157      cannot throw, the other data structures are updated to
4158      match.  */
4159 lp_nr = lookup_stmt_eh_lp (stmt);
4160 if (lp_nr != 0)
4161 {
4162 if (!stmt_could_throw_p (stmt))
4163 {
4164 error ("statement marked for throw, but doesn%'t");
4165 goto fail;
4166 }
4167 else if (lp_nr > 0 && !last_in_block && stmt_can_throw_internal (stmt))
4168 {
4169 error ("statement marked for throw in middle of block");
4170 goto fail;
4171 }
4172 }
4173
4174 return false;
4175
4176 fail:
4177 debug_gimple_stmt (stmt);
4178 return true;
4179 }
4180
4181
4182 /* Return true when T can be shared.  */
4183
4184 bool
4185 tree_node_can_be_shared (tree t)
4186 {
4187 if (IS_TYPE_OR_DECL_P (t)
4188 || is_gimple_min_invariant (t)
4189 || TREE_CODE (t) == SSA_NAME
4190 || t == error_mark_node
4191 || TREE_CODE (t) == IDENTIFIER_NODE)
4192 return true;
4193
4194 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4195 return true;
4196
4197 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4198 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4199 || TREE_CODE (t) == COMPONENT_REF
4200 || TREE_CODE (t) == REALPART_EXPR
4201 || TREE_CODE (t) == IMAGPART_EXPR)
4202 t = TREE_OPERAND (t, 0);
4203
4204 if (DECL_P (t))
4205 return true;
4206
4207 return false;
4208 }
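
/* Illustrative examples of the rule above: types, declarations,
   constants and SSA names are shareable because rewriting one
   statement never mutates them in place.  A reference like a.b or
   a[3] (constant index) is also shareable once it bottoms out at a
   DECL, whereas a[i_2] is not: its variable-index operand is owned
   by the expression, and sharing it between statements would let an
   update of one statement silently corrupt the others.  */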
4209
4210
4211 /* Called via walk_gimple_stmt. Verify tree sharing. */
4212
4213 static tree
4214 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4215 {
4216 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4217 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4218
4219 if (tree_node_can_be_shared (*tp))
4220 {
4221 *walk_subtrees = false;
4222 return NULL;
4223 }
4224
4225 if (pointer_set_insert (visited, *tp))
4226 return *tp;
4227
4228 return NULL;
4229 }
4230
4231
4232 static bool eh_error_found;
4233 static int
4234 verify_eh_throw_stmt_node (void **slot, void *data)
4235 {
4236 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4237 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4238
4239 if (!pointer_set_contains (visited, node->stmt))
4240 {
4241       error ("dead statement in EH table");
4242 debug_gimple_stmt (node->stmt);
4243 eh_error_found = true;
4244 }
4245 return 1;
4246 }
4247
4248
4249 /* Verify the GIMPLE statements in every basic block. */
4250
4251 DEBUG_FUNCTION void
4252 verify_stmts (void)
4253 {
4254 basic_block bb;
4255 gimple_stmt_iterator gsi;
4256 bool err = false;
4257 struct pointer_set_t *visited, *visited_stmts;
4258 tree addr;
4259 struct walk_stmt_info wi;
4260
4261 timevar_push (TV_TREE_STMT_VERIFY);
4262 visited = pointer_set_create ();
4263 visited_stmts = pointer_set_create ();
4264
4265 memset (&wi, 0, sizeof (wi));
4266 wi.info = (void *) visited;
4267
4268 FOR_EACH_BB (bb)
4269 {
4270 gimple phi;
4271 size_t i;
4272
4273 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4274 {
4275 phi = gsi_stmt (gsi);
4276 pointer_set_insert (visited_stmts, phi);
4277 if (gimple_bb (phi) != bb)
4278 {
4279 error ("gimple_bb (phi) is set to a wrong basic block");
4280 err |= true;
4281 }
4282
4283 for (i = 0; i < gimple_phi_num_args (phi); i++)
4284 {
4285 tree t = gimple_phi_arg_def (phi, i);
4286 tree addr;
4287
4288 if (!t)
4289 {
4290 error ("missing PHI def");
4291 debug_gimple_stmt (phi);
4292 err |= true;
4293 continue;
4294 }
4295 /* Addressable variables do have SSA_NAMEs but they
4296 are not considered gimple values. */
4297 else if (TREE_CODE (t) != SSA_NAME
4298 && TREE_CODE (t) != FUNCTION_DECL
4299 && !is_gimple_min_invariant (t))
4300 {
4301 error ("PHI argument is not a GIMPLE value");
4302 debug_gimple_stmt (phi);
4303 debug_generic_expr (t);
4304 err |= true;
4305 }
4306
4307 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4308 if (addr)
4309 {
4310 error ("incorrect sharing of tree nodes");
4311 debug_gimple_stmt (phi);
4312 debug_generic_expr (addr);
4313 err |= true;
4314 }
4315 }
4316
4317 #ifdef ENABLE_TYPES_CHECKING
4318 if (verify_gimple_phi (phi))
4319 {
4320 debug_gimple_stmt (phi);
4321 err |= true;
4322 }
4323 #endif
4324 }
4325
4326 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4327 {
4328 gimple stmt = gsi_stmt (gsi);
4329
4330 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4331 || gimple_code (stmt) == GIMPLE_BIND)
4332 {
4333 error ("invalid GIMPLE statement");
4334 debug_gimple_stmt (stmt);
4335 err |= true;
4336 }
4337
4338 pointer_set_insert (visited_stmts, stmt);
4339
4340 if (gimple_bb (stmt) != bb)
4341 {
4342 error ("gimple_bb (stmt) is set to a wrong basic block");
4343 debug_gimple_stmt (stmt);
4344 err |= true;
4345 }
4346
4347 if (gimple_code (stmt) == GIMPLE_LABEL)
4348 {
4349 tree decl = gimple_label_label (stmt);
4350 int uid = LABEL_DECL_UID (decl);
4351
4352 if (uid == -1
4353 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4354 {
4355 error ("incorrect entry in label_to_block_map");
4356 err |= true;
4357 }
4358
4359 uid = EH_LANDING_PAD_NR (decl);
4360 if (uid)
4361 {
4362 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4363 if (decl != lp->post_landing_pad)
4364 {
4365 error ("incorrect setting of landing pad number");
4366 err |= true;
4367 }
4368 }
4369 }
4370
4371 err |= verify_stmt (&gsi);
4372
4373 #ifdef ENABLE_TYPES_CHECKING
4374 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4375 {
4376 debug_gimple_stmt (stmt);
4377 err |= true;
4378 }
4379 #endif
4380 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4381 if (addr)
4382 {
4383 error ("incorrect sharing of tree nodes");
4384 debug_gimple_stmt (stmt);
4385 debug_generic_expr (addr);
4386 err |= true;
4387 }
4388 gsi_next (&gsi);
4389 }
4390 }
4391
4392 eh_error_found = false;
4393 if (get_eh_throw_stmt_table (cfun))
4394 htab_traverse (get_eh_throw_stmt_table (cfun),
4395 verify_eh_throw_stmt_node,
4396 visited_stmts);
4397
4398 if (err | eh_error_found)
4399 internal_error ("verify_stmts failed");
4400
4401 pointer_set_destroy (visited);
4402 pointer_set_destroy (visited_stmts);
4403 verify_histograms ();
4404 timevar_pop (TV_TREE_STMT_VERIFY);
4405 }
4406
4407
4408 /* Verifies that the flow information is OK. */
4409
4410 static int
4411 gimple_verify_flow_info (void)
4412 {
4413 int err = 0;
4414 basic_block bb;
4415 gimple_stmt_iterator gsi;
4416 gimple stmt;
4417 edge e;
4418 edge_iterator ei;
4419
4420 if (ENTRY_BLOCK_PTR->il.gimple)
4421 {
4422 error ("ENTRY_BLOCK has IL associated with it");
4423 err = 1;
4424 }
4425
4426 if (EXIT_BLOCK_PTR->il.gimple)
4427 {
4428 error ("EXIT_BLOCK has IL associated with it");
4429 err = 1;
4430 }
4431
4432 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4433 if (e->flags & EDGE_FALLTHRU)
4434 {
4435 error ("fallthru to exit from bb %d", e->src->index);
4436 err = 1;
4437 }
4438
4439 FOR_EACH_BB (bb)
4440 {
4441 bool found_ctrl_stmt = false;
4442
4443 stmt = NULL;
4444
4445       /* Skip labels at the start of the basic block.  */
4446 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4447 {
4448 tree label;
4449 gimple prev_stmt = stmt;
4450
4451 stmt = gsi_stmt (gsi);
4452
4453 if (gimple_code (stmt) != GIMPLE_LABEL)
4454 break;
4455
4456 label = gimple_label_label (stmt);
4457 if (prev_stmt && DECL_NONLOCAL (label))
4458 {
4459 error ("nonlocal label ");
4460 print_generic_expr (stderr, label, 0);
4461 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4462 bb->index);
4463 err = 1;
4464 }
4465
4466 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4467 {
4468 error ("EH landing pad label ");
4469 print_generic_expr (stderr, label, 0);
4470 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4471 bb->index);
4472 err = 1;
4473 }
4474
4475 if (label_to_block (label) != bb)
4476 {
4477 error ("label ");
4478 print_generic_expr (stderr, label, 0);
4479 fprintf (stderr, " to block does not match in bb %d",
4480 bb->index);
4481 err = 1;
4482 }
4483
4484 if (decl_function_context (label) != current_function_decl)
4485 {
4486 error ("label ");
4487 print_generic_expr (stderr, label, 0);
4488 fprintf (stderr, " has incorrect context in bb %d",
4489 bb->index);
4490 err = 1;
4491 }
4492 }
4493
4494       /* Verify that the body of basic block BB is free of control flow.  */
4495 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4496 {
4497 gimple stmt = gsi_stmt (gsi);
4498
4499 if (found_ctrl_stmt)
4500 {
4501 error ("control flow in the middle of basic block %d",
4502 bb->index);
4503 err = 1;
4504 }
4505
4506 if (stmt_ends_bb_p (stmt))
4507 found_ctrl_stmt = true;
4508
4509 if (gimple_code (stmt) == GIMPLE_LABEL)
4510 {
4511 error ("label ");
4512 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4513 fprintf (stderr, " in the middle of basic block %d", bb->index);
4514 err = 1;
4515 }
4516 }
4517
4518 gsi = gsi_last_bb (bb);
4519 if (gsi_end_p (gsi))
4520 continue;
4521
4522 stmt = gsi_stmt (gsi);
4523
4524 if (gimple_code (stmt) == GIMPLE_LABEL)
4525 continue;
4526
4527 err |= verify_eh_edges (stmt);
4528
4529 if (is_ctrl_stmt (stmt))
4530 {
4531 FOR_EACH_EDGE (e, ei, bb->succs)
4532 if (e->flags & EDGE_FALLTHRU)
4533 {
4534 error ("fallthru edge after a control statement in bb %d",
4535 bb->index);
4536 err = 1;
4537 }
4538 }
4539
4540 if (gimple_code (stmt) != GIMPLE_COND)
4541 {
4542 	  /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
4543 	     after anything other than a GIMPLE_COND.  */
4544 FOR_EACH_EDGE (e, ei, bb->succs)
4545 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4546 {
4547 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4548 bb->index);
4549 err = 1;
4550 }
4551 }
4552
4553 switch (gimple_code (stmt))
4554 {
4555 case GIMPLE_COND:
4556 {
4557 edge true_edge;
4558 edge false_edge;
4559
4560 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4561
4562 if (!true_edge
4563 || !false_edge
4564 || !(true_edge->flags & EDGE_TRUE_VALUE)
4565 || !(false_edge->flags & EDGE_FALSE_VALUE)
4566 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4567 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4568 || EDGE_COUNT (bb->succs) >= 3)
4569 {
4570 error ("wrong outgoing edge flags at end of bb %d",
4571 bb->index);
4572 err = 1;
4573 }
4574 }
4575 break;
4576
4577 case GIMPLE_GOTO:
4578 if (simple_goto_p (stmt))
4579 {
4580 error ("explicit goto at end of bb %d", bb->index);
4581 err = 1;
4582 }
4583 else
4584 {
4585 /* FIXME. We should double check that the labels in the
4586 destination blocks have their address taken. */
4587 FOR_EACH_EDGE (e, ei, bb->succs)
4588 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4589 | EDGE_FALSE_VALUE))
4590 || !(e->flags & EDGE_ABNORMAL))
4591 {
4592 error ("wrong outgoing edge flags at end of bb %d",
4593 bb->index);
4594 err = 1;
4595 }
4596 }
4597 break;
4598
4599 case GIMPLE_CALL:
4600 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4601 break;
4602 /* ... fallthru ... */
4603 case GIMPLE_RETURN:
4604 if (!single_succ_p (bb)
4605 || (single_succ_edge (bb)->flags
4606 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4607 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4608 {
4609 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4610 err = 1;
4611 }
4612 if (single_succ (bb) != EXIT_BLOCK_PTR)
4613 {
4614 error ("return edge does not point to exit in bb %d",
4615 bb->index);
4616 err = 1;
4617 }
4618 break;
4619
4620 case GIMPLE_SWITCH:
4621 {
4622 tree prev;
4623 edge e;
4624 size_t i, n;
4625
4626 n = gimple_switch_num_labels (stmt);
4627
4628 /* Mark all the destination basic blocks. */
4629 for (i = 0; i < n; ++i)
4630 {
4631 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4632 basic_block label_bb = label_to_block (lab);
4633 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4634 label_bb->aux = (void *)1;
4635 }
4636
4637 /* Verify that the case labels are sorted. */
4638 prev = gimple_switch_label (stmt, 0);
4639 for (i = 1; i < n; ++i)
4640 {
4641 tree c = gimple_switch_label (stmt, i);
4642 if (!CASE_LOW (c))
4643 {
4644 error ("found default case not at the start of "
4645 "case vector");
4646 err = 1;
4647 continue;
4648 }
4649 if (CASE_LOW (prev)
4650 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4651 {
4652 error ("case labels not sorted: ");
4653 print_generic_expr (stderr, prev, 0);
4654 		    fprintf (stderr, " is greater than ");
4655 print_generic_expr (stderr, c, 0);
4656 		    fprintf (stderr, " but comes before it.\n");
4657 err = 1;
4658 }
4659 prev = c;
4660 }
4661 /* VRP will remove the default case if it can prove it will
4662 never be executed. So do not verify there always exists
4663 a default case here. */
4664
4665 FOR_EACH_EDGE (e, ei, bb->succs)
4666 {
4667 if (!e->dest->aux)
4668 {
4669 error ("extra outgoing edge %d->%d",
4670 bb->index, e->dest->index);
4671 err = 1;
4672 }
4673
4674 e->dest->aux = (void *)2;
4675 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4676 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4677 {
4678 error ("wrong outgoing edge flags at end of bb %d",
4679 bb->index);
4680 err = 1;
4681 }
4682 }
4683
4684 /* Check that we have all of them. */
4685 for (i = 0; i < n; ++i)
4686 {
4687 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4688 basic_block label_bb = label_to_block (lab);
4689
4690 if (label_bb->aux != (void *)2)
4691 {
4692 error ("missing edge %i->%i", bb->index, label_bb->index);
4693 err = 1;
4694 }
4695 }
4696
4697 FOR_EACH_EDGE (e, ei, bb->succs)
4698 e->dest->aux = (void *)0;
4699 }
4700 break;
4701
4702 case GIMPLE_EH_DISPATCH:
4703 err |= verify_eh_dispatch_edge (stmt);
4704 break;
4705
4706 default:
4707 break;
4708 }
4709 }
4710
4711 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4712 verify_dominators (CDI_DOMINATORS);
4713
4714 return err;
4715 }
4716
4717
4718 /* Updates phi nodes after creating a forwarder block joined
4719 by edge FALLTHRU. */
4720
4721 static void
4722 gimple_make_forwarder_block (edge fallthru)
4723 {
4724 edge e;
4725 edge_iterator ei;
4726 basic_block dummy, bb;
4727 tree var;
4728 gimple_stmt_iterator gsi;
4729
4730 dummy = fallthru->src;
4731 bb = fallthru->dest;
4732
4733 if (single_pred_p (bb))
4734 return;
4735
4736 /* If we redirected a branch we must create new PHI nodes at the
4737 start of BB. */
4738 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4739 {
4740 gimple phi, new_phi;
4741
4742 phi = gsi_stmt (gsi);
4743 var = gimple_phi_result (phi);
4744 new_phi = create_phi_node (var, bb);
4745 SSA_NAME_DEF_STMT (var) = new_phi;
4746 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4747 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
4748 UNKNOWN_LOCATION);
4749 }
4750
4751 /* Add the arguments we have stored on edges. */
4752 FOR_EACH_EDGE (e, ei, bb->preds)
4753 {
4754 if (e == fallthru)
4755 continue;
4756
4757 flush_pending_stmts (e);
4758 }
4759 }
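
/* Illustrative effect of the above: each PHI at the former join
   point remains in the forwarder DUMMY under a fresh SSA name,
   while BB receives a new PHI for the original name whose FALLTHRU
   argument is the forwarder's result; the remaining arguments are
   then restored from the statements pending on the redirected edges
   by flush_pending_stmts.  */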
4760
4761
4762 /* Return a non-special label at the head of basic block BB.
4763 Create one if it doesn't exist. */
4764
4765 tree
4766 gimple_block_label (basic_block bb)
4767 {
4768 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4769 bool first = true;
4770 tree label;
4771 gimple stmt;
4772
4773 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4774 {
4775 stmt = gsi_stmt (i);
4776 if (gimple_code (stmt) != GIMPLE_LABEL)
4777 break;
4778 label = gimple_label_label (stmt);
4779 if (!DECL_NONLOCAL (label))
4780 {
4781 if (!first)
4782 gsi_move_before (&i, &s);
4783 return label;
4784 }
4785 }
4786
4787 label = create_artificial_label (UNKNOWN_LOCATION);
4788 stmt = gimple_build_label (label);
4789 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4790 return label;
4791 }
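
/* Usage sketch (illustrative): redirection code such as
   gimple_redirect_edge_and_branch below obtains a stable jump
   target with

     tree label = gimple_block_label (dest);
     CASE_LABEL (elt) = label;

   relying on the guarantee that the returned label is first in DEST
   and is not a nonlocal label.  */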
4792
4793
4794 /* Attempt to perform edge redirection by replacing a possibly complex
4795 jump instruction by a goto or by removing the jump completely.
4796 This can apply only if all edges now point to the same block. The
4797 parameters and return values are equivalent to
4798 redirect_edge_and_branch. */
4799
4800 static edge
4801 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4802 {
4803 basic_block src = e->src;
4804 gimple_stmt_iterator i;
4805 gimple stmt;
4806
4807 /* We can replace or remove a complex jump only when we have exactly
4808 two edges. */
4809 if (EDGE_COUNT (src->succs) != 2
4810 /* Verify that all targets will be TARGET. Specifically, the
4811 edge that is not E must also go to TARGET. */
4812 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4813 return NULL;
4814
4815 i = gsi_last_bb (src);
4816 if (gsi_end_p (i))
4817 return NULL;
4818
4819 stmt = gsi_stmt (i);
4820
4821 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4822 {
4823 gsi_remove (&i, true);
4824 e = ssa_redirect_edge (e, target);
4825 e->flags = EDGE_FALLTHRU;
4826 return e;
4827 }
4828
4829 return NULL;
4830 }
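
/* Illustrative example: when redirecting one edge of

     if (a_1 > 0) goto <bb 4>; else goto <bb 5>;

   to <bb 5>, both successors would agree, so instead of editing the
   condition the statement is deleted and the redirected edge becomes
   a single EDGE_FALLTHRU edge to <bb 5>.  */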
4831
4832
4833 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4834 edge representing the redirected branch. */
4835
4836 static edge
4837 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4838 {
4839 basic_block bb = e->src;
4840 gimple_stmt_iterator gsi;
4841 edge ret;
4842 gimple stmt;
4843
4844 if (e->flags & EDGE_ABNORMAL)
4845 return NULL;
4846
4847 if (e->dest == dest)
4848 return NULL;
4849
4850 if (e->flags & EDGE_EH)
4851 return redirect_eh_edge (e, dest);
4852
4853 if (e->src != ENTRY_BLOCK_PTR)
4854 {
4855 ret = gimple_try_redirect_by_replacing_jump (e, dest);
4856 if (ret)
4857 return ret;
4858 }
4859
4860 gsi = gsi_last_bb (bb);
4861 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4862
4863 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
4864 {
4865 case GIMPLE_COND:
4866     case GIMPLE_COND:
4867       /* For a GIMPLE_COND, we only need to redirect the edge.  */
4867 break;
4868
4869 case GIMPLE_GOTO:
4870 /* No non-abnormal edges should lead from a non-simple goto, and
4871 simple ones should be represented implicitly. */
4872 gcc_unreachable ();
4873
4874 case GIMPLE_SWITCH:
4875 {
4876 tree label = gimple_block_label (dest);
4877 tree cases = get_cases_for_edge (e, stmt);
4878
4879 /* If we have a list of cases associated with E, then use it
4880 as it's a lot faster than walking the entire case vector. */
4881 if (cases)
4882 {
4883 edge e2 = find_edge (e->src, dest);
4884 tree last, first;
4885
4886 first = cases;
4887 while (cases)
4888 {
4889 last = cases;
4890 CASE_LABEL (cases) = label;
4891 cases = TREE_CHAIN (cases);
4892 }
4893
4894 /* If there was already an edge in the CFG, then we need
4895 to move all the cases associated with E to E2. */
4896 if (e2)
4897 {
4898 tree cases2 = get_cases_for_edge (e2, stmt);
4899
4900 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4901 TREE_CHAIN (cases2) = first;
4902 }
4903 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
4904 }
4905 else
4906 {
4907 size_t i, n = gimple_switch_num_labels (stmt);
4908
4909 for (i = 0; i < n; i++)
4910 {
4911 tree elt = gimple_switch_label (stmt, i);
4912 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4913 CASE_LABEL (elt) = label;
4914 }
4915 }
4916 }
4917 break;
4918
4919 case GIMPLE_ASM:
4920 {
4921 int i, n = gimple_asm_nlabels (stmt);
4922 tree label = NULL;
4923
4924 for (i = 0; i < n; ++i)
4925 {
4926 tree cons = gimple_asm_label_op (stmt, i);
4927 if (label_to_block (TREE_VALUE (cons)) == e->dest)
4928 {
4929 if (!label)
4930 label = gimple_block_label (dest);
4931 TREE_VALUE (cons) = label;
4932 }
4933 }
4934
4935 /* If we didn't find any label matching the former edge in the
4936 asm labels, we must be redirecting the fallthrough
4937 edge. */
4938 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
4939 }
4940 break;
4941
4942 case GIMPLE_RETURN:
4943 gsi_remove (&gsi, true);
4944 e->flags |= EDGE_FALLTHRU;
4945 break;
4946
4947 case GIMPLE_OMP_RETURN:
4948 case GIMPLE_OMP_CONTINUE:
4949 case GIMPLE_OMP_SECTIONS_SWITCH:
4950 case GIMPLE_OMP_FOR:
4951 /* The edges from OMP constructs can be simply redirected. */
4952 break;
4953
4954 case GIMPLE_EH_DISPATCH:
4955 if (!(e->flags & EDGE_FALLTHRU))
4956 redirect_eh_dispatch_edge (stmt, e, dest);
4957 break;
4958
4959 default:
4960 /* Otherwise it must be a fallthru edge, and we don't need to
4961 do anything besides redirecting it. */
4962 gcc_assert (e->flags & EDGE_FALLTHRU);
4963 break;
4964 }
4965
4966 /* Update/insert PHI nodes as necessary. */
4967
4968 /* Now update the edges in the CFG. */
4969 e = ssa_redirect_edge (e, dest);
4970
4971 return e;
4972 }
4973
4974 /* Returns true if it is possible to remove edge E by redirecting
4975 it to the destination of the other edge from E->src. */
4976
4977 static bool
4978 gimple_can_remove_branch_p (const_edge e)
4979 {
4980 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
4981 return false;
4982
4983 return true;
4984 }
4985
4986 /* Simple wrapper, as we can always redirect fallthru edges. */
4987
4988 static basic_block
4989 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4990 {
4991 e = gimple_redirect_edge_and_branch (e, dest);
4992 gcc_assert (e);
4993
4994 return NULL;
4995 }
4996
4997
4998 /* Splits basic block BB after statement STMT (but at least after the
4999 labels). If STMT is NULL, BB is split just after the labels. */
5000
5001 static basic_block
5002 gimple_split_block (basic_block bb, void *stmt)
5003 {
5004 gimple_stmt_iterator gsi;
5005 gimple_stmt_iterator gsi_tgt;
5006 gimple act;
5007 gimple_seq list;
5008 basic_block new_bb;
5009 edge e;
5010 edge_iterator ei;
5011
5012 new_bb = create_empty_bb (bb);
5013
5014 /* Redirect the outgoing edges. */
5015 new_bb->succs = bb->succs;
5016 bb->succs = NULL;
5017 FOR_EACH_EDGE (e, ei, new_bb->succs)
5018 e->src = new_bb;
5019
5020 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5021 stmt = NULL;
5022
5023 /* Move everything from GSI to the new basic block. */
5024 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5025 {
5026 act = gsi_stmt (gsi);
5027 if (gimple_code (act) == GIMPLE_LABEL)
5028 continue;
5029
5030 if (!stmt)
5031 break;
5032
5033 if (stmt == act)
5034 {
5035 gsi_next (&gsi);
5036 break;
5037 }
5038 }
5039
5040 if (gsi_end_p (gsi))
5041 return new_bb;
5042
5043 /* Split the statement list - avoid re-creating new containers as this
5044 brings ugly quadratic memory consumption in the inliner.
5045 (We are still quadratic since we need to update stmt BB pointers,
5046 sadly.) */
5047 list = gsi_split_seq_before (&gsi);
5048 set_bb_seq (new_bb, list);
5049 for (gsi_tgt = gsi_start (list);
5050 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5051 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5052
5053 return new_bb;
5054 }
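
/* Usage sketch (illustrative): this is the gimple implementation of
   the split_block () CFG hook, so passes typically write

     edge e = split_block (bb, stmt);
     basic_block new_bb = e->dest;

   after which BB ends just after STMT and NEW_BB holds the remaining
   statements, connected by a single fallthru edge.  */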
5055
5056
5057 /* Moves basic block BB after block AFTER. */
5058
5059 static bool
5060 gimple_move_block_after (basic_block bb, basic_block after)
5061 {
5062 if (bb->prev_bb == after)
5063 return true;
5064
5065 unlink_block (bb);
5066 link_block (bb, after);
5067
5068 return true;
5069 }
5070
5071
5072 /* Return true if basic block BB can be duplicated.  */
5073
5074 static bool
5075 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5076 {
5077 return true;
5078 }
5079
5080 /* Create a duplicate of the basic block BB. NOTE: This does not
5081 preserve SSA form. */
5082
5083 static basic_block
5084 gimple_duplicate_bb (basic_block bb)
5085 {
5086 basic_block new_bb;
5087 gimple_stmt_iterator gsi, gsi_tgt;
5088 gimple_seq phis = phi_nodes (bb);
5089 gimple phi, stmt, copy;
5090
5091 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5092
5093 /* Copy the PHI nodes. We ignore PHI node arguments here because
5094      the incoming edges have not been set up yet.  */
5095 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5096 {
5097 phi = gsi_stmt (gsi);
5098 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5099 create_new_def_for (gimple_phi_result (copy), copy,
5100 gimple_phi_result_ptr (copy));
5101 }
5102
5103 gsi_tgt = gsi_start_bb (new_bb);
5104 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5105 {
5106 def_operand_p def_p;
5107 ssa_op_iter op_iter;
5108
5109 stmt = gsi_stmt (gsi);
5110 if (gimple_code (stmt) == GIMPLE_LABEL)
5111 continue;
5112
5113 /* Create a new copy of STMT and duplicate STMT's virtual
5114 operands. */
5115 copy = gimple_copy (stmt);
5116 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5117
5118 maybe_duplicate_eh_stmt (copy, stmt);
5119 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5120
5121 /* Create new names for all the definitions created by COPY and
5122 add replacement mappings for each new name. */
5123 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5124 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5125 }
5126
5127 return new_bb;
5128 }
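
/* Note (illustrative): the copy returned above has no incoming edges
   yet and its PHI nodes carry results but no arguments; the caller
   is expected to wire up the edges and then use
   add_phi_args_after_copy_bb (below) to fill in the arguments.  */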
5129
5130 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5131
5132 static void
5133 add_phi_args_after_copy_edge (edge e_copy)
5134 {
5135 basic_block bb, bb_copy = e_copy->src, dest;
5136 edge e;
5137 edge_iterator ei;
5138 gimple phi, phi_copy;
5139 tree def;
5140 gimple_stmt_iterator psi, psi_copy;
5141
5142 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5143 return;
5144
5145 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5146
5147 if (e_copy->dest->flags & BB_DUPLICATED)
5148 dest = get_bb_original (e_copy->dest);
5149 else
5150 dest = e_copy->dest;
5151
5152 e = find_edge (bb, dest);
5153 if (!e)
5154 {
5155 /* During loop unrolling the target of the latch edge is copied.
5156 	 In this case we are not looking for the edge to DEST, but for
5157 	 the edge to the duplicated block whose original was DEST.  */
5158 FOR_EACH_EDGE (e, ei, bb->succs)
5159 {
5160 if ((e->dest->flags & BB_DUPLICATED)
5161 && get_bb_original (e->dest) == dest)
5162 break;
5163 }
5164
5165 gcc_assert (e != NULL);
5166 }
5167
5168 for (psi = gsi_start_phis (e->dest),
5169 psi_copy = gsi_start_phis (e_copy->dest);
5170 !gsi_end_p (psi);
5171 gsi_next (&psi), gsi_next (&psi_copy))
5172 {
5173 phi = gsi_stmt (psi);
5174 phi_copy = gsi_stmt (psi_copy);
5175 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5176 add_phi_arg (phi_copy, def, e_copy,
5177 gimple_phi_arg_location_from_edge (phi, e));
5178 }
5179 }
5180
5181
5182 /* Basic block BB_COPY was created by code duplication. Add phi node
5183 arguments for edges going out of BB_COPY. The blocks that were
5184 duplicated have BB_DUPLICATED set. */
5185
5186 void
5187 add_phi_args_after_copy_bb (basic_block bb_copy)
5188 {
5189 edge e_copy;
5190 edge_iterator ei;
5191
5192 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5193 {
5194 add_phi_args_after_copy_edge (e_copy);
5195 }
5196 }
5197
5198 /* Blocks in REGION_COPY array of length N_REGION were created by
5199 duplication of basic blocks. Add phi node arguments for edges
5200 going from these blocks. If E_COPY is not NULL, also add
5201    phi node arguments for its destination.  */
5202
5203 void
5204 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5205 edge e_copy)
5206 {
5207 unsigned i;
5208
5209 for (i = 0; i < n_region; i++)
5210 region_copy[i]->flags |= BB_DUPLICATED;
5211
5212 for (i = 0; i < n_region; i++)
5213 add_phi_args_after_copy_bb (region_copy[i]);
5214 if (e_copy)
5215 add_phi_args_after_copy_edge (e_copy);
5216
5217 for (i = 0; i < n_region; i++)
5218 region_copy[i]->flags &= ~BB_DUPLICATED;
5219 }
5220
5221 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5222 important exit edge EXIT. By important we mean that no SSA name defined
5223 inside region is live over the other exit edges of the region. All entry
5224 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5225 to the duplicate of the region. SSA form, dominance and loop information
5226 is updated. The new basic blocks are stored to REGION_COPY in the same
5227 order as they had in REGION, provided that REGION_COPY is not NULL.
5228 The function returns false if it is unable to copy the region,
5229 true otherwise. */
5230
5231 bool
5232 gimple_duplicate_sese_region (edge entry, edge exit,
5233 basic_block *region, unsigned n_region,
5234 basic_block *region_copy)
5235 {
5236 unsigned i;
5237 bool free_region_copy = false, copying_header = false;
5238 struct loop *loop = entry->dest->loop_father;
5239 edge exit_copy;
5240 VEC (basic_block, heap) *doms;
5241 edge redirected;
5242 int total_freq = 0, entry_freq = 0;
5243 gcov_type total_count = 0, entry_count = 0;
5244
5245 if (!can_copy_bbs_p (region, n_region))
5246 return false;
5247
5248 /* Some sanity checking. Note that we do not check for all possible
5249      misuses of the function.  I.e. if you ask to copy something weird,
5250 it will work, but the state of structures probably will not be
5251 correct. */
5252 for (i = 0; i < n_region; i++)
5253 {
5254 /* We do not handle subloops, i.e. all the blocks must belong to the
5255 same loop. */
5256 if (region[i]->loop_father != loop)
5257 return false;
5258
5259 if (region[i] != entry->dest
5260 && region[i] == loop->header)
5261 return false;
5262 }
5263
5264 set_loop_copy (loop, loop);
5265
5266 /* In case the function is used for loop header copying (which is the primary
5267      use), ensure that EXIT and its copy will be the new latch and entry edges.  */
5268 if (loop->header == entry->dest)
5269 {
5270 copying_header = true;
5271 set_loop_copy (loop, loop_outer (loop));
5272
5273 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5274 return false;
5275
5276 for (i = 0; i < n_region; i++)
5277 if (region[i] != exit->src
5278 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5279 return false;
5280 }
5281
5282 if (!region_copy)
5283 {
5284 region_copy = XNEWVEC (basic_block, n_region);
5285 free_region_copy = true;
5286 }
5287
5288 gcc_assert (!need_ssa_update_p (cfun));
5289
5290 /* Record blocks outside the region that are dominated by something
5291 inside. */
5292 doms = NULL;
5293 initialize_original_copy_tables ();
5294
5295 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5296
5297 if (entry->dest->count)
5298 {
5299 total_count = entry->dest->count;
5300 entry_count = entry->count;
5301 /* Fix up corner cases, to avoid division by zero or creation of negative
5302 frequencies. */
5303 if (entry_count > total_count)
5304 entry_count = total_count;
5305 }
5306 else
5307 {
5308 total_freq = entry->dest->frequency;
5309 entry_freq = EDGE_FREQUENCY (entry);
5310 /* Fix up corner cases, to avoid division by zero or creation of negative
5311 frequencies. */
5312 if (total_freq == 0)
5313 total_freq = 1;
5314 else if (entry_freq > total_freq)
5315 entry_freq = total_freq;
5316 }
5317
5318 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5319 split_edge_bb_loc (entry));
5320 if (total_count)
5321 {
5322 scale_bbs_frequencies_gcov_type (region, n_region,
5323 total_count - entry_count,
5324 total_count);
5325 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5326 total_count);
5327 }
5328 else
5329 {
5330 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5331 total_freq);
5332 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5333 }
5334
5335 if (copying_header)
5336 {
5337 loop->header = exit->dest;
5338 loop->latch = exit->src;
5339 }
5340
5341 /* Redirect the entry and add the phi node arguments. */
5342 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5343 gcc_assert (redirected != NULL);
5344 flush_pending_stmts (entry);
5345
5346 /* Concerning updating of dominators: We must recount dominators
5347 for entry block and its copy. Anything that is outside of the
5348 region, but was dominated by something inside needs recounting as
5349 well. */
5350 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5351 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5352 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5353 VEC_free (basic_block, heap, doms);
5354
5355 /* Add the other PHI node arguments. */
5356 add_phi_args_after_copy (region_copy, n_region, NULL);
5357
5358 /* Update the SSA web. */
5359 update_ssa (TODO_update_ssa);
5360
5361 if (free_region_copy)
5362 free (region_copy);
5363
5364 free_original_copy_tables ();
5365 return true;
5366 }
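
/* Illustrative use: loop header copying (the primary caller noted
   above) passes the preheader edge as ENTRY and the loop exit as
   EXIT, turning a while-style loop that tests COND before the body
   into "if (COND) do { body } while (COND);", so later passes see a
   loop whose body is known to execute at least once.  */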
5367
5368 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5369    are stored to REGION_COPY in the same order in which they appear
5370 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5371 the region, EXIT an exit from it. The condition guarding EXIT
5372 is moved to ENTRY. Returns true if duplication succeeds, false
5373 otherwise.
5374
5375 For example,
5376
5377 some_code;
5378 if (cond)
5379 A;
5380 else
5381 B;
5382
5383 is transformed to
5384
5385 if (cond)
5386 {
5387 some_code;
5388 A;
5389 }
5390 else
5391 {
5392 some_code;
5393 B;
5394 }
5395 */
5396
5397 bool
5398 gimple_duplicate_sese_tail (edge entry, edge exit,
5399 			    basic_block *region, unsigned n_region,
5400 			    basic_block *region_copy)
5401 {
5402 unsigned i;
5403 bool free_region_copy = false;
5404 struct loop *loop = exit->dest->loop_father;
5405 struct loop *orig_loop = entry->dest->loop_father;
5406 basic_block switch_bb, entry_bb, nentry_bb;
5407 VEC (basic_block, heap) *doms;
5408 int total_freq = 0, exit_freq = 0;
5409 gcov_type total_count = 0, exit_count = 0;
5410 edge exits[2], nexits[2], e;
5411   gimple_stmt_iterator gsi, gsi1;
5412 gimple cond_stmt;
5413 edge sorig, snew;
5414 basic_block exit_bb;
5415 basic_block iters_bb;
5416 tree new_rhs;
5417 gimple_stmt_iterator psi;
5418 gimple phi;
5419 tree def;
5420
5421 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5422 exits[0] = exit;
5423 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5424
5425 if (!can_copy_bbs_p (region, n_region))
5426 return false;
5427
5428 initialize_original_copy_tables ();
5429 set_loop_copy (orig_loop, loop);
5430 duplicate_subloops (orig_loop, loop);
5431
5432 if (!region_copy)
5433 {
5434 region_copy = XNEWVEC (basic_block, n_region);
5435 free_region_copy = true;
5436 }
5437
5438 gcc_assert (!need_ssa_update_p (cfun));
5439
5440 /* Record blocks outside the region that are dominated by something
5441 inside. */
5442 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5443
5444 if (exit->src->count)
5445 {
5446 total_count = exit->src->count;
5447 exit_count = exit->count;
5448 /* Fix up corner cases, to avoid division by zero or creation of negative
5449 frequencies. */
5450 if (exit_count > total_count)
5451 exit_count = total_count;
5452 }
5453 else
5454 {
5455 total_freq = exit->src->frequency;
5456 exit_freq = EDGE_FREQUENCY (exit);
5457 /* Fix up corner cases, to avoid division by zero or creation of negative
5458 frequencies. */
5459 if (total_freq == 0)
5460 total_freq = 1;
5461 if (exit_freq > total_freq)
5462 exit_freq = total_freq;
5463 }
5464
5465 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5466 split_edge_bb_loc (exit));
5467 if (total_count)
5468 {
5469 scale_bbs_frequencies_gcov_type (region, n_region,
5470 total_count - exit_count,
5471 total_count);
5472 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5473 total_count);
5474 }
5475 else
5476 {
5477 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5478 total_freq);
5479 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5480 }
5481
5482   /* Create the switch block, and put the exit condition into it.  */
5483 entry_bb = entry->dest;
5484 nentry_bb = get_bb_copy (entry_bb);
5485 if (!last_stmt (entry->src)
5486 || !stmt_ends_bb_p (last_stmt (entry->src)))
5487 switch_bb = entry->src;
5488 else
5489 switch_bb = split_edge (entry);
5490 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5491
5492 gsi = gsi_last_bb (switch_bb);
5493 cond_stmt = last_stmt (exit->src);
5494 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5495 cond_stmt = gimple_copy (cond_stmt);
5496
5497 /* If the block consisting of the exit condition has the latch as
5498 successor, then the body of the loop is executed before
5499      the exit condition is tested.  In that case, moving the
5500      condition to the entry causes the loop to iterate one less
5501      time (which is the desired outcome, since we peel off the
5502      last iteration).  If the body is executed after the
5503      condition, moving the condition to the entry requires
5504      decrementing the iteration count by one.  */
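  /* A concrete, purely illustrative instance: for a do-while style
     loop whose body runs before the test "i < n", the moved guard
     can reuse the RHS n unchanged, since one body execution is
     peeled off the n that remain behind the guard.  For a test-first
     loop, the guard must instead compare against n - 1, which is
     what the MINUS_EXPR built below computes.  */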
5505 if (exits[1]->dest == orig_loop->latch)
5506 new_rhs = gimple_cond_rhs (cond_stmt);
5507 else
5508 {
5509 new_rhs = fold_build2 (MINUS_EXPR, TREE_TYPE (gimple_cond_rhs (cond_stmt)),
5510 gimple_cond_rhs (cond_stmt),
5511 build_int_cst (TREE_TYPE (gimple_cond_rhs (cond_stmt)), 1));
5512
5513 if (TREE_CODE (gimple_cond_rhs (cond_stmt)) == SSA_NAME)
5514 {
5515 iters_bb = gimple_bb (SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)));
5516 for (gsi1 = gsi_start_bb (iters_bb); !gsi_end_p (gsi1); gsi_next (&gsi1))
5517 if (gsi_stmt (gsi1) == SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)))
5518 break;
5519
5520 new_rhs = force_gimple_operand_gsi (&gsi1, new_rhs, true,
5521 					      NULL_TREE, false, GSI_CONTINUE_LINKING);
5522 }
5523 }
5524 gimple_cond_set_rhs (cond_stmt, unshare_expr (new_rhs));
5525 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5526 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5527
5528 sorig = single_succ_edge (switch_bb);
5529 sorig->flags = exits[1]->flags;
5530 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5531
5532 /* Register the new edge from SWITCH_BB in loop exit lists. */
5533 rescan_loop_exit (snew, true, false);
5534
5535 /* Add the PHI node arguments. */
5536 add_phi_args_after_copy (region_copy, n_region, snew);
5537
5538 /* Get rid of now superfluous conditions and associated edges (and phi node
5539 arguments). */
5540 exit_bb = exit->dest;
5541
5542 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5543 PENDING_STMT (e) = NULL;
5544
5545 /* The latch of ORIG_LOOP was copied, and so was the backedge
5546 to the original header. We redirect this backedge to EXIT_BB. */
5547 for (i = 0; i < n_region; i++)
5548 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5549 {
5550 gcc_assert (single_succ_edge (region_copy[i]));
5551 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5552 PENDING_STMT (e) = NULL;
5553 for (psi = gsi_start_phis (exit_bb);
5554 !gsi_end_p (psi);
5555 gsi_next (&psi))
5556 {
5557 phi = gsi_stmt (psi);
5558 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5559 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5560 }
5561 }
5562 e = redirect_edge_and_branch (nexits[0], nexits[1]->dest);
5563 PENDING_STMT (e) = NULL;
5564
5565 /* Anything that is outside of the region, but was dominated by something
5566      inside needs its dominance info updated.  */
5567 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5568 VEC_free (basic_block, heap, doms);
5569 /* Update the SSA web. */
5570 update_ssa (TODO_update_ssa);
5571
5572 if (free_region_copy)
5573 free (region_copy);
5574
5575 free_original_copy_tables ();
5576 return true;
5577 }
5578
5579 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5580 adding blocks when the dominator traversal reaches EXIT. This
5581 function silently assumes that ENTRY strictly dominates EXIT. */
5582
5583 void
5584 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5585 VEC(basic_block,heap) **bbs_p)
5586 {
5587 basic_block son;
5588
5589 for (son = first_dom_son (CDI_DOMINATORS, entry);
5590 son;
5591 son = next_dom_son (CDI_DOMINATORS, son))
5592 {
5593 VEC_safe_push (basic_block, heap, *bbs_p, son);
5594 if (son != exit)
5595 gather_blocks_in_sese_region (son, exit, bbs_p);
5596 }
5597 }
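
/* Usage sketch (illustrative; the VEC is allocated by the caller):

     VEC(basic_block,heap) *bbs = NULL;
     VEC_safe_push (basic_block, heap, bbs, entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

   Note that ENTRY itself is not added by the traversal, only the
   blocks it dominates up to and including EXIT, so callers push
   ENTRY first when they need the whole region.  */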
5598
5599 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5600 The duplicates are recorded in VARS_MAP. */
5601
5602 static void
5603 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5604 tree to_context)
5605 {
5606 tree t = *tp, new_t;
5607 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5608 void **loc;
5609
5610 if (DECL_CONTEXT (t) == to_context)
5611 return;
5612
5613 loc = pointer_map_contains (vars_map, t);
5614
5615 if (!loc)
5616 {
5617 loc = pointer_map_insert (vars_map, t);
5618
5619 if (SSA_VAR_P (t))
5620 {
5621 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5622 add_local_decl (f, new_t);
5623 }
5624 else
5625 {
5626 gcc_assert (TREE_CODE (t) == CONST_DECL);
5627 new_t = copy_node (t);
5628 }
5629 DECL_CONTEXT (new_t) = to_context;
5630
5631 *loc = new_t;
5632 }
5633 else
5634 new_t = (tree) *loc;
5635
5636 *tp = new_t;
5637 }
5638
5639
5640 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5641 VARS_MAP maps old ssa names and var_decls to the new ones. */
5642
5643 static tree
5644 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5645 tree to_context)
5646 {
5647 void **loc;
5648 tree new_name, decl = SSA_NAME_VAR (name);
5649
5650 gcc_assert (is_gimple_reg (name));
5651
5652 loc = pointer_map_contains (vars_map, name);
5653
5654 if (!loc)
5655 {
5656 replace_by_duplicate_decl (&decl, vars_map, to_context);
5657
5658 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5659 if (gimple_in_ssa_p (cfun))
5660 add_referenced_var (decl);
5661
5662 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5663 if (SSA_NAME_IS_DEFAULT_DEF (name))
5664 set_default_def (decl, new_name);
5665 pop_cfun ();
5666
5667 loc = pointer_map_insert (vars_map, name);
5668 *loc = new_name;
5669 }
5670 else
5671 new_name = (tree) *loc;
5672
5673 return new_name;
5674 }
5675
5676 struct move_stmt_d
5677 {
5678 tree orig_block;
5679 tree new_block;
5680 tree from_context;
5681 tree to_context;
5682 struct pointer_map_t *vars_map;
5683 htab_t new_label_map;
5684 struct pointer_map_t *eh_map;
5685 bool remap_decls_p;
5686 };
5687
5688 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5689    contained in *TP if it was ORIG_BLOCK previously and change the
5690 DECL_CONTEXT of every local variable referenced in *TP. */
5691
5692 static tree
5693 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5694 {
5695 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5696 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5697 tree t = *tp;
5698
5699 if (EXPR_P (t))
5700 /* We should never have TREE_BLOCK set on non-statements. */
5701 gcc_assert (!TREE_BLOCK (t));
5702
5703 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5704 {
5705 if (TREE_CODE (t) == SSA_NAME)
5706 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5707 else if (TREE_CODE (t) == LABEL_DECL)
5708 {
5709 if (p->new_label_map)
5710 {
5711 struct tree_map in, *out;
5712 in.base.from = t;
5713 out = (struct tree_map *)
5714 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5715 if (out)
5716 *tp = t = out->to;
5717 }
5718
5719 DECL_CONTEXT (t) = p->to_context;
5720 }
5721 else if (p->remap_decls_p)
5722 {
5723 /* Replace T with its duplicate. T should no longer appear in the
5724 parent function, so this looks wasteful; however, it may appear
5725 in referenced_vars, and more importantly, as virtual operands of
5726 statements, and in alias lists of other variables. It would be
5727 quite difficult to expunge it from all those places. ??? It might
5728 suffice to do this for addressable variables. */
5729 if ((TREE_CODE (t) == VAR_DECL
5730 && !is_global_var (t))
5731 || TREE_CODE (t) == CONST_DECL)
5732 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5733
5734 if (SSA_VAR_P (t)
5735 && gimple_in_ssa_p (cfun))
5736 {
5737 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5738 add_referenced_var (*tp);
5739 pop_cfun ();
5740 }
5741 }
5742 *walk_subtrees = 0;
5743 }
5744 else if (TYPE_P (t))
5745 *walk_subtrees = 0;
5746
5747 return NULL_TREE;
5748 }
5749
5750 /* Helper for move_stmt_r. Given an EH region number for the source
5751    function, map that to the duplicate EH region number in the dest.  */
5752
5753 static int
5754 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5755 {
5756 eh_region old_r, new_r;
5757 void **slot;
5758
5759 old_r = get_eh_region_from_number (old_nr);
5760 slot = pointer_map_contains (p->eh_map, old_r);
5761 new_r = (eh_region) *slot;
5762
5763 return new_r->index;
5764 }
5765
5766 /* Similar, but operate on INTEGER_CSTs. */
5767
5768 static tree
5769 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5770 {
5771 int old_nr, new_nr;
5772
5773 old_nr = tree_low_cst (old_t_nr, 0);
5774 new_nr = move_stmt_eh_region_nr (old_nr, p);
5775
5776 return build_int_cst (NULL, new_nr);
5777 }
5778
5779 /* Like move_stmt_op, but for gimple statements.
5780
5781 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5782 contained in the current statement in *GSI_P and change the
5783 DECL_CONTEXT of every local variable referenced in the current
5784 statement. */
5785
5786 static tree
5787 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5788 struct walk_stmt_info *wi)
5789 {
5790 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5791 gimple stmt = gsi_stmt (*gsi_p);
5792 tree block = gimple_block (stmt);
5793
5794 if (p->orig_block == NULL_TREE
5795 || block == p->orig_block
5796 || block == NULL_TREE)
5797 gimple_set_block (stmt, p->new_block);
5798 #ifdef ENABLE_CHECKING
5799 else if (block != p->new_block)
5800 {
5801 while (block && block != p->orig_block)
5802 block = BLOCK_SUPERCONTEXT (block);
5803 gcc_assert (block);
5804 }
5805 #endif
5806
5807 switch (gimple_code (stmt))
5808 {
5809 case GIMPLE_CALL:
5810 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
5811 {
5812 tree r, fndecl = gimple_call_fndecl (stmt);
5813 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5814 switch (DECL_FUNCTION_CODE (fndecl))
5815 {
5816 case BUILT_IN_EH_COPY_VALUES:
5817 r = gimple_call_arg (stmt, 1);
5818 r = move_stmt_eh_region_tree_nr (r, p);
5819 gimple_call_set_arg (stmt, 1, r);
5820 /* FALLTHRU */
5821
5822 case BUILT_IN_EH_POINTER:
5823 case BUILT_IN_EH_FILTER:
5824 r = gimple_call_arg (stmt, 0);
5825 r = move_stmt_eh_region_tree_nr (r, p);
5826 gimple_call_set_arg (stmt, 0, r);
5827 break;
5828
5829 default:
5830 break;
5831 }
5832 }
5833 break;
5834
5835 case GIMPLE_RESX:
5836 {
5837 int r = gimple_resx_region (stmt);
5838 r = move_stmt_eh_region_nr (r, p);
5839 gimple_resx_set_region (stmt, r);
5840 }
5841 break;
5842
5843 case GIMPLE_EH_DISPATCH:
5844 {
5845 int r = gimple_eh_dispatch_region (stmt);
5846 r = move_stmt_eh_region_nr (r, p);
5847 gimple_eh_dispatch_set_region (stmt, r);
5848 }
5849 break;
5850
5851 case GIMPLE_OMP_RETURN:
5852 case GIMPLE_OMP_CONTINUE:
5853 break;
5854 default:
5855 if (is_gimple_omp (stmt))
5856 {
5857 /* Do not remap variables inside OMP directives. Variables
5858 referenced in clauses and directive header belong to the
5859 parent function and should not be moved into the child
5860 function. */
5861 bool save_remap_decls_p = p->remap_decls_p;
5862 p->remap_decls_p = false;
5863 *handled_ops_p = true;
5864
5865 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
5866 move_stmt_op, wi);
5867
5868 p->remap_decls_p = save_remap_decls_p;
5869 }
5870 break;
5871 }
5872
5873 return NULL_TREE;
5874 }
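
/* Illustrative example: moving a statement such as

     t_1 = __builtin_eh_pointer (3);

   into the child function rewrites its region operand through
   P->EH_MAP, yielding e.g. __builtin_eh_pointer (1) when source
   region 3 was duplicated as region 1 in the destination.  */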
5875
5876 /* Move basic block BB from function CFUN to function DEST_FN. The
5877 block is moved out of the original linked list and placed after
5878 block AFTER in the new list. Also, the block is removed from the
5879 original array of blocks and placed in DEST_FN's array of blocks.
5880    If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
5881 updated to reflect the moved edges.
5882
5883    The local variables are remapped to new instances; VARS_MAP is used
5884 to record the mapping. */
5885
5886 static void
5887 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5888 basic_block after, bool update_edge_count_p,
5889 struct move_stmt_d *d)
5890 {
5891 struct control_flow_graph *cfg;
5892 edge_iterator ei;
5893 edge e;
5894 gimple_stmt_iterator si;
5895 unsigned old_len, new_len;
5896
5897 /* Remove BB from dominance structures. */
5898 delete_from_dominance_info (CDI_DOMINATORS, bb);
5899 if (current_loops)
5900 remove_bb_from_loops (bb);
5901
5902 /* Link BB to the new linked list. */
5903 move_block_after (bb, after);
5904
5905 /* Update the edge count in the corresponding flowgraphs. */
5906 if (update_edge_count_p)
5907 FOR_EACH_EDGE (e, ei, bb->succs)
5908 {
5909 cfun->cfg->x_n_edges--;
5910 dest_cfun->cfg->x_n_edges++;
5911 }
5912
5913 /* Remove BB from the original basic block array. */
5914 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5915 cfun->cfg->x_n_basic_blocks--;
5916
5917 /* Grow DEST_CFUN's basic block array if needed. */
5918 cfg = dest_cfun->cfg;
5919 cfg->x_n_basic_blocks++;
5920 if (bb->index >= cfg->x_last_basic_block)
5921 cfg->x_last_basic_block = bb->index + 1;
5922
5923 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5924 if ((unsigned) cfg->x_last_basic_block >= old_len)
5925 {
5926 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5927 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5928 new_len);
5929 }
5930
5931 VEC_replace (basic_block, cfg->x_basic_block_info,
5932 bb->index, bb);
5933
5934 /* Remap the variables in phi nodes. */
5935 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5936 {
5937 gimple phi = gsi_stmt (si);
5938 use_operand_p use;
5939 tree op = PHI_RESULT (phi);
5940 ssa_op_iter oi;
5941
5942 if (!is_gimple_reg (op))
5943 {
5944 /* Remove the phi nodes for virtual operands (alias analysis will be
5945 run for the new function, anyway). */
5946 remove_phi_node (&si, true);
5947 continue;
5948 }
5949
5950 SET_PHI_RESULT (phi,
5951 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5952 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5953 {
5954 op = USE_FROM_PTR (use);
5955 if (TREE_CODE (op) == SSA_NAME)
5956 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5957 }
5958
5959 gsi_next (&si);
5960 }
5961
5962 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5963 {
5964 gimple stmt = gsi_stmt (si);
5965 struct walk_stmt_info wi;
5966
5967 memset (&wi, 0, sizeof (wi));
5968 wi.info = d;
5969 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5970
5971 if (gimple_code (stmt) == GIMPLE_LABEL)
5972 {
5973 tree label = gimple_label_label (stmt);
5974 int uid = LABEL_DECL_UID (label);
5975
5976 gcc_assert (uid > -1);
5977
5978 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5979 if (old_len <= (unsigned) uid)
5980 {
5981 new_len = 3 * uid / 2 + 1;
5982 VEC_safe_grow_cleared (basic_block, gc,
5983 cfg->x_label_to_block_map, new_len);
5984 }
5985
5986 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5987 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5988
5989 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5990
5991 if (uid >= dest_cfun->cfg->last_label_uid)
5992 dest_cfun->cfg->last_label_uid = uid + 1;
5993 }
5994
5995 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
5996 remove_stmt_from_eh_lp_fn (cfun, stmt);
5997
5998 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5999 gimple_remove_stmt_histograms (cfun, stmt);
6000
6001 /* We cannot leave any operands allocated from the operand caches of
6002 the current function. */
6003 free_stmt_operands (stmt);
6004 push_cfun (dest_cfun);
6005 update_stmt (stmt);
6006 pop_cfun ();
6007 }
6008
6009 FOR_EACH_EDGE (e, ei, bb->succs)
6010 if (e->goto_locus)
6011 {
6012 tree block = e->goto_block;
6013 if (d->orig_block == NULL_TREE
6014 || block == d->orig_block)
6015 e->goto_block = d->new_block;
6016 #ifdef ENABLE_CHECKING
6017 else if (block != d->new_block)
6018 {
6019 while (block && block != d->orig_block)
6020 block = BLOCK_SUPERCONTEXT (block);
6021 gcc_assert (block);
6022 }
6023 #endif
6024 }
6025 }
6026
6027 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6028 the outermost EH region. Use REGION as the incoming base EH region. */
6029
6030 static eh_region
6031 find_outermost_region_in_block (struct function *src_cfun,
6032 basic_block bb, eh_region region)
6033 {
6034 gimple_stmt_iterator si;
6035
6036 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6037 {
6038 gimple stmt = gsi_stmt (si);
6039 eh_region stmt_region;
6040 int lp_nr;
6041
6042 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6043 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6044 if (stmt_region)
6045 {
6046 if (region == NULL)
6047 region = stmt_region;
6048 else if (stmt_region != region)
6049 {
6050 region = eh_region_outermost (src_cfun, stmt_region, region);
6051 gcc_assert (region != NULL);
6052 }
6053 }
6054 }
6055
6056 return region;
6057 }
6058
6059 static tree
6060 new_label_mapper (tree decl, void *data)
6061 {
6062 htab_t hash = (htab_t) data;
6063 struct tree_map *m;
6064 void **slot;
6065
6066 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6067
6068 m = XNEW (struct tree_map);
6069 m->hash = DECL_UID (decl);
6070 m->base.from = decl;
6071 m->to = create_artificial_label (UNKNOWN_LOCATION);
6072 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6073 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6074 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6075
6076 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6077 gcc_assert (*slot == NULL);
6078
6079 *slot = m;
6080
6081 return m->to;
6082 }
6083
6084 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including its
6085 subblocks. */
6086
6087 static void
6088 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6089 tree to_context)
6090 {
6091 tree *tp, t;
6092
6093 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6094 {
6095 t = *tp;
6096 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6097 continue;
6098 replace_by_duplicate_decl (&t, vars_map, to_context);
6099 if (t != *tp)
6100 {
6101 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6102 {
6103 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6104 DECL_HAS_VALUE_EXPR_P (t) = 1;
6105 }
6106 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6107 *tp = t;
6108 }
6109 }
6110
6111 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6112 replace_block_vars_by_duplicates (block, vars_map, to_context);
6113 }
6114
6115 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6116 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6117 single basic block in the original CFG and the new basic block is
6118 returned. DEST_CFUN must not have a CFG yet.
6119
6120 Note that the region need not be a pure SESE region. Blocks inside
6121 the region may contain calls to abort/exit. The only restriction
6122 is that ENTRY_BB should be the only entry point and it must
6123 dominate EXIT_BB.
6124
6125 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6126 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6127 to the new function.
6128
6129 All local variables referenced in the region are assumed to be in
6130 the corresponding BLOCK_VARS and unexpanded variable lists
6131 associated with DEST_CFUN. */
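/* For context: the main client of this interface is OMP expansion,
   which outlines the body of a parallel region into a separate child
   function; the region's blocks are moved wholesale into the child's
   CFG rather than copied. */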
6132
6133 basic_block
6134 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6135 basic_block exit_bb, tree orig_block)
6136 {
6137 VEC(basic_block,heap) *bbs, *dom_bbs;
6138 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6139 basic_block after, bb, *entry_pred, *exit_succ, abb;
6140 struct function *saved_cfun = cfun;
6141 int *entry_flag, *exit_flag;
6142 unsigned *entry_prob, *exit_prob;
6143 unsigned i, num_entry_edges, num_exit_edges;
6144 edge e;
6145 edge_iterator ei;
6146 htab_t new_label_map;
6147 struct pointer_map_t *vars_map, *eh_map;
6148 struct loop *loop = entry_bb->loop_father;
6149 struct move_stmt_d d;
6150
6151 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6152 region. */
6153 gcc_assert (entry_bb != exit_bb
6154 && (!exit_bb
6155 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6156
6157 /* Collect all the blocks in the region. Manually add ENTRY_BB
6158 because it won't be added by dfs_enumerate_from. */
6159 bbs = NULL;
6160 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6161 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6162
6163 /* The blocks that used to be dominated by something in BBS will now be
6164 dominated by the new block. */
6165 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6166 VEC_address (basic_block, bbs),
6167 VEC_length (basic_block, bbs));
6168
6169 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6170 the predecessor edges to ENTRY_BB and the successor edges to
6171 EXIT_BB so that we can re-attach them to the new basic block that
6172 will replace the region. */
6173 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6174 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6175 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6176 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6177 i = 0;
6178 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6179 {
6180 entry_prob[i] = e->probability;
6181 entry_flag[i] = e->flags;
6182 entry_pred[i++] = e->src;
6183 remove_edge (e);
6184 }
6185
6186 if (exit_bb)
6187 {
6188 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6189 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6190 sizeof (basic_block));
6191 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6192 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6193 i = 0;
6194 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6195 {
6196 exit_prob[i] = e->probability;
6197 exit_flag[i] = e->flags;
6198 exit_succ[i++] = e->dest;
6199 remove_edge (e);
6200 }
6201 }
6202 else
6203 {
6204 num_exit_edges = 0;
6205 exit_succ = NULL;
6206 exit_flag = NULL;
6207 exit_prob = NULL;
6208 }
6209
6210 /* Switch context to the child function to initialize DEST_FN's CFG. */
6211 gcc_assert (dest_cfun->cfg == NULL);
6212 push_cfun (dest_cfun);
6213
6214 init_empty_tree_cfg ();
6215
6216 /* Initialize EH information for the new function. */
6217 eh_map = NULL;
6218 new_label_map = NULL;
6219 if (saved_cfun->eh)
6220 {
6221 eh_region region = NULL;
6222
6223 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6224 region = find_outermost_region_in_block (saved_cfun, bb, region);
6225
6226 init_eh_for_function ();
6227 if (region != NULL)
6228 {
6229 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6230 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6231 new_label_mapper, new_label_map);
6232 }
6233 }
6234
6235 pop_cfun ();
6236
6237 /* Move blocks from BBS into DEST_CFUN. */
6238 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6239 after = dest_cfun->cfg->x_entry_block_ptr;
6240 vars_map = pointer_map_create ();
6241
6242 memset (&d, 0, sizeof (d));
6243 d.orig_block = orig_block;
6244 d.new_block = DECL_INITIAL (dest_cfun->decl);
6245 d.from_context = cfun->decl;
6246 d.to_context = dest_cfun->decl;
6247 d.vars_map = vars_map;
6248 d.new_label_map = new_label_map;
6249 d.eh_map = eh_map;
6250 d.remap_decls_p = true;
6251
6252 FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
6253 {
6254 /* No need to update edge counts on the last block. It has
6255 already been updated earlier when we detached the region from
6256 the original CFG. */
6257 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6258 after = bb;
6259 }
6260
6261 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6262 if (orig_block)
6263 {
6264 tree block;
6265 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6266 == NULL_TREE);
6267 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6268 = BLOCK_SUBBLOCKS (orig_block);
6269 for (block = BLOCK_SUBBLOCKS (orig_block);
6270 block; block = BLOCK_CHAIN (block))
6271 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6272 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6273 }
6274
6275 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6276 vars_map, dest_cfun->decl);
6277
6278 if (new_label_map)
6279 htab_delete (new_label_map);
6280 if (eh_map)
6281 pointer_map_destroy (eh_map);
6282 pointer_map_destroy (vars_map);
6283
6284 /* Rewire the entry and exit blocks. The successor to the entry
6285 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6286 the child function. Similarly, the predecessor of DEST_FN's
6287 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6288 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6289 various CFG manipulation functions get to the right CFG.
6290
6291 FIXME, this is silly. The CFG ought to become a parameter to
6292 these helpers. */
6293 push_cfun (dest_cfun);
6294 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6295 if (exit_bb)
6296 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6297 pop_cfun ();
6298
6299 /* Back in the original function, the SESE region has disappeared;
6300 create a new basic block in its place. */
6301 bb = create_empty_bb (entry_pred[0]);
6302 if (current_loops)
6303 add_bb_to_loop (bb, loop);
6304 for (i = 0; i < num_entry_edges; i++)
6305 {
6306 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6307 e->probability = entry_prob[i];
6308 }
6309
6310 for (i = 0; i < num_exit_edges; i++)
6311 {
6312 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6313 e->probability = exit_prob[i];
6314 }
6315
6316 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6317 FOR_EACH_VEC_ELT (basic_block, dom_bbs, i, abb)
6318 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6319 VEC_free (basic_block, heap, dom_bbs);
6320
6321 if (exit_bb)
6322 {
6323 free (exit_prob);
6324 free (exit_flag);
6325 free (exit_succ);
6326 }
6327 free (entry_prob);
6328 free (entry_flag);
6329 free (entry_pred);
6330 VEC_free (basic_block, heap, bbs);
6331
6332 return bb;
6333 }
6334
6335
6336 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6337 tree-pass.h). */
6338
6339 void
6340 dump_function_to_file (tree fn, FILE *file, int flags)
6341 {
6342 tree arg, var;
6343 struct function *dsf;
6344 bool ignore_topmost_bind = false, any_var = false;
6345 basic_block bb;
6346 tree chain;
6347
6348 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6349
6350 arg = DECL_ARGUMENTS (fn);
6351 while (arg)
6352 {
6353 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6354 fprintf (file, " ");
6355 print_generic_expr (file, arg, dump_flags);
6356 if (flags & TDF_VERBOSE)
6357 print_node (file, "", arg, 4);
6358 if (DECL_CHAIN (arg))
6359 fprintf (file, ", ");
6360 arg = DECL_CHAIN (arg);
6361 }
6362 fprintf (file, ")\n");
6363
6364 if (flags & TDF_VERBOSE)
6365 print_node (file, "", fn, 2);
6366
6367 dsf = DECL_STRUCT_FUNCTION (fn);
6368 if (dsf && (flags & TDF_EH))
6369 dump_eh_tree (file, dsf);
6370
6371 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6372 {
6373 dump_node (fn, TDF_SLIM | flags, file);
6374 return;
6375 }
6376
6377 /* Switch CFUN to point to FN. */
6378 push_cfun (DECL_STRUCT_FUNCTION (fn));
6379
6380 /* When GIMPLE is lowered, the variables are no longer available in
6381 BIND_EXPRs, so display them separately. */
6382 if (cfun && cfun->decl == fn && !VEC_empty (tree, cfun->local_decls))
6383 {
6384 unsigned ix;
6385 ignore_topmost_bind = true;
6386
6387 fprintf (file, "{\n");
6388 FOR_EACH_LOCAL_DECL (cfun, ix, var)
6389 {
6390 print_generic_decl (file, var, flags);
6391 if (flags & TDF_VERBOSE)
6392 print_node (file, "", var, 4);
6393 fprintf (file, "\n");
6394
6395 any_var = true;
6396 }
6397 }
6398
6399 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6400 {
6401 /* If the CFG has been built, emit a CFG-based dump. */
6402 check_bb_profile (ENTRY_BLOCK_PTR, file);
6403 if (!ignore_topmost_bind)
6404 fprintf (file, "{\n");
6405
6406 if (any_var && n_basic_blocks)
6407 fprintf (file, "\n");
6408
6409 FOR_EACH_BB (bb)
6410 gimple_dump_bb (bb, file, 2, flags);
6411
6412 fprintf (file, "}\n");
6413 check_bb_profile (EXIT_BLOCK_PTR, file);
6414 }
6415 else if (DECL_SAVED_TREE (fn) == NULL)
6416 {
6417 /* The function is now in GIMPLE form but the CFG has not been
6418 built yet. Emit the single sequence of GIMPLE statements
6419 that make up its body. */
6420 gimple_seq body = gimple_body (fn);
6421
6422 if (gimple_seq_first_stmt (body)
6423 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6424 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6425 print_gimple_seq (file, body, 0, flags);
6426 else
6427 {
6428 if (!ignore_topmost_bind)
6429 fprintf (file, "{\n");
6430
6431 if (any_var)
6432 fprintf (file, "\n");
6433
6434 print_gimple_seq (file, body, 2, flags);
6435 fprintf (file, "}\n");
6436 }
6437 }
6438 else
6439 {
6440 int indent;
6441
6442 /* Make a tree based dump. */
6443 chain = DECL_SAVED_TREE (fn);
6444
6445 if (chain && TREE_CODE (chain) == BIND_EXPR)
6446 {
6447 if (ignore_topmost_bind)
6448 {
6449 chain = BIND_EXPR_BODY (chain);
6450 indent = 2;
6451 }
6452 else
6453 indent = 0;
6454 }
6455 else
6456 {
6457 if (!ignore_topmost_bind)
6458 fprintf (file, "{\n");
6459 indent = 2;
6460 }
6461
6462 if (any_var)
6463 fprintf (file, "\n");
6464
6465 print_generic_stmt_indented (file, chain, flags, indent);
6466 if (ignore_topmost_bind)
6467 fprintf (file, "}\n");
6468 }
6469
6470 if (flags & TDF_ENUMERATE_LOCALS)
6471 dump_enumerated_decls (file, flags);
6472 fprintf (file, "\n\n");
6473
6474 /* Restore CFUN. */
6475 pop_cfun ();
6476 }
6477
6478
6479 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree-pass.h). */
6480
6481 DEBUG_FUNCTION void
6482 debug_function (tree fn, int flags)
6483 {
6484 dump_function_to_file (fn, stderr, flags);
6485 }
6486
6487
6488 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6489
6490 static void
6491 print_pred_bbs (FILE *file, basic_block bb)
6492 {
6493 edge e;
6494 edge_iterator ei;
6495
6496 FOR_EACH_EDGE (e, ei, bb->preds)
6497 fprintf (file, "bb_%d ", e->src->index);
6498 }
6499
6500
6501 /* Print on FILE the indexes for the successors of basic_block BB. */
6502
6503 static void
6504 print_succ_bbs (FILE *file, basic_block bb)
6505 {
6506 edge e;
6507 edge_iterator ei;
6508
6509 FOR_EACH_EDGE (e, ei, bb->succs)
6510 fprintf (file, "bb_%d ", e->dest->index);
6511 }
6512
6513 /* Print basic block BB to FILE according to the VERBOSITY level. */
6514
6515 void
6516 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6517 {
6518 char *s_indent = (char *) alloca ((size_t) indent + 1);
6519 memset ((void *) s_indent, ' ', (size_t) indent);
6520 s_indent[indent] = '\0';
6521
6522 /* Print basic_block's header. */
6523 if (verbosity >= 2)
6524 {
6525 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6526 print_pred_bbs (file, bb);
6527 fprintf (file, "}, succs = {");
6528 print_succ_bbs (file, bb);
6529 fprintf (file, "})\n");
6530 }
6531
6532 /* Print basic_block's body. */
6533 if (verbosity >= 3)
6534 {
6535 fprintf (file, "%s {\n", s_indent);
6536 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6537 fprintf (file, "%s }\n", s_indent);
6538 }
6539 }
6540
6541 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6542
6543 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
6544 the VERBOSITY level, this outputs either the contents of the loop
6545 or just its structure. */
6546
6547 static void
6548 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6549 {
6550 char *s_indent;
6551 basic_block bb;
6552
6553 if (loop == NULL)
6554 return;
6555
6556 s_indent = (char *) alloca ((size_t) indent + 1);
6557 memset ((void *) s_indent, ' ', (size_t) indent);
6558 s_indent[indent] = '\0';
6559
6560 /* Print loop's header. */
6561 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6562 loop->num, loop->header->index, loop->latch->index);
6563 fprintf (file, ", niter = ");
6564 print_generic_expr (file, loop->nb_iterations, 0);
6565
6566 if (loop->any_upper_bound)
6567 {
6568 fprintf (file, ", upper_bound = ");
6569 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6570 }
6571
6572 if (loop->any_estimate)
6573 {
6574 fprintf (file, ", estimate = ");
6575 dump_double_int (file, loop->nb_iterations_estimate, true);
6576 }
6577 fprintf (file, ")\n");
6578
6579 /* Print loop's body. */
6580 if (verbosity >= 1)
6581 {
6582 fprintf (file, "%s{\n", s_indent);
6583 FOR_EACH_BB (bb)
6584 if (bb->loop_father == loop)
6585 print_loops_bb (file, bb, indent, verbosity);
6586
6587 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6588 fprintf (file, "%s}\n", s_indent);
6589 }
6590 }
6591
6592 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6593 spaces. Depending on the VERBOSITY level, this outputs either the
6594 contents of each loop or just its structure. */
6595
6596 static void
6597 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6598 {
6599 if (loop == NULL)
6600 return;
6601
6602 print_loop (file, loop, indent, verbosity);
6603 print_loop_and_siblings (file, loop->next, indent, verbosity);
6604 }
6605
6606 /* Starting from the CFG entry block, pretty print on FILE the
6607 structure of every loop in the current function. */
6608
6609 void
6610 print_loops (FILE *file, int verbosity)
6611 {
6612 basic_block bb;
6613
6614 bb = ENTRY_BLOCK_PTR;
6615 if (bb && bb->loop_father)
6616 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6617 }
6618
6619
6620 /* Debug the loop structure at the tree level, at some VERBOSITY level. */
6621
6622 DEBUG_FUNCTION void
6623 debug_loops (int verbosity)
6624 {
6625 print_loops (stderr, verbosity);
6626 }
6627
6628 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6629
6630 DEBUG_FUNCTION void
6631 debug_loop (struct loop *loop, int verbosity)
6632 {
6633 print_loop (stderr, loop, 0, verbosity);
6634 }
6635
6636 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6637 level. */
6638
6639 DEBUG_FUNCTION void
6640 debug_loop_num (unsigned num, int verbosity)
6641 {
6642 debug_loop (get_loop (num), verbosity);
6643 }
6644
6645 /* Return true if BB ends with a call, possibly followed by some
6646 instructions that must stay with the call. Return false
6647 otherwise. */
6648
6649 static bool
6650 gimple_block_ends_with_call_p (basic_block bb)
6651 {
6652 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6653 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6654 }
6655
6656
6657 /* Return true if BB ends with a conditional branch. Return false
6658 otherwise. */
6659
6660 static bool
6661 gimple_block_ends_with_condjump_p (const_basic_block bb)
6662 {
6663 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6664 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6665 }
6666
6667
6668 /* Return true if we need to add fake edge to exit at statement T.
6669 Helper function for gimple_flow_call_edges_add. */
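/* For example, a call to an arbitrary external function might never
   return (it could call exit() internally), so instrumentation must
   assume control can leave the block at the call site; see the
   fork()/__gcov_fork() note below for a subtler case. */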
6670
6671 static bool
6672 need_fake_edge_p (gimple t)
6673 {
6674 tree fndecl = NULL_TREE;
6675 int call_flags = 0;
6676
6677 /* NORETURN and LONGJMP calls already have an edge to exit.
6678 CONST and PURE calls do not need one.
6679 We don't currently check for CONST and PURE here, although
6680 it would be a good idea, because those attributes are
6681 figured out from the RTL in mark_constant_function, and
6682 the counter incrementation code from -fprofile-arcs
6683 leads to different results from -fbranch-probabilities. */
6684 if (is_gimple_call (t))
6685 {
6686 fndecl = gimple_call_fndecl (t);
6687 call_flags = gimple_call_flags (t);
6688 }
6689
6690 if (is_gimple_call (t)
6691 && fndecl
6692 && DECL_BUILT_IN (fndecl)
6693 && (call_flags & ECF_NOTHROW)
6694 && !(call_flags & ECF_RETURNS_TWICE)
6695 /* fork() doesn't really return twice, but the effect of
6696 wrapping it in __gcov_fork() which calls __gcov_flush()
6697 and clears the counters before forking has the same
6698 effect as returning twice. Force a fake edge. */
6699 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6700 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6701 return false;
6702
6703 if (is_gimple_call (t)
6704 && !(call_flags & ECF_NORETURN))
6705 return true;
6706
6707 if (gimple_code (t) == GIMPLE_ASM
6708 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6709 return true;
6710
6711 return false;
6712 }
6713
6714
6715 /* Add fake edges to the function exit for any non-constant and
6716 non-noreturn calls, and for volatile inline assembly, in the
6717 blocks specified by the bitmap BLOCKS, or in the whole CFG if
6718 BLOCKS is zero. Return the number of blocks that were split.
6719
6720 The goal is to expose cases in which entering a basic block does
6721 not imply that all subsequent instructions must be executed. */
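/* As a sketch of the transformation: a block containing
     a = foo ();
     b = a + 1;
   is split after the call, and the half ending in the call gets a
   fake edge to EXIT, so a missed return from foo() cannot credit
   "b = a + 1" with an execution it never had. */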
6722
6723 static int
6724 gimple_flow_call_edges_add (sbitmap blocks)
6725 {
6726 int i;
6727 int blocks_split = 0;
6728 int last_bb = last_basic_block;
6729 bool check_last_block = false;
6730
6731 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6732 return 0;
6733
6734 if (! blocks)
6735 check_last_block = true;
6736 else
6737 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6738
6739 /* In the last basic block, before epilogue generation, there will be
6740 a fallthru edge to EXIT. Special care is required if the last insn
6741 of the last basic block is a call because make_edge folds duplicate
6742 edges, which would result in the fallthru edge also being marked
6743 fake, which would result in the fallthru edge being removed by
6744 remove_fake_edges, which would result in an invalid CFG.
6745
6746 Moreover, we can't elide the outgoing fake edge, since the block
6747 profiler needs to take this into account in order to solve the minimal
6748 spanning tree in the case that the call doesn't return.
6749
6750 Handle this by adding a dummy instruction in a new last basic block. */
6751 if (check_last_block)
6752 {
6753 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6754 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6755 gimple t = NULL;
6756
6757 if (!gsi_end_p (gsi))
6758 t = gsi_stmt (gsi);
6759
6760 if (t && need_fake_edge_p (t))
6761 {
6762 edge e;
6763
6764 e = find_edge (bb, EXIT_BLOCK_PTR);
6765 if (e)
6766 {
6767 gsi_insert_on_edge (e, gimple_build_nop ());
6768 gsi_commit_edge_inserts ();
6769 }
6770 }
6771 }
6772
6773 /* Now add fake edges to the function exit for any non-constant
6774 calls, since there is no way we can determine whether they will
6775 return or not. */
6776 for (i = 0; i < last_bb; i++)
6777 {
6778 basic_block bb = BASIC_BLOCK (i);
6779 gimple_stmt_iterator gsi;
6780 gimple stmt, last_stmt;
6781
6782 if (!bb)
6783 continue;
6784
6785 if (blocks && !TEST_BIT (blocks, i))
6786 continue;
6787
6788 gsi = gsi_last_bb (bb);
6789 if (!gsi_end_p (gsi))
6790 {
6791 last_stmt = gsi_stmt (gsi);
6792 do
6793 {
6794 stmt = gsi_stmt (gsi);
6795 if (need_fake_edge_p (stmt))
6796 {
6797 edge e;
6798
6799 /* The handling above of the final block before the
6800 epilogue should be enough to verify that there is
6801 no edge to the exit block in CFG already.
6802 Calling make_edge in such case would cause us to
6803 mark that edge as fake and remove it later. */
6804 #ifdef ENABLE_CHECKING
6805 if (stmt == last_stmt)
6806 {
6807 e = find_edge (bb, EXIT_BLOCK_PTR);
6808 gcc_assert (e == NULL);
6809 }
6810 #endif
6811
6812 /* Note that the following may create a new basic block
6813 and renumber the existing basic blocks. */
6814 if (stmt != last_stmt)
6815 {
6816 e = split_block (bb, stmt);
6817 if (e)
6818 blocks_split++;
6819 }
6820 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6821 }
6822 gsi_prev (&gsi);
6823 }
6824 while (!gsi_end_p (gsi));
6825 }
6826 }
6827
6828 if (blocks_split)
6829 verify_flow_info ();
6830
6831 return blocks_split;
6832 }
6833
6834 /* Purge dead abnormal call edges from basic block BB. */
6835
6836 bool
6837 gimple_purge_dead_abnormal_call_edges (basic_block bb)
6838 {
6839 bool changed = gimple_purge_dead_eh_edges (bb);
6840
6841 if (cfun->has_nonlocal_label)
6842 {
6843 gimple stmt = last_stmt (bb);
6844 edge_iterator ei;
6845 edge e;
6846
6847 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
6848 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6849 {
6850 if (e->flags & EDGE_ABNORMAL)
6851 {
6852 remove_edge (e);
6853 changed = true;
6854 }
6855 else
6856 ei_next (&ei);
6857 }
6858
6859 /* See gimple_purge_dead_eh_edges below. */
6860 if (changed)
6861 free_dominance_info (CDI_DOMINATORS);
6862 }
6863
6864 return changed;
6865 }
6866
6867 /* Removes edge E and all the blocks dominated by it, and updates dominance
6868 information. The IL in E->src needs to be updated separately.
6869 If dominance info is not available, only the edge E is removed. */
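/* E.g., removing the lone edge into the THEN arm of a diamond deletes
   that arm entirely, and the join block's immediate dominator is then
   recomputed by the iterate_fix_dominators call below. */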
6870
6871 void
6872 remove_edge_and_dominated_blocks (edge e)
6873 {
6874 VEC (basic_block, heap) *bbs_to_remove = NULL;
6875 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6876 bitmap df, df_idom;
6877 edge f;
6878 edge_iterator ei;
6879 bool none_removed = false;
6880 unsigned i;
6881 basic_block bb, dbb;
6882 bitmap_iterator bi;
6883
6884 if (!dom_info_available_p (CDI_DOMINATORS))
6885 {
6886 remove_edge (e);
6887 return;
6888 }
6889
6890 /* No updating is needed for edges to exit. */
6891 if (e->dest == EXIT_BLOCK_PTR)
6892 {
6893 if (cfgcleanup_altered_bbs)
6894 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6895 remove_edge (e);
6896 return;
6897 }
6898
6899 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6900 that is not dominated by E->dest, then this set is empty. Otherwise,
6901 all the basic blocks dominated by E->dest are removed.
6902
6903 Also, to DF_IDOM we store the immediate dominators of the blocks in
6904 the dominance frontier of E (i.e., of the successors of the
6905 removed blocks, if there are any, and of E->dest otherwise). */
6906 FOR_EACH_EDGE (f, ei, e->dest->preds)
6907 {
6908 if (f == e)
6909 continue;
6910
6911 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6912 {
6913 none_removed = true;
6914 break;
6915 }
6916 }
6917
6918 df = BITMAP_ALLOC (NULL);
6919 df_idom = BITMAP_ALLOC (NULL);
6920
6921 if (none_removed)
6922 bitmap_set_bit (df_idom,
6923 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6924 else
6925 {
6926 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
6927 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
6928 {
6929 FOR_EACH_EDGE (f, ei, bb->succs)
6930 {
6931 if (f->dest != EXIT_BLOCK_PTR)
6932 bitmap_set_bit (df, f->dest->index);
6933 }
6934 }
6935 FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
6936 bitmap_clear_bit (df, bb->index);
6937
6938 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6939 {
6940 bb = BASIC_BLOCK (i);
6941 bitmap_set_bit (df_idom,
6942 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6943 }
6944 }
6945
6946 if (cfgcleanup_altered_bbs)
6947 {
6948 /* Record the set of the altered basic blocks. */
6949 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6950 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6951 }
6952
6953 /* Remove E and the cancelled blocks. */
6954 if (none_removed)
6955 remove_edge (e);
6956 else
6957 {
6958 /* Walk backwards so as to get a chance to substitute all
6959 released DEFs into debug stmts. See
6960 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
6961 details. */
6962 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
6963 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
6964 }
6965
6966 /* Update the dominance information. The immediate dominator may change only
6967 for blocks whose immediate dominator belongs to DF_IDOM:
6968
6969 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6970 removal. Let Z be an arbitrary block such that idom(Z) = Y and
6971 Z dominates X after the removal. Before removal, there exists a path P
6972 from Y to X that avoids Z. Let F be the last edge on P that is
6973 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6974 dominates W, and because of P, Z does not dominate W), and W belongs to
6975 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6976 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6977 {
6978 bb = BASIC_BLOCK (i);
6979 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6980 dbb;
6981 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6982 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6983 }
6984
6985 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6986
6987 BITMAP_FREE (df);
6988 BITMAP_FREE (df_idom);
6989 VEC_free (basic_block, heap, bbs_to_remove);
6990 VEC_free (basic_block, heap, bbs_to_fix_dom);
6991 }
6992
6993 /* Purge dead EH edges from basic block BB. */
6994
6995 bool
6996 gimple_purge_dead_eh_edges (basic_block bb)
6997 {
6998 bool changed = false;
6999 edge e;
7000 edge_iterator ei;
7001 gimple stmt = last_stmt (bb);
7002
7003 if (stmt && stmt_can_throw_internal (stmt))
7004 return false;
7005
7006 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7007 {
7008 if (e->flags & EDGE_EH)
7009 {
7010 remove_edge_and_dominated_blocks (e);
7011 changed = true;
7012 }
7013 else
7014 ei_next (&ei);
7015 }
7016
7017 return changed;
7018 }
7019
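/* Purge dead EH edges from every basic block whose index is set in
   BLOCKS. Return true if any edge was removed. */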
7020 bool
7021 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7022 {
7023 bool changed = false;
7024 unsigned i;
7025 bitmap_iterator bi;
7026
7027 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7028 {
7029 basic_block bb = BASIC_BLOCK (i);
7030
7031 /* Earlier gimple_purge_dead_eh_edges could have removed
7032 this basic block already. */
7033 gcc_assert (bb || changed);
7034 if (bb != NULL)
7035 changed |= gimple_purge_dead_eh_edges (bb);
7036 }
7037
7038 return changed;
7039 }
7040
7041 /* This function is called whenever a new edge is created or
7042 redirected. */
7043
7044 static void
7045 gimple_execute_on_growing_pred (edge e)
7046 {
7047 basic_block bb = e->dest;
7048
7049 if (!gimple_seq_empty_p (phi_nodes (bb)))
7050 reserve_phi_args_for_new_edge (bb);
7051 }
7052
7053 /* This function is called immediately before edge E is removed from
7054 the edge vector E->dest->preds. */
7055
7056 static void
7057 gimple_execute_on_shrinking_pred (edge e)
7058 {
7059 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7060 remove_phi_args (e);
7061 }
7062
7063 /*---------------------------------------------------------------------------
7064 Helper functions for Loop versioning
7065 ---------------------------------------------------------------------------*/
7066
7067 /* Adjust phi nodes for the 'first' basic block. The 'second' basic block
7068 is a copy of 'first'; both are dominated by the 'new_head' basic block.
7069 When 'new_head' was created by splitting 'second's incoming edge, the
7070 edge from 'new_head' to 'second' received phi arguments from
7071 split_edge(). Later, an additional edge 'e' was created to connect
7072 'new_head' and 'first'. This routine copies onto 'e' the phi args that
7073 the new_head-to-second edge received during edge splitting. */
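/* A sketch of the CFG shape this operates on (edges point downward,
   names as in the comment above):

            new_head
            /      \
          e/        \e2
          /          \
       first        second

   Arguments found at E2's slot in SECOND's phis are copied to E's
   slot in the corresponding phis of FIRST. */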
7074
7075 static void
7076 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7077 basic_block new_head, edge e)
7078 {
7079 gimple phi1, phi2;
7080 gimple_stmt_iterator psi1, psi2;
7081 tree def;
7082 edge e2 = find_edge (new_head, second);
7083
7084 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7085 edge, we should always have an edge from NEW_HEAD to SECOND. */
7086 gcc_assert (e2 != NULL);
7087
7088 /* Walk all phi nodes of basic block 'second' and add their args to
7089 edge 'e' for head 'first'. The phi nodes appear in matching order. */
7090
7091 for (psi2 = gsi_start_phis (second),
7092 psi1 = gsi_start_phis (first);
7093 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7094 gsi_next (&psi2), gsi_next (&psi1))
7095 {
7096 phi1 = gsi_stmt (psi1);
7097 phi2 = gsi_stmt (psi2);
7098 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7099 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7100 }
7101 }
7102
7103
7104 /* Add an if-else statement to COND_BB with condition COND_EXPR.
7105 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7106 the destination of the ELSE part. */
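/* After this runs, COND_BB ends in a GIMPLE_COND equivalent to
   "if (COND_EXPR) goto second_head; else goto first_head;": its
   existing single successor edge is demoted from fall-through to the
   FALSE edge, and the TRUE edge is added later by the generic
   loop-versioning machinery. */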
7107
7108 static void
7109 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7110 basic_block second_head ATTRIBUTE_UNUSED,
7111 basic_block cond_bb, void *cond_e)
7112 {
7113 gimple_stmt_iterator gsi;
7114 gimple new_cond_expr;
7115 tree cond_expr = (tree) cond_e;
7116 edge e0;
7117
7118 /* Build new conditional expr */
7119 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7120 NULL_TREE, NULL_TREE);
7121
7122 /* Add new cond in cond_bb. */
7123 gsi = gsi_last_bb (cond_bb);
7124 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7125
7126 /* Adjust edges appropriately to connect new head with first head
7127 as well as second head. */
7128 e0 = single_succ_edge (cond_bb);
7129 e0->flags &= ~EDGE_FALLTHRU;
7130 e0->flags |= EDGE_FALSE_VALUE;
7131 }
7132
7133 struct cfg_hooks gimple_cfg_hooks = {
7134 "gimple",
7135 gimple_verify_flow_info,
7136 gimple_dump_bb, /* dump_bb */
7137 create_bb, /* create_basic_block */
7138 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7139 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7140 gimple_can_remove_branch_p, /* can_remove_branch_p */
7141 remove_bb, /* delete_basic_block */
7142 gimple_split_block, /* split_block */
7143 gimple_move_block_after, /* move_block_after */
7144 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7145 gimple_merge_blocks, /* merge_blocks */
7146 gimple_predict_edge, /* predict_edge */
7147 gimple_predicted_by_p, /* predicted_by_p */
7148 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7149 gimple_duplicate_bb, /* duplicate_block */
7150 gimple_split_edge, /* split_edge */
7151 gimple_make_forwarder_block, /* make_forwarder_block */
7152 NULL, /* tidy_fallthru_edge */
7153 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7154 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7155 gimple_flow_call_edges_add, /* flow_call_edges_add */
7156 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7157 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7158 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7159 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7160 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
7161 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7162 flush_pending_stmts /* flush_pending_stmts */
7163 };
7164
7165
7166 /* Split all critical edges. */
7167
7168 static unsigned int
7169 split_critical_edges (void)
7170 {
7171 basic_block bb;
7172 edge e;
7173 edge_iterator ei;
7174
7175 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7176 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7177 mappings around the calls to split_edge. */
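/* Reminder: an edge is critical when its source block has more than
   one successor and its destination block has more than one
   predecessor; no statement can be inserted on such an edge without
   splitting it first. */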
7178 start_recording_case_labels ();
7179 FOR_ALL_BB (bb)
7180 {
7181 FOR_EACH_EDGE (e, ei, bb->succs)
7182 {
7183 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7184 split_edge (e);
7185 /* PRE inserts statements into edges and expects that,
7186 since split_critical_edges was done beforehand, committing edge
7187 insertions will not split more edges. In addition to critical
7188 edges we must therefore also split edges whose source block ends
7189 in a control flow statement, such as RESX, when the destination
7190 cannot receive the insertion directly. This matches the logic in
7191 gimple_find_edge_insert_loc. */
7192 else if ((!single_pred_p (e->dest)
7193 || !gimple_seq_empty_p (phi_nodes (e->dest))
7194 || e->dest == EXIT_BLOCK_PTR)
7195 && e->src != ENTRY_BLOCK_PTR
7196 && !(e->flags & EDGE_ABNORMAL))
7197 {
7198 gimple_stmt_iterator gsi;
7199
7200 gsi = gsi_last_bb (e->src);
7201 if (!gsi_end_p (gsi)
7202 && stmt_ends_bb_p (gsi_stmt (gsi))
7203 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7204 && !gimple_call_builtin_p (gsi_stmt (gsi),
7205 BUILT_IN_RETURN)))
7206 split_edge (e);
7207 }
7208 }
7209 }
7210 end_recording_case_labels ();
7211 return 0;
7212 }
7213
7214 struct gimple_opt_pass pass_split_crit_edges =
7215 {
7216 {
7217 GIMPLE_PASS,
7218 "crited", /* name */
7219 NULL, /* gate */
7220 split_critical_edges, /* execute */
7221 NULL, /* sub */
7222 NULL, /* next */
7223 0, /* static_pass_number */
7224 TV_TREE_SPLIT_EDGES, /* tv_id */
7225 PROP_cfg, /* properties_required */
7226 PROP_no_crit_edges, /* properties_provided */
7227 0, /* properties_destroyed */
7228 0, /* todo_flags_start */
7229 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
7230 }
7231 };
7232
7233
7234 /* Build a ternary operation and gimplify it. Emit code before GSI.
7235 Return the gimple_val holding the result. */
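/* A minimal (hypothetical) use, selecting between two values:
     tree t = gimplify_build3 (&gsi, COND_EXPR, type, cond, a, b);
   Any statements needed to gimplify the folded expression are
   emitted before GSI, and T is a valid gimple operand. */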
7236
7237 tree
7238 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7239 tree type, tree a, tree b, tree c)
7240 {
7241 tree ret;
7242 location_t loc = gimple_location (gsi_stmt (*gsi));
7243
7244 ret = fold_build3_loc (loc, code, type, a, b, c);
7245 STRIP_NOPS (ret);
7246
7247 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7248 GSI_SAME_STMT);
7249 }
7250
7251 /* Build a binary operation and gimplify it. Emit code before GSI.
7252 Return the gimple_val holding the result. */
7253
7254 tree
7255 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7256 tree type, tree a, tree b)
7257 {
7258 tree ret;
7259
7260 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7261 STRIP_NOPS (ret);
7262
7263 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7264 GSI_SAME_STMT);
7265 }
7266
7267 /* Build a unary operation and gimplify it. Emit code before GSI.
7268 Return the gimple_val holding the result. */
7269
7270 tree
7271 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7272 tree a)
7273 {
7274 tree ret;
7275
7276 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7277 STRIP_NOPS (ret);
7278
7279 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7280 GSI_SAME_STMT);
7281 }
7282
7283
7284 \f
7285 /* Emit return warnings. */
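/* Two diagnostics are issued below: a function declared noreturn
   (TREE_THIS_VOLATILE on its decl) that can nevertheless reach EXIT
   "does return", and a non-void function that reaches EXIT via a
   plain "return;" triggers -Wreturn-type. */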
7286
7287 static unsigned int
7288 execute_warn_function_return (void)
7289 {
7290 source_location location;
7291 gimple last;
7292 edge e;
7293 edge_iterator ei;
7294
7295 /* If we have a path to EXIT, then we do return. */
7296 if (TREE_THIS_VOLATILE (cfun->decl)
7297 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7298 {
7299 location = UNKNOWN_LOCATION;
7300 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7301 {
7302 last = last_stmt (e->src);
7303 if ((gimple_code (last) == GIMPLE_RETURN
7304 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7305 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7306 break;
7307 }
7308 if (location == UNKNOWN_LOCATION)
7309 location = cfun->function_end_locus;
7310 warning_at (location, 0, "%<noreturn%> function does return");
7311 }
7312
7313 /* If we see "return;" in some basic block, then we do reach the end
7314 without returning a value. */
7315 else if (warn_return_type
7316 && !TREE_NO_WARNING (cfun->decl)
7317 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7318 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7319 {
7320 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7321 {
7322 gimple last = last_stmt (e->src);
7323 if (gimple_code (last) == GIMPLE_RETURN
7324 && gimple_return_retval (last) == NULL
7325 && !gimple_no_warning_p (last))
7326 {
7327 location = gimple_location (last);
7328 if (location == UNKNOWN_LOCATION)
7329 location = cfun->function_end_locus;
7330 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7331 TREE_NO_WARNING (cfun->decl) = 1;
7332 break;
7333 }
7334 }
7335 }
7336 return 0;
7337 }
7338
7339
7340 /* Given a basic block B which ends with a conditional and has
7341 precisely two successors, determine which of the edges is taken if
7342 the conditional is true and which is taken if the conditional is
7343 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7344
7345 void
7346 extract_true_false_edges_from_block (basic_block b,
7347 edge *true_edge,
7348 edge *false_edge)
7349 {
7350 edge e = EDGE_SUCC (b, 0);
7351
7352 if (e->flags & EDGE_TRUE_VALUE)
7353 {
7354 *true_edge = e;
7355 *false_edge = EDGE_SUCC (b, 1);
7356 }
7357 else
7358 {
7359 *false_edge = e;
7360 *true_edge = EDGE_SUCC (b, 1);
7361 }
7362 }
7363
7364 struct gimple_opt_pass pass_warn_function_return =
7365 {
7366 {
7367 GIMPLE_PASS,
7368 "*warn_function_return", /* name */
7369 NULL, /* gate */
7370 execute_warn_function_return, /* execute */
7371 NULL, /* sub */
7372 NULL, /* next */
7373 0, /* static_pass_number */
7374 TV_NONE, /* tv_id */
7375 PROP_cfg, /* properties_required */
7376 0, /* properties_provided */
7377 0, /* properties_destroyed */
7378 0, /* todo_flags_start */
7379 0 /* todo_flags_finish */
7380 }
7381 };
7382
7383 /* Emit noreturn warnings. */
7384
7385 static unsigned int
7386 execute_warn_function_noreturn (void)
7387 {
7388 if (!TREE_THIS_VOLATILE (current_function_decl)
7389 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7390 warn_function_noreturn (current_function_decl);
7391 return 0;
7392 }
7393
7394 static bool
7395 gate_warn_function_noreturn (void)
7396 {
7397 return warn_suggest_attribute_noreturn;
7398 }
7399
7400 struct gimple_opt_pass pass_warn_function_noreturn =
7401 {
7402 {
7403 GIMPLE_PASS,
7404 "*warn_function_noreturn", /* name */
7405 gate_warn_function_noreturn, /* gate */
7406 execute_warn_function_noreturn, /* execute */
7407 NULL, /* sub */
7408 NULL, /* next */
7409 0, /* static_pass_number */
7410 TV_NONE, /* tv_id */
7411 PROP_cfg, /* properties_required */
7412 0, /* properties_provided */
7413 0, /* properties_destroyed */
7414 0, /* todo_flags_start */
7415 0 /* todo_flags_finish */
7416 }
7417 };
7418
7419
7420 /* Walk a gimplified function and warn for functions whose return value is
7421 ignored and attribute((warn_unused_result)) is set. This is done
7422 before inlining, so we don't have to worry about inlined call sites. */
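/* For example (hypothetical declaration):
     __attribute__ ((warn_unused_result)) int f (void);
     f ();   <- warns: return value ignored
   The call appears below as a naked GIMPLE_CALL with no LHS, which is
   exactly the shape this walk looks for. */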
7423
7424 static void
7425 do_warn_unused_result (gimple_seq seq)
7426 {
7427 tree fdecl, ftype;
7428 gimple_stmt_iterator i;
7429
7430 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7431 {
7432 gimple g = gsi_stmt (i);
7433
7434 switch (gimple_code (g))
7435 {
7436 case GIMPLE_BIND:
7437 do_warn_unused_result (gimple_bind_body (g));
7438 break;
7439 case GIMPLE_TRY:
7440 do_warn_unused_result (gimple_try_eval (g));
7441 do_warn_unused_result (gimple_try_cleanup (g));
7442 break;
7443 case GIMPLE_CATCH:
7444 do_warn_unused_result (gimple_catch_handler (g));
7445 break;
7446 case GIMPLE_EH_FILTER:
7447 do_warn_unused_result (gimple_eh_filter_failure (g));
7448 break;
7449
7450 case GIMPLE_CALL:
7451 if (gimple_call_lhs (g))
7452 break;
7453
7454 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7455 LHS. All calls whose value is ignored should be
7456 represented like this. Look for the attribute. */
7457 fdecl = gimple_call_fndecl (g);
7458 ftype = TREE_TYPE (TREE_TYPE (gimple_call_fn (g)));
7459
7460 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7461 {
7462 location_t loc = gimple_location (g);
7463
7464 if (fdecl)
7465 warning_at (loc, OPT_Wunused_result,
7466 "ignoring return value of %qD, "
7467 "declared with attribute warn_unused_result",
7468 fdecl);
7469 else
7470 warning_at (loc, OPT_Wunused_result,
7471 "ignoring return value of function "
7472 "declared with attribute warn_unused_result");
7473 }
7474 break;
7475
7476 default:
7477 /* Not a container, not a call, or a call whose value is used. */
7478 break;
7479 }
7480 }
7481 }
7482
7483 static unsigned int
7484 run_warn_unused_result (void)
7485 {
7486 do_warn_unused_result (gimple_body (current_function_decl));
7487 return 0;
7488 }
7489
7490 static bool
7491 gate_warn_unused_result (void)
7492 {
7493 return flag_warn_unused_result;
7494 }
7495
7496 struct gimple_opt_pass pass_warn_unused_result =
7497 {
7498 {
7499 GIMPLE_PASS,
7500 "*warn_unused_result", /* name */
7501 gate_warn_unused_result, /* gate */
7502 run_warn_unused_result, /* execute */
7503 NULL, /* sub */
7504 NULL, /* next */
7505 0, /* static_pass_number */
7506 TV_NONE, /* tv_id */
7507 PROP_gimple_any, /* properties_required */
7508 0, /* properties_provided */
7509 0, /* properties_destroyed */
7510 0, /* todo_flags_start */
7511 0, /* todo_flags_finish */
7512 }
7513 };
7514