/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
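/* For illustration (a source-level sketch, not the exact tree layout):
   given

       switch (x) { case 1: case 2: goto A; case 3: goto B; }

   where cases 1 and 2 reach the same block, the single edge to that
   block maps to a chain linking CASE_1 and CASE_2 through TREE_CHAIN,
   while the edge for case 3 maps to a chain containing only CASE_3.  */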

static struct pointer_map_t *edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  make_edges ();
  cleanup_dead_labels ();

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func			/* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */
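/* For illustration, a source-level sketch ("gotovar" and the factored
   label are what this function creates; p and q are hypothetical):

       goto *p;   ...   goto *q;

   becomes

       gotovar = p; goto factored;   ...   gotovar = q; goto factored;
       ...
     factored:
       goto *gotovar;

   so that only the single factored block needs edges to every possible
   label destination, instead of every computed goto carrying them.  */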

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for its goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label ();
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            seq = gsi_split_seq_before (&i);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
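          /* For instance (a sketch, assuming a call such as setjmp that
             may transfer control abnormally): "x = setjmp (env);" is
             split into
               tmp = setjmp (env);
               x = tmp;
             so the abnormal edge out of the call still sees the old
             value of x.  */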
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary (gimple_cond_code (stmt), boolean_type_node,
                              gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* Some calls are known not to return.  */
              fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                {
                  make_eh_edges (last);
                }
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;


            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;


            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}


/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = TREE_CHAIN (t);
      TREE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return false;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}


/*---------------------------------------------------------------------------
                              Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.  */
static void
update_eh_label (struct eh_region *region)
{
  tree old_label = get_eh_region_tree_label (region);
  if (old_label)
    {
      tree new_label;
      basic_block bb = label_to_block (old_label);

      /* ??? After optimizing, there may be EH regions with labels
         that have already been removed from the function body, so
         there is no basic block for them.  */
      if (! bb)
        return;

      new_label = label_for_bb[bb->index].label;
      label_for_bb[bb->index].used = true;
      set_eh_region_tree_label (region, new_label);
    }
}


/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Clean up all useless labels.  */
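/* E.g. (a sketch): if a block starts with "L1: L2: L3: stmt;" and only
   L2 is user-defined, L2 becomes the leading label, every jump to L1 or
   L3 is redirected to L2, and the now-unused artificial labels L1 and
   L3 are deleted.  */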

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            tree true_label = gimple_cond_true_label (stmt);
            tree false_label = gimple_cond_false_label (stmt);

            if (true_label)
              gimple_cond_set_true_label (stmt, main_block_label (true_label));
            if (false_label)
              gimple_cond_set_false_label (stmt, main_block_label (false_label));
            break;
          }

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                tree label = main_block_label (CASE_LABEL (case_label));
                CASE_LABEL (case_label) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              tree new_dest = main_block_label (gimple_goto_dest (stmt));
              gimple_goto_set_dest (stmt, new_dest);
              break;
            }

        default:
          break;
        }
    }

  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        {
          int old_size = gimple_switch_num_labels (stmt);
          int i, j, new_size = old_size;
          tree default_case = NULL_TREE;
          tree default_label = NULL_TREE;
          bool has_default;

          /* The default label is always the first case in a switch
             statement after gimplification if it was not optimized
             away.  */
          if (!CASE_LOW (gimple_switch_default_label (stmt))
              && !CASE_HIGH (gimple_switch_default_label (stmt)))
            {
              default_case = gimple_switch_default_label (stmt);
              default_label = CASE_LABEL (default_case);
              has_default = true;
            }
          else
            has_default = false;

          /* Look for possible opportunities to merge cases.  */
          if (has_default)
            i = 1;
          else
            i = 0;
          while (i < old_size)
            {
              tree base_case, base_label, base_high;
              base_case = gimple_switch_label (stmt, i);

              gcc_assert (base_case);
              base_label = CASE_LABEL (base_case);

              /* Discard cases that have the same destination as the
                 default case.  */
              if (base_label == default_label)
                {
                  gimple_switch_set_label (stmt, i, NULL_TREE);
                  i++;
                  new_size--;
                  continue;
                }

              base_high = CASE_HIGH (base_case)
                          ? CASE_HIGH (base_case)
                          : CASE_LOW (base_case);
              i++;

              /* Try to merge case labels.  Break out when we reach the end
                 of the label vector or when we cannot merge the next case
                 label with the current one.  */
              while (i < old_size)
                {
                  tree merge_case = gimple_switch_label (stmt, i);
                  tree merge_label = CASE_LABEL (merge_case);
                  tree t = int_const_binop (PLUS_EXPR, base_high,
                                            integer_one_node, 1);

                  /* Merge the cases if they jump to the same place,
                     and their ranges are consecutive.  */
                  if (merge_label == base_label
                      && tree_int_cst_equal (CASE_LOW (merge_case), t))
                    {
                      base_high = CASE_HIGH (merge_case) ?
                        CASE_HIGH (merge_case) : CASE_LOW (merge_case);
                      CASE_HIGH (base_case) = base_high;
                      gimple_switch_set_label (stmt, i, NULL_TREE);
                      new_size--;
                      i++;
                    }
                  else
                    break;
                }
            }

          /* Compress the case labels in the label vector, and adjust the
             length of the vector.  */
          for (i = 0, j = 0; i < new_size; i++)
            {
              while (! gimple_switch_label (stmt, j))
                j++;
              gimple_switch_set_label (stmt, i,
                                       gimple_switch_label (stmt, j++));
            }

          gcc_assert (new_size <= old_size);
          gimple_switch_set_num_labels (stmt, new_size);
        }
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date, we cannot eliminate any phis; however, if only
     some symbols as whole are marked for renaming, this is not a problem,
     as phi nodes for those symbols are irrelevant in updating anyway.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis))
    {
      gimple_stmt_iterator i;

      if (name_mappings_registered_p ())
        return false;

      for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi))
              && !may_propagate_copy (gimple_phi_result (phi),
                                      gimple_phi_arg_def (phi, 0)))
            return false;
        }
    }

  /* Do not remove user labels.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      if (!DECL_ARTIFICIAL (gimple_label_label (stmt)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops
      && b->loop_father->latch == b)
    return false;

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      if (gimple_code (stmt) != GIMPLE_PHI)
        push_stmt_changes (&stmt);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          size_t i;

          fold_stmt_inplace (stmt);
          if (cfgcleanup_altered_bbs)
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  This should go in pop_stmt_changes.  */
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              tree op = gimple_op (stmt, i);
              /* Operands may be empty here.  For example, the labels
                 of a GIMPLE_COND are nulled out following the creation
                 of the corresponding CFG edges.  */
              if (op && TREE_CODE (op) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (op);
            }

          maybe_clean_or_replace_eh_stmt (stmt, stmt);

          pop_stmt_changes (&stmt);
        }
    }

  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
                              || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
        {
          gimple label = gsi_stmt (gsi);

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (gimple_label_label (label)))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, label, GSI_NEW_STMT);
            }
        }
      else
        {
          gimple_set_bb (gsi_stmt (gsi), a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reached by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}


/* Walk the function tree removing unnecessary statements.

     * Empty statement nodes are removed

     * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed

     * Unnecessary COND_EXPRs are removed

     * Some unnecessary BIND_EXPRs are removed

     * GOTO_EXPRs immediately preceding their destination are removed.

   Clearly more work could be done.  The trick is doing the analysis
   and removal fast enough to be a net improvement in compile times.

   Note that when we remove a control structure such as a COND_EXPR,
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
   to ensure we eliminate all the useless code.  */

struct rus_data
{
  bool repeat;
  bool may_throw;
  bool may_branch;
  bool has_label;
  bool last_was_goto;
  gimple_stmt_iterator last_goto_gsi;
};


static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);

/* Given a statement sequence, find the first executable statement with
   location information, and warn that it is unreachable.  When searching,
   descend into containers in execution order.  */

static bool
remove_useless_stmts_warn_notreached (gimple_seq stmts)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_has_location (stmt))
        {
          location_t loc = gimple_location (stmt);
          if (LOCATION_LINE (loc) > 0)
            {
              warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
              return true;
            }
        }

      switch (gimple_code (stmt))
        {
        /* Unfortunately, we need the CFG now to detect unreachable
           branches in a conditional, so conditionals are not handled here.  */

        case GIMPLE_TRY:
          if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
            return true;
          if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
            return true;
          break;

        case GIMPLE_CATCH:
          return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));

        case GIMPLE_EH_FILTER:
          return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));

        case GIMPLE_BIND:
          return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));

        default:
          break;
        }
    }

  return false;
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_COND statements.  */
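/* E.g. (a sketch): a condition folded to a constant, such as
   "if (0) goto L1; else goto L2;", is replaced below by the plain
   "goto L2;", as is a conditional whose two arms jump to the same
   label.  */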

static void
remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  /* The folded result must still be a conditional statement.  */
  fold_stmt (gsi);
  gcc_assert (gsi_stmt (*gsi) == stmt);

  data->may_branch = true;

  /* Replace trivial conditionals with gotos.  */
  if (gimple_cond_true_p (stmt))
    {
      /* Goto THEN label.  */
      tree then_label = gimple_cond_true_label (stmt);

      gsi_replace (gsi, gimple_build_goto (then_label), false);
      data->last_goto_gsi = *gsi;
      data->last_was_goto = true;
      data->repeat = true;
    }
  else if (gimple_cond_false_p (stmt))
    {
      /* Goto ELSE label.  */
      tree else_label = gimple_cond_false_label (stmt);

      gsi_replace (gsi, gimple_build_goto (else_label), false);
      data->last_goto_gsi = *gsi;
      data->last_was_goto = true;
      data->repeat = true;
    }
  else
    {
      tree then_label = gimple_cond_true_label (stmt);
      tree else_label = gimple_cond_false_label (stmt);

      if (then_label == else_label)
        {
          /* Goto common destination.  */
          gsi_replace (gsi, gimple_build_goto (then_label), false);
          data->last_goto_gsi = *gsi;
          data->last_was_goto = true;
          data->repeat = true;
        }
    }

  gsi_next (gsi);

  data->last_was_goto = false;
}

/* Helper for remove_useless_stmts_1.
   Handle the try-finally case for GIMPLE_TRY statements.  */
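/* E.g. (source-level sketches of the three transformations below):
     try {} finally { S }      =>  S
     try { S } finally {}      =>  S
     try { S1 } finally { S2 } =>  S1; S2   (if S1 cannot throw or branch)  */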

static void
remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  bool save_may_branch, save_may_throw;
  bool this_may_branch, this_may_throw;

  gimple_seq eval_seq, cleanup_seq;
  gimple_stmt_iterator eval_gsi, cleanup_gsi;

  gimple stmt = gsi_stmt (*gsi);

  /* Collect may_branch and may_throw information for the body only.  */
  save_may_branch = data->may_branch;
  save_may_throw = data->may_throw;
  data->may_branch = false;
  data->may_throw = false;
  data->last_was_goto = false;

  eval_seq = gimple_try_eval (stmt);
  eval_gsi = gsi_start (eval_seq);
  remove_useless_stmts_1 (&eval_gsi, data);

  this_may_branch = data->may_branch;
  this_may_throw = data->may_throw;
  data->may_branch |= save_may_branch;
  data->may_throw |= save_may_throw;
  data->last_was_goto = false;

  cleanup_seq = gimple_try_cleanup (stmt);
  cleanup_gsi = gsi_start (cleanup_seq);
  remove_useless_stmts_1 (&cleanup_gsi, data);

  /* If the body is empty, then we can emit the FINALLY block without
     the enclosing TRY_FINALLY_EXPR.  */
  if (gimple_seq_empty_p (eval_seq))
    {
      gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }

  /* If the handler is empty, then we can emit the TRY block without
     the enclosing TRY_FINALLY_EXPR.  */
  else if (gimple_seq_empty_p (cleanup_seq))
    {
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }

  /* If the body neither throws, nor branches, then we can safely
     string the TRY and FINALLY blocks together.  */
  else if (!this_may_branch && !this_may_throw)
    {
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }
  else
    gsi_next (gsi);
}

/* Helper for remove_useless_stmts_1.
   Handle the try-catch case for GIMPLE_TRY statements.  */

static void
remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  bool save_may_throw, this_may_throw;

  gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
  gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;

  gimple stmt = gsi_stmt (*gsi);

  /* Collect may_throw information for the body only.  */
  save_may_throw = data->may_throw;
  data->may_throw = false;
  data->last_was_goto = false;

  eval_seq = gimple_try_eval (stmt);
  eval_gsi = gsi_start (eval_seq);
  remove_useless_stmts_1 (&eval_gsi, data);

  this_may_throw = data->may_throw;
  data->may_throw = save_may_throw;

  cleanup_seq = gimple_try_cleanup (stmt);

  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
  if (!this_may_throw)
    {
      if (warn_notreached)
        {
          remove_useless_stmts_warn_notreached (cleanup_seq);
        }
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
      return;
    }

  /* Process the catch clause specially.  We may be able to tell that
     no exceptions propagate past this point.  */

  this_may_throw = true;
  cleanup_gsi = gsi_start (cleanup_seq);
  stmt = gsi_stmt (cleanup_gsi);
  data->last_was_goto = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_CATCH:
      /* If the first element is a catch, they all must be.  */
      while (!gsi_end_p (cleanup_gsi))
        {
          stmt = gsi_stmt (cleanup_gsi);
          /* If we catch all exceptions, then the body does not
             propagate exceptions past this point.  */
          if (gimple_catch_types (stmt) == NULL)
            this_may_throw = false;
          data->last_was_goto = false;
          handler_seq = gimple_catch_handler (stmt);
          handler_gsi = gsi_start (handler_seq);
          remove_useless_stmts_1 (&handler_gsi, data);
          gsi_next (&cleanup_gsi);
        }
      gsi_next (gsi);
      break;

    case GIMPLE_EH_FILTER:
      /* If the first element is an eh_filter, it should stand alone.  */
      if (gimple_eh_filter_must_not_throw (stmt))
        this_may_throw = false;
      else if (gimple_eh_filter_types (stmt) == NULL)
        this_may_throw = false;
      failure_seq = gimple_eh_filter_failure (stmt);
      failure_gsi = gsi_start (failure_seq);
      remove_useless_stmts_1 (&failure_gsi, data);
      gsi_next (gsi);
      break;

    default:
      /* Otherwise this is a list of cleanup statements.  */
      remove_useless_stmts_1 (&cleanup_gsi, data);

      /* If the cleanup is empty, then we can emit the TRY block without
         the enclosing TRY_CATCH_EXPR.  */
      if (gimple_seq_empty_p (cleanup_seq))
        {
          gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
          gsi_remove (gsi, false);
          data->repeat = true;
        }
      else
        gsi_next (gsi);
      break;
    }

  data->may_throw |= this_may_throw;
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_BIND statements.  */

static void
remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data ATTRIBUTE_UNUSED)
{
  tree block;
  gimple_seq body_seq, fn_body_seq;
  gimple_stmt_iterator body_gsi;

  gimple stmt = gsi_stmt (*gsi);

  /* First remove anything underneath the GIMPLE_BIND.  */

  body_seq = gimple_bind_body (stmt);
  body_gsi = gsi_start (body_seq);
  remove_useless_stmts_1 (&body_gsi, data);

  /* If the GIMPLE_BIND has no variables, then we can pull everything
     up one level and remove the GIMPLE_BIND, unless this is the toplevel
     GIMPLE_BIND for the current function or an inlined function.

     When this situation occurs we will want to apply this
     optimization again.  */
  block = gimple_bind_block (stmt);
  fn_body_seq = gimple_body (current_function_decl);
  if (gimple_bind_vars (stmt) == NULL_TREE
      && (gimple_seq_empty_p (fn_body_seq)
          || stmt != gimple_seq_first_stmt (fn_body_seq))
      && (! block
          || ! BLOCK_ABSTRACT_ORIGIN (block)
          || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
              != FUNCTION_DECL)))
    {
      tree var = NULL_TREE;
      /* Even if there are no gimple_bind_vars, there might be other
         decls in BLOCK_VARS rendering the GIMPLE_BIND not useless.  */
      if (block && !BLOCK_NUM_NONLOCALIZED_VARS (block))
        for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
          if (TREE_CODE (var) == IMPORTED_DECL)
            break;
      if (var || (block && BLOCK_NUM_NONLOCALIZED_VARS (block)))
        gsi_next (gsi);
      else
        {
          gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
          gsi_remove (gsi, false);
          data->repeat = true;
        }
    }
  else
    gsi_next (gsi);
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_GOTO statements.  */

static void
remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  tree dest = gimple_goto_dest (stmt);

  data->may_branch = true;
  data->last_was_goto = false;

  /* Record iterator for last goto expr, so that we can delete it if
     unnecessary.  */
  if (TREE_CODE (dest) == LABEL_DECL)
    {
      data->last_goto_gsi = *gsi;
      data->last_was_goto = true;
    }

  gsi_next (gsi);
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_LABEL statements.  */
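/* E.g. (a sketch): in "goto L; L: ..." the goto targets the label that
   immediately follows it, so the goto is replaced below by a GIMPLE_NOP,
   which a later iteration then removes.  */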

static void
remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  tree label = gimple_label_label (stmt);

  data->has_label = true;

  /* We do want to jump across non-local label receiver code.  */
  if (DECL_NONLOCAL (label))
    data->last_was_goto = false;

  else if (data->last_was_goto
           && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
    {
      /* Replace the preceding GIMPLE_GOTO statement with
         a GIMPLE_NOP, which will be subsequently removed.
         In this way, we avoid invalidating other iterators
         active on the statement sequence.  */
      gsi_replace (&data->last_goto_gsi, gimple_build_nop (), false);
      data->last_was_goto = false;
      data->repeat = true;
    }

  /* ??? Add something here to delete unused labels.  */

  gsi_next (gsi);
}


/* CALL is a GIMPLE_CALL.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}
1920
1921 /* Remove useless statements from a statement sequence, and perform
1922 some preliminary simplifications. */
1923
1924 static void
1925 remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
1926 {
1927 while (!gsi_end_p (*gsi))
1928 {
1929 gimple stmt = gsi_stmt (*gsi);
1930
1931 switch (gimple_code (stmt))
1932 {
1933 case GIMPLE_COND:
1934 remove_useless_stmts_cond (gsi, data);
1935 break;
1936
1937 case GIMPLE_GOTO:
1938 remove_useless_stmts_goto (gsi, data);
1939 break;
1940
1941 case GIMPLE_LABEL:
1942 remove_useless_stmts_label (gsi, data);
1943 break;
1944
1945 case GIMPLE_ASSIGN:
1946 fold_stmt (gsi);
1947 stmt = gsi_stmt (*gsi);
1948 data->last_was_goto = false;
1949 if (stmt_could_throw_p (stmt))
1950 data->may_throw = true;
1951 gsi_next (gsi);
1952 break;
1953
1954 case GIMPLE_ASM:
1955 fold_stmt (gsi);
1956 data->last_was_goto = false;
1957 gsi_next (gsi);
1958 break;
1959
1960 case GIMPLE_CALL:
1961 fold_stmt (gsi);
1962 stmt = gsi_stmt (*gsi);
1963 data->last_was_goto = false;
1964 if (is_gimple_call (stmt))
1965 notice_special_calls (stmt);
1966
1967 /* We used to call update_gimple_call_flags here,
1968 which copied side-effects and nothrows status
1969 from the function decl to the call. In the new
1970 tuplified GIMPLE, the accessors for this information
1971 always consult the function decl, so this copying
1972 is no longer necessary. */
1973 if (stmt_could_throw_p (stmt))
1974 data->may_throw = true;
1975 gsi_next (gsi);
1976 break;
1977
1978 case GIMPLE_RETURN:
1979 fold_stmt (gsi);
1980 data->last_was_goto = false;
1981 data->may_branch = true;
1982 gsi_next (gsi);
1983 break;
1984
1985 case GIMPLE_BIND:
1986 remove_useless_stmts_bind (gsi, data);
1987 break;
1988
1989 case GIMPLE_TRY:
1990 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
1991 remove_useless_stmts_tc (gsi, data);
1992 else if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
1993 remove_useless_stmts_tf (gsi, data);
1994 else
1995 gcc_unreachable ();
1996 break;
1997
1998 case GIMPLE_CATCH:
1999 gcc_unreachable ();
2000 break;
2001
2002 case GIMPLE_NOP:
2003 gsi_remove (gsi, false);
2004 break;
2005
2006 case GIMPLE_OMP_FOR:
2007 {
2008 gimple_seq pre_body_seq = gimple_omp_for_pre_body (stmt);
2009 gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);
2010
2011 remove_useless_stmts_1 (&pre_body_gsi, data);
2012 data->last_was_goto = false;
2013 }
2014 /* FALLTHROUGH */
2015 case GIMPLE_OMP_CRITICAL:
2016 case GIMPLE_OMP_CONTINUE:
2017 case GIMPLE_OMP_MASTER:
2018 case GIMPLE_OMP_ORDERED:
2019 case GIMPLE_OMP_SECTION:
2020 case GIMPLE_OMP_SECTIONS:
2021 case GIMPLE_OMP_SINGLE:
2022 {
2023 gimple_seq body_seq = gimple_omp_body (stmt);
2024 gimple_stmt_iterator body_gsi = gsi_start (body_seq);
2025
2026 remove_useless_stmts_1 (&body_gsi, data);
2027 data->last_was_goto = false;
2028 gsi_next (gsi);
2029 }
2030 break;
2031
2032 case GIMPLE_OMP_PARALLEL:
2033 case GIMPLE_OMP_TASK:
2034 {
2035 /* Make sure the outermost GIMPLE_BIND isn't removed
2036 as useless. */
2037 gimple_seq body_seq = gimple_omp_body (stmt);
2038 gimple bind = gimple_seq_first_stmt (body_seq);
2039 gimple_seq bind_seq = gimple_bind_body (bind);
2040 gimple_stmt_iterator bind_gsi = gsi_start (bind_seq);
2041
2042 remove_useless_stmts_1 (&bind_gsi, data);
2043 data->last_was_goto = false;
2044 gsi_next (gsi);
2045 }
2046 break;
2047
2048 case GIMPLE_CHANGE_DYNAMIC_TYPE:
2049 /* If we are not optimizing, remove GIMPLE_CHANGE_DYNAMIC_TYPE here,
2050 since expansion is confused by these statements; when optimizing,
2051 they are removed during alias computation instead. */
2052 if (!optimize)
2053 {
2054 data->last_was_goto = false;
2055 gsi_remove (gsi, false);
2056 break;
2057 }
2058 /* Fallthru. */
2059
2060 default:
2061 data->last_was_goto = false;
2062 gsi_next (gsi);
2063 break;
2064 }
2065 }
2066 }
2067
2068 /* Walk the function tree, removing useless statements and performing
2069 some preliminary simplifications. */
2070
2071 static unsigned int
2072 remove_useless_stmts (void)
2073 {
2074 struct rus_data data;
2075
2076 clear_special_calls ();
2077
2078 do
2079 {
2080 gimple_stmt_iterator gsi;
2081
2082 gsi = gsi_start (gimple_body (current_function_decl));
2083 memset (&data, 0, sizeof (data));
2084 remove_useless_stmts_1 (&gsi, &data);
2085 }
2086 while (data.repeat);
2087 return 0;
2088 }
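/* Illustrative sketch, not part of the original sources: one sweep may
   rewrite "goto L1; L1:;" by replacing the goto with a GIMPLE_NOP and
   setting data.repeat; the nop itself is only deleted on the following
   sweep, and the do-while loop keeps restarting the walk until no
   handler requests another pass.  */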
2089
2090
2091 struct gimple_opt_pass pass_remove_useless_stmts =
2092 {
2093 {
2094 GIMPLE_PASS,
2095 "useless", /* name */
2096 NULL, /* gate */
2097 remove_useless_stmts, /* execute */
2098 NULL, /* sub */
2099 NULL, /* next */
2100 0, /* static_pass_number */
2101 TV_NONE, /* tv_id */
2102 PROP_gimple_any, /* properties_required */
2103 0, /* properties_provided */
2104 0, /* properties_destroyed */
2105 0, /* todo_flags_start */
2106 TODO_dump_func /* todo_flags_finish */
2107 }
2108 };
2109
2110 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2111
2112 static void
2113 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2114 {
2115 /* Since this block is no longer reachable, we can just delete all
2116 of its PHI nodes. */
2117 remove_phi_nodes (bb);
2118
2119 /* Remove edges to BB's successors. */
2120 while (EDGE_COUNT (bb->succs) > 0)
2121 remove_edge (EDGE_SUCC (bb, 0));
2122 }
2123
2124
2125 /* Remove statements of basic block BB. */
2126
2127 static void
2128 remove_bb (basic_block bb)
2129 {
2130 gimple_stmt_iterator i;
2131 source_location loc = UNKNOWN_LOCATION;
2132
2133 if (dump_file)
2134 {
2135 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2136 if (dump_flags & TDF_DETAILS)
2137 {
2138 dump_bb (bb, dump_file, 0);
2139 fprintf (dump_file, "\n");
2140 }
2141 }
2142
2143 if (current_loops)
2144 {
2145 struct loop *loop = bb->loop_father;
2146
2147 /* If a loop gets removed, clean up the information associated
2148 with it. */
2149 if (loop->latch == bb
2150 || loop->header == bb)
2151 free_numbers_of_iterations_estimates_loop (loop);
2152 }
2153
2154 /* Remove all the instructions in the block. */
2155 if (bb_seq (bb) != NULL)
2156 {
2157 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2158 {
2159 gimple stmt = gsi_stmt (i);
2160 if (gimple_code (stmt) == GIMPLE_LABEL
2161 && (FORCED_LABEL (gimple_label_label (stmt))
2162 || DECL_NONLOCAL (gimple_label_label (stmt))))
2163 {
2164 basic_block new_bb;
2165 gimple_stmt_iterator new_gsi;
2166
2167 /* A non-reachable non-local label may still be referenced.
2168 But it no longer needs to carry the extra semantics of
2169 non-locality. */
2170 if (DECL_NONLOCAL (gimple_label_label (stmt)))
2171 {
2172 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2173 FORCED_LABEL (gimple_label_label (stmt)) = 1;
2174 }
2175
2176 new_bb = bb->prev_bb;
2177 new_gsi = gsi_start_bb (new_bb);
2178 gsi_remove (&i, false);
2179 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2180 }
2181 else
2182 {
2183 /* Release SSA definitions if we are in SSA. Note that we
2184 may be called when not in SSA. For example,
2185 final_cleanup calls this function via
2186 cleanup_tree_cfg. */
2187 if (gimple_in_ssa_p (cfun))
2188 release_defs (stmt);
2189
2190 gsi_remove (&i, true);
2191 }
2192
2193 /* Don't warn for removed gotos. Gotos are often removed due to
2194 jump threading, thus resulting in bogus warnings. Not great,
2195 since this way we lose warnings for gotos in the original
2196 program that are indeed unreachable. */
2197 if (gimple_code (stmt) != GIMPLE_GOTO
2198 && gimple_has_location (stmt)
2199 && !loc)
2200 loc = gimple_location (stmt);
2201 }
2202 }
2203
2204 /* If requested, give a warning that the first statement in the
2205 block is unreachable. The loop above walks statements forwards
2206 and records a location only while LOC is still unset, so LOC ends
2207 up as the location of the first statement in the block. */
2208 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
2209 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2210
2211 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2212 bb->il.gimple = NULL;
2213 }
2214
2215
2216 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2217 predicate VAL, return the edge that will be taken out of the block.
2218 If VAL does not match a unique edge, NULL is returned. */
2219
2220 edge
2221 find_taken_edge (basic_block bb, tree val)
2222 {
2223 gimple stmt;
2224
2225 stmt = last_stmt (bb);
2226
2227 gcc_assert (stmt);
2228 gcc_assert (is_ctrl_stmt (stmt));
2229
2230 if (val == NULL)
2231 return NULL;
2232
2233 if (!is_gimple_min_invariant (val))
2234 return NULL;
2235
2236 if (gimple_code (stmt) == GIMPLE_COND)
2237 return find_taken_edge_cond_expr (bb, val);
2238
2239 if (gimple_code (stmt) == GIMPLE_SWITCH)
2240 return find_taken_edge_switch_expr (bb, val);
2241
2242 if (computed_goto_p (stmt))
2243 {
2244 /* Only optimize if the argument is a label; if the argument is
2245 not a label then we cannot construct a proper CFG.
2246
2247 It may be the case that we only need to allow the LABEL_REF to
2248 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2249 appear inside a LABEL_EXPR just to be safe. */
2250 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2251 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2252 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2253 return NULL;
2254 }
2255
2256 gcc_unreachable ();
2257 }
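/* Illustrative sketch, not part of the original sources: if BB ends in
   "if (x_1 > 0) goto L1; else goto L2;" and a pass has proven the
   predicate, find_taken_edge (bb, integer_zero_node) returns the FALSE
   edge (to L2), while any nonzero INTEGER_CST yields the TRUE edge; a
   NULL or non-invariant VAL makes the function return NULL.  */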
2258
2259 /* Given a constant value VAL and the entry block BB to a computed
2260 GOTO statement, determine which of the outgoing edges will be taken
2261 out of the block. Return NULL if the taken edge cannot be determined. */
2262
2263 static edge
2264 find_taken_edge_computed_goto (basic_block bb, tree val)
2265 {
2266 basic_block dest;
2267 edge e = NULL;
2268
2269 dest = label_to_block (val);
2270 if (dest)
2271 {
2272 e = find_edge (bb, dest);
2273 gcc_assert (e != NULL);
2274 }
2275
2276 return e;
2277 }
2278
2279 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2280 statement, determine which of the two edges will be taken out of the
2281 block. Return NULL if either edge may be taken. */
2282
2283 static edge
2284 find_taken_edge_cond_expr (basic_block bb, tree val)
2285 {
2286 edge true_edge, false_edge;
2287
2288 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2289
2290 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2291 return (integer_zerop (val) ? false_edge : true_edge);
2292 }
2293
2294 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2295 statement, determine which edge will be taken out of the block. Return
2296 NULL if any edge may be taken. */
2297
2298 static edge
2299 find_taken_edge_switch_expr (basic_block bb, tree val)
2300 {
2301 basic_block dest_bb;
2302 edge e;
2303 gimple switch_stmt;
2304 tree taken_case;
2305
2306 switch_stmt = last_stmt (bb);
2307 taken_case = find_case_label_for_value (switch_stmt, val);
2308 dest_bb = label_to_block (CASE_LABEL (taken_case));
2309
2310 e = find_edge (bb, dest_bb);
2311 gcc_assert (e);
2312 return e;
2313 }
2314
2315
2316 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2317 We can make optimal use here of the fact that the case labels are
2318 sorted: We can do a binary search for a case matching VAL. */
2319
2320 static tree
2321 find_case_label_for_value (gimple switch_stmt, tree val)
2322 {
2323 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2324 tree default_case = gimple_switch_default_label (switch_stmt);
2325
2326 for (low = 0, high = n; high - low > 1; )
2327 {
2328 size_t i = (high + low) / 2;
2329 tree t = gimple_switch_label (switch_stmt, i);
2330 int cmp;
2331
2332 /* Cache the result of comparing CASE_LOW and val. */
2333 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2334
2335 if (cmp > 0)
2336 high = i;
2337 else
2338 low = i;
2339
2340 if (CASE_HIGH (t) == NULL)
2341 {
2342 /* A single-valued case label. */
2343 if (cmp == 0)
2344 return t;
2345 }
2346 else
2347 {
2348 /* A case range. We can only handle integer ranges. */
2349 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2350 return t;
2351 }
2352 }
2353
2354 return default_case;
2355 }
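/* Illustrative sketch, not part of the original sources: for
   "switch (x) { case 1: ...; case 5 ... 9: ...; default: ...; }"
   and VAL == 7, the search narrows LOW/HIGH onto the "case 5 ... 9"
   label: CASE_LOW is 5 (so cmp <= 0) and CASE_HIGH is 9 (>= VAL), so
   that label is returned.  For VAL == 3 no label matches and the
   default case is returned.  */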
2356
2357
2358 /* Dump a basic block on stderr. */
2359
2360 void
2361 gimple_debug_bb (basic_block bb)
2362 {
2363 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2364 }
2365
2366
2367 /* Dump basic block with index N on stderr. */
2368
2369 basic_block
2370 gimple_debug_bb_n (int n)
2371 {
2372 gimple_debug_bb (BASIC_BLOCK (n));
2373 return BASIC_BLOCK (n);
2374 }
2375
2376
2377 /* Dump the CFG on stderr.
2378
2379 FLAGS are the same used by the tree dumping functions
2380 (see TDF_* in tree-pass.h). */
2381
2382 void
2383 gimple_debug_cfg (int flags)
2384 {
2385 gimple_dump_cfg (stderr, flags);
2386 }
2387
2388
2389 /* Dump the program showing basic block boundaries on the given FILE.
2390
2391 FLAGS are the same used by the tree dumping functions (see TDF_* in
2392 tree.h). */
2393
2394 void
2395 gimple_dump_cfg (FILE *file, int flags)
2396 {
2397 if (flags & TDF_DETAILS)
2398 {
2399 const char *funcname
2400 = lang_hooks.decl_printable_name (current_function_decl, 2);
2401
2402 fputc ('\n', file);
2403 fprintf (file, ";; Function %s\n\n", funcname);
2404 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2405 n_basic_blocks, n_edges, last_basic_block);
2406
2407 brief_dump_cfg (file);
2408 fprintf (file, "\n");
2409 }
2410
2411 if (flags & TDF_STATS)
2412 dump_cfg_stats (file);
2413
2414 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2415 }
2416
2417
2418 /* Dump CFG statistics on FILE. */
2419
2420 void
2421 dump_cfg_stats (FILE *file)
2422 {
2423 static long max_num_merged_labels = 0;
2424 unsigned long size, total = 0;
2425 long num_edges;
2426 basic_block bb;
2427 const char * const fmt_str = "%-30s%-13s%12s\n";
2428 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2429 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2430 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2431 const char *funcname
2432 = lang_hooks.decl_printable_name (current_function_decl, 2);
2433
2434
2435 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2436
2437 fprintf (file, "---------------------------------------------------------\n");
2438 fprintf (file, fmt_str, "", " Number of ", "Memory");
2439 fprintf (file, fmt_str, "", " instances ", "used ");
2440 fprintf (file, "---------------------------------------------------------\n");
2441
2442 size = n_basic_blocks * sizeof (struct basic_block_def);
2443 total += size;
2444 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2445 SCALE (size), LABEL (size));
2446
2447 num_edges = 0;
2448 FOR_EACH_BB (bb)
2449 num_edges += EDGE_COUNT (bb->succs);
2450 size = num_edges * sizeof (struct edge_def);
2451 total += size;
2452 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2453
2454 fprintf (file, "---------------------------------------------------------\n");
2455 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2456 LABEL (total));
2457 fprintf (file, "---------------------------------------------------------\n");
2458 fprintf (file, "\n");
2459
2460 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2461 max_num_merged_labels = cfg_stats.num_merged_labels;
2462
2463 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2464 cfg_stats.num_merged_labels, max_num_merged_labels);
2465
2466 fprintf (file, "\n");
2467 }
2468
2469
2470 /* Dump CFG statistics on stderr. Keep extern so that it's always
2471 linked in the final executable. */
2472
2473 void
2474 debug_cfg_stats (void)
2475 {
2476 dump_cfg_stats (stderr);
2477 }
2478
2479
2480 /* Dump the flowgraph to a .vcg FILE. */
2481
2482 static void
2483 gimple_cfg2vcg (FILE *file)
2484 {
2485 edge e;
2486 edge_iterator ei;
2487 basic_block bb;
2488 const char *funcname
2489 = lang_hooks.decl_printable_name (current_function_decl, 2);
2490
2491 /* Write the file header. */
2492 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2493 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2494 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2495
2496 /* Write blocks and edges. */
2497 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2498 {
2499 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2500 e->dest->index);
2501
2502 if (e->flags & EDGE_FAKE)
2503 fprintf (file, " linestyle: dotted priority: 10");
2504 else
2505 fprintf (file, " linestyle: solid priority: 100");
2506
2507 fprintf (file, " }\n");
2508 }
2509 fputc ('\n', file);
2510
2511 FOR_EACH_BB (bb)
2512 {
2513 enum gimple_code head_code, end_code;
2514 const char *head_name, *end_name;
2515 int head_line = 0;
2516 int end_line = 0;
2517 gimple first = first_stmt (bb);
2518 gimple last = last_stmt (bb);
2519
2520 if (first)
2521 {
2522 head_code = gimple_code (first);
2523 head_name = gimple_code_name[head_code];
2524 head_line = get_lineno (first);
2525 }
2526 else
2527 head_name = "no-statement";
2528
2529 if (last)
2530 {
2531 end_code = gimple_code (last);
2532 end_name = gimple_code_name[end_code];
2533 end_line = get_lineno (last);
2534 }
2535 else
2536 end_name = "no-statement";
2537
2538 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2539 bb->index, bb->index, head_name, head_line, end_name,
2540 end_line);
2541
2542 FOR_EACH_EDGE (e, ei, bb->succs)
2543 {
2544 if (e->dest == EXIT_BLOCK_PTR)
2545 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2546 else
2547 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2548
2549 if (e->flags & EDGE_FAKE)
2550 fprintf (file, " priority: 10 linestyle: dotted");
2551 else
2552 fprintf (file, " priority: 100 linestyle: solid");
2553
2554 fprintf (file, " }\n");
2555 }
2556
2557 if (bb->next_bb != EXIT_BLOCK_PTR)
2558 fputc ('\n', file);
2559 }
2560
2561 fputs ("}\n\n", file);
2562 }
2563
2564
2565
2566 /*---------------------------------------------------------------------------
2567 Miscellaneous helpers
2568 ---------------------------------------------------------------------------*/
2569
2570 /* Return true if T represents a stmt that always transfers control. */
2571
2572 bool
2573 is_ctrl_stmt (gimple t)
2574 {
2575 return gimple_code (t) == GIMPLE_COND
2576 || gimple_code (t) == GIMPLE_SWITCH
2577 || gimple_code (t) == GIMPLE_GOTO
2578 || gimple_code (t) == GIMPLE_RETURN
2579 || gimple_code (t) == GIMPLE_RESX;
2580 }
2581
2582
2583 /* Return true if T is a statement that may alter the flow of control
2584 (e.g., a call to a non-returning function). */
2585
2586 bool
2587 is_ctrl_altering_stmt (gimple t)
2588 {
2589 gcc_assert (t);
2590
2591 if (is_gimple_call (t))
2592 {
2593 int flags = gimple_call_flags (t);
2594
2595 /* A non-pure/const call alters flow control if the current
2596 function has nonlocal labels. */
2597 if (!(flags & (ECF_CONST | ECF_PURE))
2598 && cfun->has_nonlocal_label)
2599 return true;
2600
2601 /* A call also alters control flow if it does not return. */
2602 if (flags & ECF_NORETURN)
2603 return true;
2604 }
2605
2606 /* OpenMP directives alter control flow. */
2607 if (is_gimple_omp (t))
2608 return true;
2609
2610 /* If a statement can throw, it alters control flow. */
2611 return stmt_can_throw_internal (t);
2612 }
2613
2614
2615 /* Return true if T is a simple local goto. */
2616
2617 bool
2618 simple_goto_p (gimple t)
2619 {
2620 return (gimple_code (t) == GIMPLE_GOTO
2621 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2622 }
2623
2624
2625 /* Return true if T can make an abnormal transfer of control flow.
2626 Transfers of control flow associated with EH are excluded. */
2627
2628 bool
2629 stmt_can_make_abnormal_goto (gimple t)
2630 {
2631 if (computed_goto_p (t))
2632 return true;
2633 if (is_gimple_call (t))
2634 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2635 return false;
2636 }
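/* Illustrative sketch, not part of the original sources: a computed
   goto such as "goto *p_1;" can always reach any address-taken label,
   and a call with side effects counts as soon as the current function
   contains a nonlocal label, because the callee might transfer control
   back through that label.  */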
2637
2638
2639 /* Return true if STMT should start a new basic block. PREV_STMT is
2640 the statement preceding STMT. It is used when STMT is a label or a
2641 case label. Labels should only start a new basic block if their
2642 previous statement wasn't a label. Otherwise, a sequence of labels
2643 would generate unnecessary basic blocks that contain nothing but a
2644 single label. */
2645
2646 static inline bool
2647 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2648 {
2649 if (stmt == NULL)
2650 return false;
2651
2652 /* Labels start a new basic block only if the preceding statement
2653 wasn't a label of the same type. This prevents the creation of
2654 consecutive blocks that have nothing but a single label. */
2655 if (gimple_code (stmt) == GIMPLE_LABEL)
2656 {
2657 /* Nonlocal and computed GOTO targets always start a new block. */
2658 if (DECL_NONLOCAL (gimple_label_label (stmt))
2659 || FORCED_LABEL (gimple_label_label (stmt)))
2660 return true;
2661
2662 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2663 {
2664 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2665 return true;
2666
2667 cfg_stats.num_merged_labels++;
2668 return false;
2669 }
2670 else
2671 return true;
2672 }
2673
2674 return false;
2675 }
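/* Illustrative sketch, not part of the original sources: in a sequence
   like

       L1:
       L2:
         x_1 = a_2 + 1;

   only L1 starts a new basic block; L2 follows another ordinary label,
   so it is merged into the same block and num_merged_labels is bumped.
   A DECL_NONLOCAL or FORCED_LABEL label in L2's place would still
   start its own block.  */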
2676
2677
2678 /* Return true if T should end a basic block. */
2679
2680 bool
2681 stmt_ends_bb_p (gimple t)
2682 {
2683 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2684 }
2685
2686 /* Remove block annotations and other data structures. */
2687
2688 void
2689 delete_tree_cfg_annotations (void)
2690 {
2691 label_to_block_map = NULL;
2692 }
2693
2694
2695 /* Return the first statement in basic block BB. */
2696
2697 gimple
2698 first_stmt (basic_block bb)
2699 {
2700 gimple_stmt_iterator i = gsi_start_bb (bb);
2701 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2702 }
2703
2704 /* Return the last statement in basic block BB. */
2705
2706 gimple
2707 last_stmt (basic_block bb)
2708 {
2709 gimple_stmt_iterator b = gsi_last_bb (bb);
2710 return !gsi_end_p (b) ? gsi_stmt (b) : NULL;
2711 }
2712
2713 /* Return the last statement of an otherwise empty block. Return NULL
2714 if the block is totally empty, or if it contains more than one
2715 statement. */
2716
2717 gimple
2718 last_and_only_stmt (basic_block bb)
2719 {
2720 gimple_stmt_iterator i = gsi_last_bb (bb);
2721 gimple last, prev;
2722
2723 if (gsi_end_p (i))
2724 return NULL;
2725
2726 last = gsi_stmt (i);
2727 gsi_prev (&i);
2728 if (gsi_end_p (i))
2729 return last;
2730
2731 /* Empty statements should no longer appear in the instruction stream.
2732 Everything that might have appeared before should be deleted by
2733 remove_useless_stmts, and the optimizers should just gsi_remove
2734 instead of smashing with build_empty_stmt.
2735
2736 Thus the only thing that should appear here in a block containing
2737 one executable statement is a label. */
2738 prev = gsi_stmt (i);
2739 if (gimple_code (prev) == GIMPLE_LABEL)
2740 return last;
2741 else
2742 return NULL;
2743 }
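/* Illustrative sketch, not part of the original sources: for a block
   containing only

       L1:
         return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN, since the only other
   statement is a label.  A block with two executable statements, or a
   totally empty block, yields NULL.  */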
2744
2745 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2746
2747 static void
2748 reinstall_phi_args (edge new_edge, edge old_edge)
2749 {
2750 edge_var_map_vector v;
2751 edge_var_map *vm;
2752 int i;
2753 gimple_stmt_iterator phis;
2754
2755 v = redirect_edge_var_map_vector (old_edge);
2756 if (!v)
2757 return;
2758
2759 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2760 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2761 i++, gsi_next (&phis))
2762 {
2763 gimple phi = gsi_stmt (phis);
2764 tree result = redirect_edge_var_map_result (vm);
2765 tree arg = redirect_edge_var_map_def (vm);
2766
2767 gcc_assert (result == gimple_phi_result (phi));
2768
2769 add_phi_arg (phi, arg, new_edge);
2770 }
2771
2772 redirect_edge_var_map_clear (old_edge);
2773 }
2774
2775 /* Returns the basic block after which the new basic block created
2776 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2777 near its "logical" location. This is of most help to humans looking
2778 at debugging dumps. */
2779
2780 static basic_block
2781 split_edge_bb_loc (edge edge_in)
2782 {
2783 basic_block dest = edge_in->dest;
2784
2785 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
2786 return edge_in->src;
2787 else
2788 return dest->prev_bb;
2789 }
2790
2791 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2792 Abort on abnormal edges. */
2793
2794 static basic_block
2795 gimple_split_edge (edge edge_in)
2796 {
2797 basic_block new_bb, after_bb, dest;
2798 edge new_edge, e;
2799
2800 /* Abnormal edges cannot be split. */
2801 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2802
2803 dest = edge_in->dest;
2804
2805 after_bb = split_edge_bb_loc (edge_in);
2806
2807 new_bb = create_empty_bb (after_bb);
2808 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2809 new_bb->count = edge_in->count;
2810 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2811 new_edge->probability = REG_BR_PROB_BASE;
2812 new_edge->count = edge_in->count;
2813
2814 e = redirect_edge_and_branch (edge_in, new_bb);
2815 gcc_assert (e == edge_in);
2816 reinstall_phi_args (new_edge, e);
2817
2818 return new_bb;
2819 }
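/* Illustrative sketch, not part of the original sources: splitting an
   edge A->B creates an empty block N near B, redirects A->B to A->N,
   and adds a fallthru edge N->B with probability REG_BR_PROB_BASE and
   the original edge's count; the PHI arguments that B's PHIs carried
   for the A->B edge are queued by the redirection and reinstalled on
   N->B by reinstall_phi_args.  */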
2820
2821 /* Callback for walk_tree, check that all elements with address taken are
2822 properly noticed as such. DATA is unused by this callback and is
2823 present only to satisfy the walk_tree interface. */
2824
2825 static tree
2826 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2827 {
2828 tree t = *tp, x;
2829
2830 if (TYPE_P (t))
2831 *walk_subtrees = 0;
2832
2833 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2834 #define CHECK_OP(N, MSG) \
2835 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2836 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2837
2838 switch (TREE_CODE (t))
2839 {
2840 case SSA_NAME:
2841 if (SSA_NAME_IN_FREE_LIST (t))
2842 {
2843 error ("SSA name in freelist but still referenced");
2844 return *tp;
2845 }
2846 break;
2847
2848 case INDIRECT_REF:
2849 x = TREE_OPERAND (t, 0);
2850 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2851 {
2852 error ("Indirect reference's operand is not a register or a constant.");
2853 return x;
2854 }
2855 break;
2856
2857 case ASSERT_EXPR:
2858 x = fold (ASSERT_EXPR_COND (t));
2859 if (x == boolean_false_node)
2860 {
2861 error ("ASSERT_EXPR with an always-false condition");
2862 return *tp;
2863 }
2864 break;
2865
2866 case MODIFY_EXPR:
2867 error ("MODIFY_EXPR not expected while having tuples.");
2868 return *tp;
2869
2870 case ADDR_EXPR:
2871 {
2872 bool old_constant;
2873 bool old_side_effects;
2874 bool new_constant;
2875 bool new_side_effects;
2876
2877 gcc_assert (is_gimple_address (t));
2878
2879 old_constant = TREE_CONSTANT (t);
2880 old_side_effects = TREE_SIDE_EFFECTS (t);
2881
2882 recompute_tree_invariant_for_addr_expr (t);
2883 new_side_effects = TREE_SIDE_EFFECTS (t);
2884 new_constant = TREE_CONSTANT (t);
2885
2886 if (old_constant != new_constant)
2887 {
2888 error ("constant not recomputed when ADDR_EXPR changed");
2889 return t;
2890 }
2891 if (old_side_effects != new_side_effects)
2892 {
2893 error ("side effects not recomputed when ADDR_EXPR changed");
2894 return t;
2895 }
2896
2897 /* Skip any references (they will be checked when we recurse down the
2898 tree) and ensure that any variable used as a prefix is marked
2899 addressable. */
2900 for (x = TREE_OPERAND (t, 0);
2901 handled_component_p (x);
2902 x = TREE_OPERAND (x, 0))
2903 ;
2904
2905 if (!(TREE_CODE (x) == VAR_DECL
2906 || TREE_CODE (x) == PARM_DECL
2907 || TREE_CODE (x) == RESULT_DECL))
2908 return NULL;
2909 if (!TREE_ADDRESSABLE (x))
2910 {
2911 error ("address taken, but ADDRESSABLE bit not set");
2912 return x;
2913 }
2914 if (DECL_GIMPLE_REG_P (x))
2915 {
2916 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2917 return x;
2918 }
2919
2920 break;
2921 }
2922
2923 case COND_EXPR:
2924 x = COND_EXPR_COND (t);
2925 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2926 {
2927 error ("non-integral used in condition");
2928 return x;
2929 }
2930 if (!is_gimple_condexpr (x))
2931 {
2932 error ("invalid conditional operand");
2933 return x;
2934 }
2935 break;
2936
2937 case NON_LVALUE_EXPR:
2938 gcc_unreachable ();
2939
2940 CASE_CONVERT:
2941 case FIX_TRUNC_EXPR:
2942 case FLOAT_EXPR:
2943 case NEGATE_EXPR:
2944 case ABS_EXPR:
2945 case BIT_NOT_EXPR:
2946 case TRUTH_NOT_EXPR:
2947 CHECK_OP (0, "invalid operand to unary operator");
2948 break;
2949
2950 case REALPART_EXPR:
2951 case IMAGPART_EXPR:
2952 case COMPONENT_REF:
2953 case ARRAY_REF:
2954 case ARRAY_RANGE_REF:
2955 case BIT_FIELD_REF:
2956 case VIEW_CONVERT_EXPR:
2957 /* We have a nest of references. Verify that each of the operands
2958 that determine where to reference is either a constant or a variable,
2959 verify that the base is valid, and then show we've already checked
2960 the subtrees. */
2961 while (handled_component_p (t))
2962 {
2963 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2964 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2965 else if (TREE_CODE (t) == ARRAY_REF
2966 || TREE_CODE (t) == ARRAY_RANGE_REF)
2967 {
2968 CHECK_OP (1, "invalid array index");
2969 if (TREE_OPERAND (t, 2))
2970 CHECK_OP (2, "invalid array lower bound");
2971 if (TREE_OPERAND (t, 3))
2972 CHECK_OP (3, "invalid array stride");
2973 }
2974 else if (TREE_CODE (t) == BIT_FIELD_REF)
2975 {
2976 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2977 || !host_integerp (TREE_OPERAND (t, 2), 1))
2978 {
2979 error ("invalid position or size operand to BIT_FIELD_REF");
2980 return t;
2981 }
2982 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2983 && (TYPE_PRECISION (TREE_TYPE (t))
2984 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2985 {
2986 error ("integral result type precision does not match "
2987 "field size of BIT_FIELD_REF");
2988 return t;
2989 }
2990 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2991 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2992 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2993 {
2994 error ("mode precision of non-integral result does not "
2995 "match field size of BIT_FIELD_REF");
2996 return t;
2997 }
2998 }
2999
3000 t = TREE_OPERAND (t, 0);
3001 }
3002
3003 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3004 {
3005 error ("invalid reference prefix");
3006 return t;
3007 }
3008 *walk_subtrees = 0;
3009 break;
3010 case PLUS_EXPR:
3011 case MINUS_EXPR:
3012 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
3013 arithmetic should be done using POINTER_PLUS_EXPR. */
3014 if (POINTER_TYPE_P (TREE_TYPE (t)))
3015 {
3016 error ("invalid operand to plus/minus, type is a pointer");
3017 return t;
3018 }
3019 CHECK_OP (0, "invalid operand to binary operator");
3020 CHECK_OP (1, "invalid operand to binary operator");
3021 break;
3022
3023 case POINTER_PLUS_EXPR:
3024 /* Check to make sure the first operand is a pointer or reference type. */
3025 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3026 {
3027 error ("invalid operand to pointer plus, first operand is not a pointer");
3028 return t;
3029 }
3030 /* Check to make sure the second operand is an integer with type of
3031 sizetype. */
3032 if (!useless_type_conversion_p (sizetype,
3033 TREE_TYPE (TREE_OPERAND (t, 1))))
3034 {
3035 error ("invalid operand to pointer plus, second operand is not an "
3036 "integer with type of sizetype.");
3037 return t;
3038 }
3039 /* FALLTHROUGH */
3040 case LT_EXPR:
3041 case LE_EXPR:
3042 case GT_EXPR:
3043 case GE_EXPR:
3044 case EQ_EXPR:
3045 case NE_EXPR:
3046 case UNORDERED_EXPR:
3047 case ORDERED_EXPR:
3048 case UNLT_EXPR:
3049 case UNLE_EXPR:
3050 case UNGT_EXPR:
3051 case UNGE_EXPR:
3052 case UNEQ_EXPR:
3053 case LTGT_EXPR:
3054 case MULT_EXPR:
3055 case TRUNC_DIV_EXPR:
3056 case CEIL_DIV_EXPR:
3057 case FLOOR_DIV_EXPR:
3058 case ROUND_DIV_EXPR:
3059 case TRUNC_MOD_EXPR:
3060 case CEIL_MOD_EXPR:
3061 case FLOOR_MOD_EXPR:
3062 case ROUND_MOD_EXPR:
3063 case RDIV_EXPR:
3064 case EXACT_DIV_EXPR:
3065 case MIN_EXPR:
3066 case MAX_EXPR:
3067 case LSHIFT_EXPR:
3068 case RSHIFT_EXPR:
3069 case LROTATE_EXPR:
3070 case RROTATE_EXPR:
3071 case BIT_IOR_EXPR:
3072 case BIT_XOR_EXPR:
3073 case BIT_AND_EXPR:
3074 CHECK_OP (0, "invalid operand to binary operator");
3075 CHECK_OP (1, "invalid operand to binary operator");
3076 break;
3077
3078 case CONSTRUCTOR:
3079 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3080 *walk_subtrees = 0;
3081 break;
3082
3083 default:
3084 break;
3085 }
3086 return NULL;
3087
3088 #undef CHECK_OP
3089 }
3090
3091
3092 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3093 Returns true if there is an error, otherwise false. */
3094
3095 static bool
3096 verify_types_in_gimple_min_lval (tree expr)
3097 {
3098 tree op;
3099
3100 if (is_gimple_id (expr))
3101 return false;
3102
3103 if (!INDIRECT_REF_P (expr)
3104 && TREE_CODE (expr) != TARGET_MEM_REF)
3105 {
3106 error ("invalid expression for min lvalue");
3107 return true;
3108 }
3109
3110 /* TARGET_MEM_REFs are strange beasts. */
3111 if (TREE_CODE (expr) == TARGET_MEM_REF)
3112 return false;
3113
3114 op = TREE_OPERAND (expr, 0);
3115 if (!is_gimple_val (op))
3116 {
3117 error ("invalid operand in indirect reference");
3118 debug_generic_stmt (op);
3119 return true;
3120 }
3121 if (!useless_type_conversion_p (TREE_TYPE (expr),
3122 TREE_TYPE (TREE_TYPE (op))))
3123 {
3124 error ("type mismatch in indirect reference");
3125 debug_generic_stmt (TREE_TYPE (expr));
3126 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3127 return true;
3128 }
3129
3130 return false;
3131 }
3132
3133 /* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3134 if there is an error, otherwise false. */
3135
3136 static bool
3137 verify_types_in_gimple_reference (tree expr)
3138 {
3139 while (handled_component_p (expr))
3140 {
3141 tree op = TREE_OPERAND (expr, 0);
3142
3143 if (TREE_CODE (expr) == ARRAY_REF
3144 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3145 {
3146 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3147 || (TREE_OPERAND (expr, 2)
3148 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3149 || (TREE_OPERAND (expr, 3)
3150 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3151 {
3152 error ("invalid operands to array reference");
3153 debug_generic_stmt (expr);
3154 return true;
3155 }
3156 }
3157
3158 /* Verify if the reference array element types are compatible. */
3159 if (TREE_CODE (expr) == ARRAY_REF
3160 && !useless_type_conversion_p (TREE_TYPE (expr),
3161 TREE_TYPE (TREE_TYPE (op))))
3162 {
3163 error ("type mismatch in array reference");
3164 debug_generic_stmt (TREE_TYPE (expr));
3165 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3166 return true;
3167 }
3168 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3169 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3170 TREE_TYPE (TREE_TYPE (op))))
3171 {
3172 error ("type mismatch in array range reference");
3173 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3174 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3175 return true;
3176 }
3177
3178 if ((TREE_CODE (expr) == REALPART_EXPR
3179 || TREE_CODE (expr) == IMAGPART_EXPR)
3180 && !useless_type_conversion_p (TREE_TYPE (expr),
3181 TREE_TYPE (TREE_TYPE (op))))
3182 {
3183 error ("type mismatch in real/imagpart reference");
3184 debug_generic_stmt (TREE_TYPE (expr));
3185 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3186 return true;
3187 }
3188
3189 if (TREE_CODE (expr) == COMPONENT_REF
3190 && !useless_type_conversion_p (TREE_TYPE (expr),
3191 TREE_TYPE (TREE_OPERAND (expr, 1))))
3192 {
3193 error ("type mismatch in component reference");
3194 debug_generic_stmt (TREE_TYPE (expr));
3195 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3196 return true;
3197 }
3198
3199 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3200 is nothing to verify. Gross mismatches at most invoke
3201 undefined behavior. */
3202 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3203 && !handled_component_p (op))
3204 return false;
3205
3206 expr = op;
3207 }
3208
3209 return verify_types_in_gimple_min_lval (expr);
3210 }
3211
3212 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3213 list of pointer-to types that is trivially convertible to DEST. */
3214
3215 static bool
3216 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3217 {
3218 tree src;
3219
3220 if (!TYPE_POINTER_TO (src_obj))
3221 return true;
3222
3223 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3224 if (useless_type_conversion_p (dest, src))
3225 return true;
3226
3227 return false;
3228 }
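/* Illustrative sketch, not part of the original sources: the predicate
   walks the TYPE_POINTER_TO chain of SRC_OBJ (every pointer-to-SRC_OBJ
   type ever built) and succeeds as soon as one of them converts
   trivially to DEST; if no pointer to SRC_OBJ was ever built, the
   chain is empty and the predicate conservatively answers true.  */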
3229
3230 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3231 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3232
3233 static bool
3234 valid_fixed_convert_types_p (tree type1, tree type2)
3235 {
3236 return (FIXED_POINT_TYPE_P (type1)
3237 && (INTEGRAL_TYPE_P (type2)
3238 || SCALAR_FLOAT_TYPE_P (type2)
3239 || FIXED_POINT_TYPE_P (type2)));
3240 }
3241
3242 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3243 is a problem, otherwise false. */
3244
3245 static bool
3246 verify_gimple_call (gimple stmt)
3247 {
3248 tree fn = gimple_call_fn (stmt);
3249 tree fntype;
3250
3251 if (!POINTER_TYPE_P (TREE_TYPE (fn))
3252 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3253 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
3254 {
3255 error ("non-function in gimple call");
3256 return true;
3257 }
3258
3259 if (gimple_call_lhs (stmt)
3260 && !is_gimple_lvalue (gimple_call_lhs (stmt)))
3261 {
3262 error ("invalid LHS in gimple call");
3263 return true;
3264 }
3265
3266 fntype = TREE_TYPE (TREE_TYPE (fn));
3267 if (gimple_call_lhs (stmt)
3268 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3269 TREE_TYPE (fntype))
3270 /* ??? At least C++ misses conversions at assignments from
3271 void * call results.
3272 ??? Java is completely off. Especially with functions
3273 returning java.lang.Object.
3274 For now simply allow arbitrary pointer type conversions. */
3275 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3276 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3277 {
3278 error ("invalid conversion in gimple call");
3279 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3280 debug_generic_stmt (TREE_TYPE (fntype));
3281 return true;
3282 }
3283
3284 /* ??? The C frontend passes unpromoted arguments in case it
3285 didn't see a function declaration before the call. So for now
3286 leave the call arguments unverified. Once we gimplify
3287 unit-at-a-time we have a chance to fix this. */
3288
3289 return false;
3290 }
3291
3292 /* Verifies the gimple comparison with the result type TYPE and
3293 the operands OP0 and OP1. */
3294
3295 static bool
3296 verify_gimple_comparison (tree type, tree op0, tree op1)
3297 {
3298 tree op0_type = TREE_TYPE (op0);
3299 tree op1_type = TREE_TYPE (op1);
3300
3301 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3302 {
3303 error ("invalid operands in gimple comparison");
3304 return true;
3305 }
3306
3307 /* For comparisons we do not have the operation's type as the
3308 effective type the comparison is carried out in. Instead
3309 we require that either the first operand is trivially
3310 convertible into the second, or the other way around.
3311 The resulting type of a comparison may be any integral type.
3312 Because we special-case pointers to void we allow
3313 comparisons of pointers with the same mode as well. */
3314 if ((!useless_type_conversion_p (op0_type, op1_type)
3315 && !useless_type_conversion_p (op1_type, op0_type)
3316 && (!POINTER_TYPE_P (op0_type)
3317 || !POINTER_TYPE_P (op1_type)
3318 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3319 || !INTEGRAL_TYPE_P (type))
3320 {
3321 error ("type mismatch in comparison expression");
3322 debug_generic_expr (type);
3323 debug_generic_expr (op0_type);
3324 debug_generic_expr (op1_type);
3325 return true;
3326 }
3327
3328 return false;
3329 }
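/* Illustrative sketch, not part of the original sources: a comparison
   of a "void *" value with a "char *" value is accepted even though
   neither type trivially converts to the other, because both are
   pointers of the same mode; comparing an int with a double without an
   inserted conversion fails with "type mismatch in comparison
   expression".  The result type only needs to be integral.  */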
3330
3331 /* Verify a gimple assignment statement STMT with an unary rhs.
3332 Returns true if anything is wrong. */
3333
3334 static bool
3335 verify_gimple_assign_unary (gimple stmt)
3336 {
3337 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3338 tree lhs = gimple_assign_lhs (stmt);
3339 tree lhs_type = TREE_TYPE (lhs);
3340 tree rhs1 = gimple_assign_rhs1 (stmt);
3341 tree rhs1_type = TREE_TYPE (rhs1);
3342
3343 if (!is_gimple_reg (lhs)
3344 && !(optimize == 0
3345 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3346 {
3347 error ("non-register as LHS of unary operation");
3348 return true;
3349 }
3350
3351 if (!is_gimple_val (rhs1))
3352 {
3353 error ("invalid operand in unary operation");
3354 return true;
3355 }
3356
3357 /* First handle conversions. */
3358 switch (rhs_code)
3359 {
3360 CASE_CONVERT:
3361 {
3362 /* Allow conversions between integral types and pointers only if
3363 there is no sign or zero extension involved.
3364 For targets where the precision of sizetype doesn't match that
3365 of pointers we need to allow arbitrary conversions from and
3366 to sizetype. */
3367 if ((POINTER_TYPE_P (lhs_type)
3368 && INTEGRAL_TYPE_P (rhs1_type)
3369 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3370 || rhs1_type == sizetype))
3371 || (POINTER_TYPE_P (rhs1_type)
3372 && INTEGRAL_TYPE_P (lhs_type)
3373 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3374 || lhs_type == sizetype)))
3375 return false;
3376
3377 /* Allow conversion from integer to offset type and vice versa. */
3378 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3379 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3380 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3381 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3382 return false;
3383
3384 /* Otherwise assert we are converting between types of the
3385 same kind. */
3386 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3387 {
3388 error ("invalid types in nop conversion");
3389 debug_generic_expr (lhs_type);
3390 debug_generic_expr (rhs1_type);
3391 return true;
3392 }
3393
3394 return false;
3395 }
3396
3397 case FIXED_CONVERT_EXPR:
3398 {
3399 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3400 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3401 {
3402 error ("invalid types in fixed-point conversion");
3403 debug_generic_expr (lhs_type);
3404 debug_generic_expr (rhs1_type);
3405 return true;
3406 }
3407
3408 return false;
3409 }
3410
3411 case FLOAT_EXPR:
3412 {
3413 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3414 {
3415 error ("invalid types in conversion to floating point");
3416 debug_generic_expr (lhs_type);
3417 debug_generic_expr (rhs1_type);
3418 return true;
3419 }
3420
3421 return false;
3422 }
3423
3424 case FIX_TRUNC_EXPR:
3425 {
3426 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3427 {
3428 error ("invalid types in conversion to integer");
3429 debug_generic_expr (lhs_type);
3430 debug_generic_expr (rhs1_type);
3431 return true;
3432 }
3433
3434 return false;
3435 }
3436
3437 case VEC_UNPACK_HI_EXPR:
3438 case VEC_UNPACK_LO_EXPR:
3439 case REDUC_MAX_EXPR:
3440 case REDUC_MIN_EXPR:
3441 case REDUC_PLUS_EXPR:
3442 case VEC_UNPACK_FLOAT_HI_EXPR:
3443 case VEC_UNPACK_FLOAT_LO_EXPR:
3444 /* FIXME. */
3445 return false;
3446
3447 case TRUTH_NOT_EXPR:
3448 case NEGATE_EXPR:
3449 case ABS_EXPR:
3450 case BIT_NOT_EXPR:
3451 case PAREN_EXPR:
3452 case NON_LVALUE_EXPR:
3453 case CONJ_EXPR:
3454 break;
3455
3456 default:
3457 gcc_unreachable ();
3458 }
3459
3460 /* For the remaining codes assert there is no conversion involved. */
3461 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3462 {
3463 error ("non-trivial conversion in unary operation");
3464 debug_generic_expr (lhs_type);
3465 debug_generic_expr (rhs1_type);
3466 return true;
3467 }
3468
3469 return false;
3470 }
3471
3472 /* Verify a gimple assignment statement STMT with a binary rhs.
3473 Returns true if anything is wrong. */
3474
3475 static bool
3476 verify_gimple_assign_binary (gimple stmt)
3477 {
3478 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3479 tree lhs = gimple_assign_lhs (stmt);
3480 tree lhs_type = TREE_TYPE (lhs);
3481 tree rhs1 = gimple_assign_rhs1 (stmt);
3482 tree rhs1_type = TREE_TYPE (rhs1);
3483 tree rhs2 = gimple_assign_rhs2 (stmt);
3484 tree rhs2_type = TREE_TYPE (rhs2);
3485
3486 if (!is_gimple_reg (lhs)
3487 && !(optimize == 0
3488 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3489 {
3490 error ("non-register as LHS of binary operation");
3491 return true;
3492 }
3493
3494 if (!is_gimple_val (rhs1)
3495 || !is_gimple_val (rhs2))
3496 {
3497 error ("invalid operands in binary operation");
3498 return true;
3499 }
3500
3501 /* First handle operations that involve different types. */
3502 switch (rhs_code)
3503 {
3504 case COMPLEX_EXPR:
3505 {
3506 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3507 || !(INTEGRAL_TYPE_P (rhs1_type)
3508 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3509 || !(INTEGRAL_TYPE_P (rhs2_type)
3510 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3511 {
3512 error ("type mismatch in complex expression");
3513 debug_generic_expr (lhs_type);
3514 debug_generic_expr (rhs1_type);
3515 debug_generic_expr (rhs2_type);
3516 return true;
3517 }
3518
3519 return false;
3520 }
3521
3522 case LSHIFT_EXPR:
3523 case RSHIFT_EXPR:
3524 case LROTATE_EXPR:
3525 case RROTATE_EXPR:
3526 {
3527 /* Shifts and rotates are ok on integral types, fixed point
3528 types and integer vector types. */
3529 if ((!INTEGRAL_TYPE_P (rhs1_type)
3530 && !FIXED_POINT_TYPE_P (rhs1_type)
3531 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3532 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE))
3533 || (!INTEGRAL_TYPE_P (rhs2_type)
3534 /* Vector shifts of vectors are also ok. */
3535 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3536 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE
3537 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3538 && TREE_CODE (TREE_TYPE (rhs2_type)) == INTEGER_TYPE))
3539 || !useless_type_conversion_p (lhs_type, rhs1_type))
3540 {
3541 error ("type mismatch in shift expression");
3542 debug_generic_expr (lhs_type);
3543 debug_generic_expr (rhs1_type);
3544 debug_generic_expr (rhs2_type);
3545 return true;
3546 }
3547
3548 return false;
3549 }
3550
3551 case VEC_LSHIFT_EXPR:
3552 case VEC_RSHIFT_EXPR:
3553 {
3554 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3555 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3556 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type)))
3557 || (!INTEGRAL_TYPE_P (rhs2_type)
3558 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3559 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3560 || !useless_type_conversion_p (lhs_type, rhs1_type))
3561 {
3562 error ("type mismatch in vector shift expression");
3563 debug_generic_expr (lhs_type);
3564 debug_generic_expr (rhs1_type);
3565 debug_generic_expr (rhs2_type);
3566 return true;
3567 }
3568
3569 return false;
3570 }
3571
3572 case PLUS_EXPR:
3573 {
3574 /* We use regular PLUS_EXPR for vectors.
3575 ??? This just makes the checker happy and may not be what is
3576 intended. */
3577 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3578 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3579 {
3580 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3581 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3582 {
3583 error ("invalid non-vector operands to vector valued plus");
3584 return true;
3585 }
3586 lhs_type = TREE_TYPE (lhs_type);
3587 rhs1_type = TREE_TYPE (rhs1_type);
3588 rhs2_type = TREE_TYPE (rhs2_type);
3589 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3590 the pointer into the second operand. */
3591 if (POINTER_TYPE_P (rhs2_type))
3592 {
3593 tree tem = rhs1_type;
3594 rhs1_type = rhs2_type;
3595 rhs2_type = tem;
3596 }
3597 goto do_pointer_plus_expr_check;
3598 }
3599 }
3600 /* Fallthru. */
3601 case MINUS_EXPR:
3602 {
3603 if (POINTER_TYPE_P (lhs_type)
3604 || POINTER_TYPE_P (rhs1_type)
3605 || POINTER_TYPE_P (rhs2_type))
3606 {
3607 error ("invalid (pointer) operands to plus/minus");
3608 return true;
3609 }
3610
3611 /* Continue with generic binary expression handling. */
3612 break;
3613 }
3614
3615 case POINTER_PLUS_EXPR:
3616 {
3617 do_pointer_plus_expr_check:
3618 if (!POINTER_TYPE_P (rhs1_type)
3619 || !useless_type_conversion_p (lhs_type, rhs1_type)
3620 || !useless_type_conversion_p (sizetype, rhs2_type))
3621 {
3622 error ("type mismatch in pointer plus expression");
3623 debug_generic_stmt (lhs_type);
3624 debug_generic_stmt (rhs1_type);
3625 debug_generic_stmt (rhs2_type);
3626 return true;
3627 }
3628
3629 return false;
3630 }
3631
3632 case TRUTH_ANDIF_EXPR:
3633 case TRUTH_ORIF_EXPR:
3634 gcc_unreachable ();
3635
3636 case TRUTH_AND_EXPR:
3637 case TRUTH_OR_EXPR:
3638 case TRUTH_XOR_EXPR:
3639 {
3640 /* We allow any kind of integral typed argument and result. */
3641 if (!INTEGRAL_TYPE_P (rhs1_type)
3642 || !INTEGRAL_TYPE_P (rhs2_type)
3643 || !INTEGRAL_TYPE_P (lhs_type))
3644 {
3645 error ("type mismatch in binary truth expression");
3646 debug_generic_expr (lhs_type);
3647 debug_generic_expr (rhs1_type);
3648 debug_generic_expr (rhs2_type);
3649 return true;
3650 }
3651
3652 return false;
3653 }
3654
3655 case LT_EXPR:
3656 case LE_EXPR:
3657 case GT_EXPR:
3658 case GE_EXPR:
3659 case EQ_EXPR:
3660 case NE_EXPR:
3661 case UNORDERED_EXPR:
3662 case ORDERED_EXPR:
3663 case UNLT_EXPR:
3664 case UNLE_EXPR:
3665 case UNGT_EXPR:
3666 case UNGE_EXPR:
3667 case UNEQ_EXPR:
3668 case LTGT_EXPR:
3669 /* Comparisons are also binary, but the result type is not
3670 connected to the operand types. */
3671 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3672
3673 case WIDEN_SUM_EXPR:
3674 case WIDEN_MULT_EXPR:
3675 case VEC_WIDEN_MULT_HI_EXPR:
3676 case VEC_WIDEN_MULT_LO_EXPR:
3677 case VEC_PACK_TRUNC_EXPR:
3678 case VEC_PACK_SAT_EXPR:
3679 case VEC_PACK_FIX_TRUNC_EXPR:
3680 case VEC_EXTRACT_EVEN_EXPR:
3681 case VEC_EXTRACT_ODD_EXPR:
3682 case VEC_INTERLEAVE_HIGH_EXPR:
3683 case VEC_INTERLEAVE_LOW_EXPR:
3684 /* FIXME. */
3685 return false;
3686
3687 case MULT_EXPR:
3688 case TRUNC_DIV_EXPR:
3689 case CEIL_DIV_EXPR:
3690 case FLOOR_DIV_EXPR:
3691 case ROUND_DIV_EXPR:
3692 case TRUNC_MOD_EXPR:
3693 case CEIL_MOD_EXPR:
3694 case FLOOR_MOD_EXPR:
3695 case ROUND_MOD_EXPR:
3696 case RDIV_EXPR:
3697 case EXACT_DIV_EXPR:
3698 case MIN_EXPR:
3699 case MAX_EXPR:
3700 case BIT_IOR_EXPR:
3701 case BIT_XOR_EXPR:
3702 case BIT_AND_EXPR:
3703 /* Continue with generic binary expression handling. */
3704 break;
3705
3706 default:
3707 gcc_unreachable ();
3708 }
3709
3710 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3711 || !useless_type_conversion_p (lhs_type, rhs2_type))
3712 {
3713 error ("type mismatch in binary expression");
3714 debug_generic_stmt (lhs_type);
3715 debug_generic_stmt (rhs1_type);
3716 debug_generic_stmt (rhs2_type);
3717 return true;
3718 }
3719
3720 return false;
3721 }
3722
3723 /* Verify a gimple assignment statement STMT with a single rhs.
3724 Returns true if anything is wrong. */
3725
3726 static bool
3727 verify_gimple_assign_single (gimple stmt)
3728 {
3729 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3730 tree lhs = gimple_assign_lhs (stmt);
3731 tree lhs_type = TREE_TYPE (lhs);
3732 tree rhs1 = gimple_assign_rhs1 (stmt);
3733 tree rhs1_type = TREE_TYPE (rhs1);
3734 bool res = false;
3735
3736 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3737 {
3738 error ("non-trivial conversion at assignment");
3739 debug_generic_expr (lhs_type);
3740 debug_generic_expr (rhs1_type);
3741 return true;
3742 }
3743
3744 if (handled_component_p (lhs))
3745 res |= verify_types_in_gimple_reference (lhs);
3746
3747 /* Special codes we cannot handle via their class. */
3748 switch (rhs_code)
3749 {
3750 case ADDR_EXPR:
3751 {
3752 tree op = TREE_OPERAND (rhs1, 0);
3753 if (!is_gimple_addressable (op))
3754 {
3755 error ("invalid operand in unary expression");
3756 return true;
3757 }
3758
3759 if (!one_pointer_to_useless_type_conversion_p (lhs_type,
3760 TREE_TYPE (op)))
3761 {
3762 error ("type mismatch in address expression");
3763 debug_generic_stmt (lhs_type);
3764 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
3765 return true;
3766 }
3767
3768 return verify_types_in_gimple_reference (op);
3769 }
3770
3771 /* tcc_reference */
3772 case COMPONENT_REF:
3773 case BIT_FIELD_REF:
3774 case INDIRECT_REF:
3775 case ALIGN_INDIRECT_REF:
3776 case MISALIGNED_INDIRECT_REF:
3777 case ARRAY_REF:
3778 case ARRAY_RANGE_REF:
3779 case VIEW_CONVERT_EXPR:
3780 case REALPART_EXPR:
3781 case IMAGPART_EXPR:
3782 case TARGET_MEM_REF:
3783 if (!is_gimple_reg (lhs)
3784 && is_gimple_reg_type (TREE_TYPE (lhs)))
3785 {
3786 error ("invalid rhs for gimple memory store");
3787 debug_generic_stmt (lhs);
3788 debug_generic_stmt (rhs1);
3789 return true;
3790 }
3791 return res || verify_types_in_gimple_reference (rhs1);
3792
3793 /* tcc_constant */
3794 case SSA_NAME:
3795 case INTEGER_CST:
3796 case REAL_CST:
3797 case FIXED_CST:
3798 case COMPLEX_CST:
3799 case VECTOR_CST:
3800 case STRING_CST:
3801 return res;
3802
3803 /* tcc_declaration */
3804 case CONST_DECL:
3805 return res;
3806 case VAR_DECL:
3807 case PARM_DECL:
3808 if (!is_gimple_reg (lhs)
3809 && !is_gimple_reg (rhs1)
3810 && is_gimple_reg_type (TREE_TYPE (lhs)))
3811 {
3812 error ("invalid rhs for gimple memory store");
3813 debug_generic_stmt (lhs);
3814 debug_generic_stmt (rhs1);
3815 return true;
3816 }
3817 return res;
3818
3819 case COND_EXPR:
3820 case CONSTRUCTOR:
3821 case OBJ_TYPE_REF:
3822 case ASSERT_EXPR:
3823 case WITH_SIZE_EXPR:
3824 case EXC_PTR_EXPR:
3825 case FILTER_EXPR:
3826 case POLYNOMIAL_CHREC:
3827 case DOT_PROD_EXPR:
3828 case VEC_COND_EXPR:
3829 case REALIGN_LOAD_EXPR:
3830 /* FIXME. */
3831 return res;
3832
3833 default:;
3834 }
3835
3836 return res;
3837 }
3838
3839 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3840 is a problem, otherwise false. */
3841
3842 static bool
3843 verify_gimple_assign (gimple stmt)
3844 {
3845 switch (gimple_assign_rhs_class (stmt))
3846 {
3847 case GIMPLE_SINGLE_RHS:
3848 return verify_gimple_assign_single (stmt);
3849
3850 case GIMPLE_UNARY_RHS:
3851 return verify_gimple_assign_unary (stmt);
3852
3853 case GIMPLE_BINARY_RHS:
3854 return verify_gimple_assign_binary (stmt);
3855
3856 default:
3857 gcc_unreachable ();
3858 }
3859 }
3860
3861 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3862 is a problem, otherwise false. */
3863
3864 static bool
3865 verify_gimple_return (gimple stmt)
3866 {
3867 tree op = gimple_return_retval (stmt);
3868 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3869
3870 /* We cannot test for present return values as we do not fix up missing
3871 return values from the original source. */
3872 if (op == NULL)
3873 return false;
3874
3875 if (!is_gimple_val (op)
3876 && TREE_CODE (op) != RESULT_DECL)
3877 {
3878 error ("invalid operand in return statement");
3879 debug_generic_stmt (op);
3880 return true;
3881 }
3882
3883 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
3884 /* ??? With C++ we can have the situation that the result
3885 decl is a reference type while the return type is an aggregate. */
3886 && !(TREE_CODE (op) == RESULT_DECL
3887 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
3888 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
3889 {
3890 error ("invalid conversion in return statement");
3891 debug_generic_stmt (restype);
3892 debug_generic_stmt (TREE_TYPE (op));
3893 return true;
3894 }
3895
3896 return false;
3897 }
3898
3899
3900 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3901 is a problem, otherwise false. */
3902
3903 static bool
3904 verify_gimple_goto (gimple stmt)
3905 {
3906 tree dest = gimple_goto_dest (stmt);
3907
3908 /* ??? We have two canonical forms of direct goto destinations, a
3909 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3910 if (TREE_CODE (dest) != LABEL_DECL
3911 && (!is_gimple_val (dest)
3912 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3913 {
3914 error ("goto destination is neither a label nor a pointer");
3915 return true;
3916 }
3917
3918 return false;
3919 }
3920
3921 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3922 is a problem, otherwise false. */
3923
3924 static bool
3925 verify_gimple_switch (gimple stmt)
3926 {
3927 if (!is_gimple_val (gimple_switch_index (stmt)))
3928 {
3929 error ("invalid operand to switch statement");
3930 debug_generic_stmt (gimple_switch_index (stmt));
3931 return true;
3932 }
3933
3934 return false;
3935 }
3936
3937
3938 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
3939 and false otherwise. */
3940
3941 static bool
3942 verify_gimple_phi (gimple stmt)
3943 {
3944 tree type = TREE_TYPE (gimple_phi_result (stmt));
3945 unsigned i;
3946
3947 if (!is_gimple_variable (gimple_phi_result (stmt)))
3948 {
3949 error ("Invalid PHI result");
3950 return true;
3951 }
3952
3953 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3954 {
3955 tree arg = gimple_phi_arg_def (stmt, i);
3956 if ((is_gimple_reg (gimple_phi_result (stmt))
3957 && !is_gimple_val (arg))
3958 || (!is_gimple_reg (gimple_phi_result (stmt))
3959 && !is_gimple_addressable (arg)))
3960 {
3961 error ("Invalid PHI argument");
3962 debug_generic_stmt (arg);
3963 return true;
3964 }
3965 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3966 {
3967 error ("incompatible types in PHI argument %u", i);
3968 debug_generic_stmt (type);
3969 debug_generic_stmt (TREE_TYPE (arg));
3970 return true;
3971 }
3972 }
3973
3974 return false;
3975 }
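
/* Illustration (hand-written SSA sketch): given

     # a_3 = PHI <a_1(2), 7(3)>

   the result a_3 must be a variable, each argument (a_1 and 7) must
   be a GIMPLE value, and every argument must be type-compatible with
   the result; a mismatched argument type would trigger the
   "incompatible types" error above.  */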
3976
3977
3978 /* Verify the GIMPLE statement STMT. Returns true if there is an
3979 error, otherwise false. */
3980
3981 static bool
3982 verify_types_in_gimple_stmt (gimple stmt)
3983 {
3984 if (is_gimple_omp (stmt))
3985 {
3986 /* OpenMP directives are validated by the FE and never operated
3987 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
3988 non-gimple expressions when the main index variable has had
3989 its address taken. This does not affect the loop itself
3990 because the header of a GIMPLE_OMP_FOR is merely used to determine
3991 how to set up the parallel iteration. */
3992 return false;
3993 }
3994
3995 switch (gimple_code (stmt))
3996 {
3997 case GIMPLE_ASSIGN:
3998 return verify_gimple_assign (stmt);
3999
4000 case GIMPLE_LABEL:
4001 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
4002
4003 case GIMPLE_CALL:
4004 return verify_gimple_call (stmt);
4005
4006 case GIMPLE_COND:
4007 return verify_gimple_comparison (boolean_type_node,
4008 gimple_cond_lhs (stmt),
4009 gimple_cond_rhs (stmt));
4010
4011 case GIMPLE_GOTO:
4012 return verify_gimple_goto (stmt);
4013
4014 case GIMPLE_SWITCH:
4015 return verify_gimple_switch (stmt);
4016
4017 case GIMPLE_RETURN:
4018 return verify_gimple_return (stmt);
4019
4020 case GIMPLE_ASM:
4021 return false;
4022
4023 case GIMPLE_CHANGE_DYNAMIC_TYPE:
4024 return (!is_gimple_val (gimple_cdt_location (stmt))
4025 || !POINTER_TYPE_P (TREE_TYPE (gimple_cdt_location (stmt))));
4026
4027 case GIMPLE_PHI:
4028 return verify_gimple_phi (stmt);
4029
4030 /* Tuples that do not have tree operands. */
4031 case GIMPLE_NOP:
4032 case GIMPLE_RESX:
4033 case GIMPLE_PREDICT:
4034 return false;
4035
4036 default:
4037 gcc_unreachable ();
4038 }
4039 }
4040
4041 /* Verify the GIMPLE statements inside the sequence STMTS. */
4042
4043 static bool
4044 verify_types_in_gimple_seq_2 (gimple_seq stmts)
4045 {
4046 gimple_stmt_iterator ittr;
4047 bool err = false;
4048
4049 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4050 {
4051 gimple stmt = gsi_stmt (ittr);
4052
4053 switch (gimple_code (stmt))
4054 {
4055 case GIMPLE_BIND:
4056 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
4057 break;
4058
4059 case GIMPLE_TRY:
4060 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
4061 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
4062 break;
4063
4064 case GIMPLE_EH_FILTER:
4065 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
4066 break;
4067
4068 case GIMPLE_CATCH:
4069 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
4070 break;
4071
4072 default:
4073 {
4074 bool err2 = verify_types_in_gimple_stmt (stmt);
4075 if (err2)
4076 debug_gimple_stmt (stmt);
4077 err |= err2;
4078 }
4079 }
4080 }
4081
4082 return err;
4083 }
4084
4085
4086 /* Verify the GIMPLE statements inside the statement list STMTS. */
4087
4088 void
4089 verify_types_in_gimple_seq (gimple_seq stmts)
4090 {
4091 if (verify_types_in_gimple_seq_2 (stmts))
4092 internal_error ("verify_gimple failed");
4093 }
4094
4095
4096 /* Verify the statement at *GSI; return true if it is not in GIMPLE form.
4097 TODO: Implement type checking. */
4098
4099 static bool
4100 verify_stmt (gimple_stmt_iterator *gsi)
4101 {
4102 tree addr;
4103 struct walk_stmt_info wi;
4104 bool last_in_block = gsi_one_before_end_p (*gsi);
4105 gimple stmt = gsi_stmt (*gsi);
4106
4107 if (is_gimple_omp (stmt))
4108 {
4109 /* OpenMP directives are validated by the FE and never operated
4110 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4111 non-gimple expressions when the main index variable has had
4112 its address taken. This does not affect the loop itself
4113 because the header of a GIMPLE_OMP_FOR is merely used to determine
4114 how to set up the parallel iteration. */
4115 return false;
4116 }
4117
4118 /* FIXME. The C frontend passes unpromoted arguments in case it
4119 didn't see a function declaration before the call. */
4120 if (is_gimple_call (stmt))
4121 {
4122 tree decl;
4123
4124 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4125 {
4126 error ("invalid function in call statement");
4127 return true;
4128 }
4129
4130 decl = gimple_call_fndecl (stmt);
4131 if (decl
4132 && TREE_CODE (decl) == FUNCTION_DECL
4133 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4134 && (!DECL_PURE_P (decl))
4135 && (!TREE_READONLY (decl)))
4136 {
4137 error ("invalid pure const state for function");
4138 return true;
4139 }
4140 }
4141
4142 memset (&wi, 0, sizeof (wi));
4143 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
4144 if (addr)
4145 {
4146 debug_generic_expr (addr);
4147 inform (input_location, "in statement");
4148 debug_gimple_stmt (stmt);
4149 return true;
4150 }
4151
4152 /* If the statement is marked as part of an EH region, then it is
4153 expected that the statement could throw. Verify that when we
4154 have optimizations that simplify statements such that we prove
4155 that they cannot throw, that we update other data structures
4156 to match. */
4157 if (lookup_stmt_eh_region (stmt) >= 0)
4158 {
4159 /* During IPA passes, ipa-pure-const sets nothrow flags on calls
4160 and they are updated on statements only after fixup_cfg
4161 is executed at the beginning of the expansion stage. */
4162 if (!stmt_could_throw_p (stmt) && cgraph_state != CGRAPH_STATE_IPA_SSA)
4163 {
4164 error ("statement marked for throw, but doesn%'t");
4165 goto fail;
4166 }
4167 if (!last_in_block && stmt_can_throw_internal (stmt))
4168 {
4169 error ("statement marked for throw in middle of block");
4170 goto fail;
4171 }
4172 }
4173
4174 return false;
4175
4176 fail:
4177 debug_gimple_stmt (stmt);
4178 return true;
4179 }
4180
4181
4182 /* Return true when tree node T can be shared. */
4183
4184 static bool
4185 tree_node_can_be_shared (tree t)
4186 {
4187 if (IS_TYPE_OR_DECL_P (t)
4188 || is_gimple_min_invariant (t)
4189 || TREE_CODE (t) == SSA_NAME
4190 || t == error_mark_node
4191 || TREE_CODE (t) == IDENTIFIER_NODE)
4192 return true;
4193
4194 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4195 return true;
4196
4197 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4198 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4199 || TREE_CODE (t) == COMPONENT_REF
4200 || TREE_CODE (t) == REALPART_EXPR
4201 || TREE_CODE (t) == IMAGPART_EXPR)
4202 t = TREE_OPERAND (t, 0);
4203
4204 if (DECL_P (t))
4205 return true;
4206
4207 return false;
4208 }
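
/* E.g. (hand-written examples): type nodes, the constant 42, SSA
   names and bare decls may appear in any number of statements, and a
   component chain such as "x.a.b" is shareable because it bottoms out
   in a DECL, whereas "p->a.b" is not, since its base is an
   indirection rather than a decl.  */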
4209
4210
4211 /* Called via walk_gimple_stmt. Verify tree sharing. */
4212
4213 static tree
4214 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4215 {
4216 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4217 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4218
4219 if (tree_node_can_be_shared (*tp))
4220 {
4221 *walk_subtrees = false;
4222 return NULL;
4223 }
4224
4225 if (pointer_set_insert (visited, *tp))
4226 return *tp;
4227
4228 return NULL;
4229 }
4230
4231
4232 static bool eh_error_found;
4233 static int
4234 verify_eh_throw_stmt_node (void **slot, void *data)
4235 {
4236 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4237 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4238
4239 if (!pointer_set_contains (visited, node->stmt))
4240 {
4241 error ("dead STMT in EH table");
4242 debug_gimple_stmt (node->stmt);
4243 eh_error_found = true;
4244 }
4245 return 1;
4246 }
4247
4248
4249 /* Verify the GIMPLE statements in every basic block. */
4250
4251 void
4252 verify_stmts (void)
4253 {
4254 basic_block bb;
4255 gimple_stmt_iterator gsi;
4256 bool err = false;
4257 struct pointer_set_t *visited, *visited_stmts;
4258 tree addr;
4259 struct walk_stmt_info wi;
4260
4261 timevar_push (TV_TREE_STMT_VERIFY);
4262 visited = pointer_set_create ();
4263 visited_stmts = pointer_set_create ();
4264
4265 memset (&wi, 0, sizeof (wi));
4266 wi.info = (void *) visited;
4267
4268 FOR_EACH_BB (bb)
4269 {
4270 gimple phi;
4271 size_t i;
4272
4273 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4274 {
4275 phi = gsi_stmt (gsi);
4276 pointer_set_insert (visited_stmts, phi);
4277 if (gimple_bb (phi) != bb)
4278 {
4279 error ("gimple_bb (phi) is set to a wrong basic block");
4280 err |= true;
4281 }
4282
4283 for (i = 0; i < gimple_phi_num_args (phi); i++)
4284 {
4285 tree t = gimple_phi_arg_def (phi, i);
4286 tree addr;
4287
4288 if (!t)
4289 {
4290 error ("missing PHI def");
4291 debug_gimple_stmt (phi);
4292 err |= true;
4293 continue;
4294 }
4295 /* Addressable variables do have SSA_NAMEs but they
4296 are not considered gimple values. */
4297 else if (TREE_CODE (t) != SSA_NAME
4298 && TREE_CODE (t) != FUNCTION_DECL
4299 && !is_gimple_min_invariant (t))
4300 {
4301 error ("PHI argument is not a GIMPLE value");
4302 debug_gimple_stmt (phi);
4303 debug_generic_expr (t);
4304 err |= true;
4305 }
4306
4307 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4308 if (addr)
4309 {
4310 error ("incorrect sharing of tree nodes");
4311 debug_gimple_stmt (phi);
4312 debug_generic_expr (addr);
4313 err |= true;
4314 }
4315 }
4316 }
4317
4318 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4319 {
4320 gimple stmt = gsi_stmt (gsi);
4321
4322 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4323 || gimple_code (stmt) == GIMPLE_BIND)
4324 {
4325 error ("invalid GIMPLE statement");
4326 debug_gimple_stmt (stmt);
4327 err |= true;
4328 }
4329
4330 pointer_set_insert (visited_stmts, stmt);
4331
4332 if (gimple_bb (stmt) != bb)
4333 {
4334 error ("gimple_bb (stmt) is set to a wrong basic block");
4335 err |= true;
4336 }
4337
4338 if (gimple_code (stmt) == GIMPLE_LABEL)
4339 {
4340 tree decl = gimple_label_label (stmt);
4341 int uid = LABEL_DECL_UID (decl);
4342
4343 if (uid == -1
4344 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4345 {
4346 error ("incorrect entry in label_to_block_map");
4347 err |= true;
4348 }
4349 }
4350
4351 err |= verify_stmt (&gsi);
4352 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4353 if (addr)
4354 {
4355 error ("incorrect sharing of tree nodes");
4356 debug_gimple_stmt (stmt);
4357 debug_generic_expr (addr);
4358 err |= true;
4359 }
4360 gsi_next (&gsi);
4361 }
4362 }
4363
4364 eh_error_found = false;
4365 if (get_eh_throw_stmt_table (cfun))
4366 htab_traverse (get_eh_throw_stmt_table (cfun),
4367 verify_eh_throw_stmt_node,
4368 visited_stmts);
4369
4370 if (err | eh_error_found)
4371 internal_error ("verify_stmts failed");
4372
4373 pointer_set_destroy (visited);
4374 pointer_set_destroy (visited_stmts);
4375 verify_histograms ();
4376 timevar_pop (TV_TREE_STMT_VERIFY);
4377 }
4378
4379
4380 /* Verifies that the flow information is OK. */
4381
4382 static int
4383 gimple_verify_flow_info (void)
4384 {
4385 int err = 0;
4386 basic_block bb;
4387 gimple_stmt_iterator gsi;
4388 gimple stmt;
4389 edge e;
4390 edge_iterator ei;
4391
4392 if (ENTRY_BLOCK_PTR->il.gimple)
4393 {
4394 error ("ENTRY_BLOCK has IL associated with it");
4395 err = 1;
4396 }
4397
4398 if (EXIT_BLOCK_PTR->il.gimple)
4399 {
4400 error ("EXIT_BLOCK has IL associated with it");
4401 err = 1;
4402 }
4403
4404 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4405 if (e->flags & EDGE_FALLTHRU)
4406 {
4407 error ("fallthru to exit from bb %d", e->src->index);
4408 err = 1;
4409 }
4410
4411 FOR_EACH_BB (bb)
4412 {
4413 bool found_ctrl_stmt = false;
4414
4415 stmt = NULL;
4416
4417 /* Skip labels on the start of basic block. */
4418 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4419 {
4420 tree label;
4421 gimple prev_stmt = stmt;
4422
4423 stmt = gsi_stmt (gsi);
4424
4425 if (gimple_code (stmt) != GIMPLE_LABEL)
4426 break;
4427
4428 label = gimple_label_label (stmt);
4429 if (prev_stmt && DECL_NONLOCAL (label))
4430 {
4431 error ("nonlocal label ");
4432 print_generic_expr (stderr, label, 0);
4433 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4434 bb->index);
4435 err = 1;
4436 }
4437
4438 if (label_to_block (label) != bb)
4439 {
4440 error ("label ");
4441 print_generic_expr (stderr, label, 0);
4442 fprintf (stderr, " to block does not match in bb %d",
4443 bb->index);
4444 err = 1;
4445 }
4446
4447 if (decl_function_context (label) != current_function_decl)
4448 {
4449 error ("label ");
4450 print_generic_expr (stderr, label, 0);
4451 fprintf (stderr, " has incorrect context in bb %d",
4452 bb->index);
4453 err = 1;
4454 }
4455 }
4456
4457 /* Verify that body of basic block BB is free of control flow. */
4458 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4459 {
4460 gimple stmt = gsi_stmt (gsi);
4461
4462 if (found_ctrl_stmt)
4463 {
4464 error ("control flow in the middle of basic block %d",
4465 bb->index);
4466 err = 1;
4467 }
4468
4469 if (stmt_ends_bb_p (stmt))
4470 found_ctrl_stmt = true;
4471
4472 if (gimple_code (stmt) == GIMPLE_LABEL)
4473 {
4474 error ("label ");
4475 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4476 fprintf (stderr, " in the middle of basic block %d", bb->index);
4477 err = 1;
4478 }
4479 }
4480
4481 gsi = gsi_last_bb (bb);
4482 if (gsi_end_p (gsi))
4483 continue;
4484
4485 stmt = gsi_stmt (gsi);
4486
4487 err |= verify_eh_edges (stmt);
4488
4489 if (is_ctrl_stmt (stmt))
4490 {
4491 FOR_EACH_EDGE (e, ei, bb->succs)
4492 if (e->flags & EDGE_FALLTHRU)
4493 {
4494 error ("fallthru edge after a control statement in bb %d",
4495 bb->index);
4496 err = 1;
4497 }
4498 }
4499
4500 if (gimple_code (stmt) != GIMPLE_COND)
4501 {
4502 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
4503 after anything other than a GIMPLE_COND statement. */
4504 FOR_EACH_EDGE (e, ei, bb->succs)
4505 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4506 {
4507 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4508 bb->index);
4509 err = 1;
4510 }
4511 }
4512
4513 switch (gimple_code (stmt))
4514 {
4515 case GIMPLE_COND:
4516 {
4517 edge true_edge;
4518 edge false_edge;
4519
4520 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4521
4522 if (!true_edge
4523 || !false_edge
4524 || !(true_edge->flags & EDGE_TRUE_VALUE)
4525 || !(false_edge->flags & EDGE_FALSE_VALUE)
4526 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4527 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4528 || EDGE_COUNT (bb->succs) >= 3)
4529 {
4530 error ("wrong outgoing edge flags at end of bb %d",
4531 bb->index);
4532 err = 1;
4533 }
4534 }
4535 break;
4536
4537 case GIMPLE_GOTO:
4538 if (simple_goto_p (stmt))
4539 {
4540 error ("explicit goto at end of bb %d", bb->index);
4541 err = 1;
4542 }
4543 else
4544 {
4545 /* FIXME. We should double check that the labels in the
4546 destination blocks have their address taken. */
4547 FOR_EACH_EDGE (e, ei, bb->succs)
4548 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4549 | EDGE_FALSE_VALUE))
4550 || !(e->flags & EDGE_ABNORMAL))
4551 {
4552 error ("wrong outgoing edge flags at end of bb %d",
4553 bb->index);
4554 err = 1;
4555 }
4556 }
4557 break;
4558
4559 case GIMPLE_RETURN:
4560 if (!single_succ_p (bb)
4561 || (single_succ_edge (bb)->flags
4562 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4563 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4564 {
4565 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4566 err = 1;
4567 }
4568 if (single_succ (bb) != EXIT_BLOCK_PTR)
4569 {
4570 error ("return edge does not point to exit in bb %d",
4571 bb->index);
4572 err = 1;
4573 }
4574 break;
4575
4576 case GIMPLE_SWITCH:
4577 {
4578 tree prev;
4579 edge e;
4580 size_t i, n;
4581
4582 n = gimple_switch_num_labels (stmt);
4583
4584 /* Mark all the destination basic blocks. */
4585 for (i = 0; i < n; ++i)
4586 {
4587 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4588 basic_block label_bb = label_to_block (lab);
4589 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4590 label_bb->aux = (void *)1;
4591 }
4592
4593 /* Verify that the case labels are sorted. */
4594 prev = gimple_switch_label (stmt, 0);
4595 for (i = 1; i < n; ++i)
4596 {
4597 tree c = gimple_switch_label (stmt, i);
4598 if (!CASE_LOW (c))
4599 {
4600 error ("found default case not at the start of "
4601 "case vector");
4602 err = 1;
4603 continue;
4604 }
4605 if (CASE_LOW (prev)
4606 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4607 {
4608 error ("case labels not sorted: ");
4609 print_generic_expr (stderr, prev, 0);
4610 fprintf (stderr," is greater than ");
4611 print_generic_expr (stderr, c, 0);
4612 fprintf (stderr," but comes before it.\n");
4613 err = 1;
4614 }
4615 prev = c;
4616 }
4617 /* VRP will remove the default case if it can prove it will
4618 never be executed. So do not verify there always exists
4619 a default case here. */
4620
4621 FOR_EACH_EDGE (e, ei, bb->succs)
4622 {
4623 if (!e->dest->aux)
4624 {
4625 error ("extra outgoing edge %d->%d",
4626 bb->index, e->dest->index);
4627 err = 1;
4628 }
4629
4630 e->dest->aux = (void *)2;
4631 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4632 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4633 {
4634 error ("wrong outgoing edge flags at end of bb %d",
4635 bb->index);
4636 err = 1;
4637 }
4638 }
4639
4640 /* Check that we have all of them. */
4641 for (i = 0; i < n; ++i)
4642 {
4643 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4644 basic_block label_bb = label_to_block (lab);
4645
4646 if (label_bb->aux != (void *)2)
4647 {
4648 error ("missing edge %i->%i", bb->index, label_bb->index);
4649 err = 1;
4650 }
4651 }
4652
4653 FOR_EACH_EDGE (e, ei, bb->succs)
4654 e->dest->aux = (void *)0;
4655 }
4656
4657 default: ;
4658 }
4659 }
4660
4661 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4662 verify_dominators (CDI_DOMINATORS);
4663
4664 return err;
4665 }
4666
4667
4668 /* Updates phi nodes after creating a forwarder block joined
4669 by edge FALLTHRU. */
4670
4671 static void
4672 gimple_make_forwarder_block (edge fallthru)
4673 {
4674 edge e;
4675 edge_iterator ei;
4676 basic_block dummy, bb;
4677 tree var;
4678 gimple_stmt_iterator gsi;
4679
4680 dummy = fallthru->src;
4681 bb = fallthru->dest;
4682
4683 if (single_pred_p (bb))
4684 return;
4685
4686 /* If we redirected a branch we must create new PHI nodes at the
4687 start of BB. */
4688 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4689 {
4690 gimple phi, new_phi;
4691
4692 phi = gsi_stmt (gsi);
4693 var = gimple_phi_result (phi);
4694 new_phi = create_phi_node (var, bb);
4695 SSA_NAME_DEF_STMT (var) = new_phi;
4696 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4697 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru);
4698 }
4699
4700 /* Add the arguments we have stored on edges. */
4701 FOR_EACH_EDGE (e, ei, bb->preds)
4702 {
4703 if (e == fallthru)
4704 continue;
4705
4706 flush_pending_stmts (e);
4707 }
4708 }
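
/* A sketch of the PHI bookkeeping above (hand-written; DUMMY is the
   new forwarder, edge e2 was redirected to BB): the original

     BB: # x_1 = PHI <a_2(e1), b_3(e2)>

   becomes

     DUMMY: # x_4 = PHI <a_2(e1)>
     BB:    # x_1 = PHI <x_4(fallthru), b_3(e2)>

   so x_1 keeps its uses while DUMMY gets a fresh defining PHI.  */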
4709
4710
4711 /* Return a non-special label at the head of basic block BB.
4712 Create one if it doesn't exist. */
4713
4714 tree
4715 gimple_block_label (basic_block bb)
4716 {
4717 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4718 bool first = true;
4719 tree label;
4720 gimple stmt;
4721
4722 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4723 {
4724 stmt = gsi_stmt (i);
4725 if (gimple_code (stmt) != GIMPLE_LABEL)
4726 break;
4727 label = gimple_label_label (stmt);
4728 if (!DECL_NONLOCAL (label))
4729 {
4730 if (!first)
4731 gsi_move_before (&i, &s);
4732 return label;
4733 }
4734 }
4735
4736 label = create_artificial_label ();
4737 stmt = gimple_build_label (label);
4738 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4739 return label;
4740 }
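
/* Usage sketch: a caller that retargets a jump to block DEST
   typically writes

     tree label = gimple_block_label (dest);
     ...
     CASE_LABEL (elt) = label;

   as the GIMPLE_SWITCH case of gimple_redirect_edge_and_branch below
   does.  */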
4741
4742
4743 /* Attempt to perform edge redirection by replacing a possibly complex
4744 jump instruction by a goto or by removing the jump completely.
4745 This can apply only if all edges now point to the same block. The
4746 parameters and return values are equivalent to
4747 redirect_edge_and_branch. */
4748
4749 static edge
4750 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4751 {
4752 basic_block src = e->src;
4753 gimple_stmt_iterator i;
4754 gimple stmt;
4755
4756 /* We can replace or remove a complex jump only when we have exactly
4757 two edges. */
4758 if (EDGE_COUNT (src->succs) != 2
4759 /* Verify that all targets will be TARGET. Specifically, the
4760 edge that is not E must also go to TARGET. */
4761 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4762 return NULL;
4763
4764 i = gsi_last_bb (src);
4765 if (gsi_end_p (i))
4766 return NULL;
4767
4768 stmt = gsi_stmt (i);
4769
4770 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4771 {
4772 gsi_remove (&i, true);
4773 e = ssa_redirect_edge (e, target);
4774 e->flags = EDGE_FALLTHRU;
4775 return e;
4776 }
4777
4778 return NULL;
4779 }
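
/* Illustration (hand-written sketch): if SRC ends in

     if (a_1 > 0) goto L1; else goto L2;

   and the edge not being redirected already leads to TARGET, then
   redirecting E to TARGET as well lets the GIMPLE_COND be deleted and
   SRC fall through to TARGET unconditionally.  */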
4780
4781
4782 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4783 edge representing the redirected branch. */
4784
4785 static edge
4786 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4787 {
4788 basic_block bb = e->src;
4789 gimple_stmt_iterator gsi;
4790 edge ret;
4791 gimple stmt;
4792
4793 if (e->flags & EDGE_ABNORMAL)
4794 return NULL;
4795
4796 if (e->src != ENTRY_BLOCK_PTR
4797 && (ret = gimple_try_redirect_by_replacing_jump (e, dest)))
4798 return ret;
4799
4800 if (e->dest == dest)
4801 return NULL;
4802
4803 if (e->flags & EDGE_EH)
4804 return redirect_eh_edge (e, dest);
4805
4806 gsi = gsi_last_bb (bb);
4807 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4808
4809 switch (stmt ? gimple_code (stmt) : ERROR_MARK)
4810 {
4811 case GIMPLE_COND:
4812 /* For a GIMPLE_COND, we only need to redirect the edge. */
4813 break;
4814
4815 case GIMPLE_GOTO:
4816 /* No non-abnormal edges should lead from a non-simple goto, and
4817 simple ones should be represented implicitly. */
4818 gcc_unreachable ();
4819
4820 case GIMPLE_SWITCH:
4821 {
4822 tree label = gimple_block_label (dest);
4823 tree cases = get_cases_for_edge (e, stmt);
4824
4825 /* If we have a list of cases associated with E, then use it
4826 as it's a lot faster than walking the entire case vector. */
4827 if (cases)
4828 {
4829 edge e2 = find_edge (e->src, dest);
4830 tree last, first;
4831
4832 first = cases;
4833 while (cases)
4834 {
4835 last = cases;
4836 CASE_LABEL (cases) = label;
4837 cases = TREE_CHAIN (cases);
4838 }
4839
4840 /* If there was already an edge in the CFG, then we need
4841 to move all the cases associated with E to E2. */
4842 if (e2)
4843 {
4844 tree cases2 = get_cases_for_edge (e2, stmt);
4845
4846 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4847 TREE_CHAIN (cases2) = first;
4848 }
4849 }
4850 else
4851 {
4852 size_t i, n = gimple_switch_num_labels (stmt);
4853
4854 for (i = 0; i < n; i++)
4855 {
4856 tree elt = gimple_switch_label (stmt, i);
4857 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4858 CASE_LABEL (elt) = label;
4859 }
4860 }
4861
4862 break;
4863 }
4864
4865 case GIMPLE_RETURN:
4866 gsi_remove (&gsi, true);
4867 e->flags |= EDGE_FALLTHRU;
4868 break;
4869
4870 case GIMPLE_OMP_RETURN:
4871 case GIMPLE_OMP_CONTINUE:
4872 case GIMPLE_OMP_SECTIONS_SWITCH:
4873 case GIMPLE_OMP_FOR:
4874 /* The edges from OMP constructs can be simply redirected. */
4875 break;
4876
4877 default:
4878 /* Otherwise it must be a fallthru edge, and we don't need to
4879 do anything besides redirecting it. */
4880 gcc_assert (e->flags & EDGE_FALLTHRU);
4881 break;
4882 }
4883
4884 /* Now update the edges in the CFG; PHI-node updates for the
4885 redirected edge are deferred as pending statements by
4886 ssa_redirect_edge. */
4887 e = ssa_redirect_edge (e, dest);
4888
4889 return e;
4890 }
4891
4892 /* Returns true if it is possible to remove edge E by redirecting
4893 it to the destination of the other edge from E->src. */
4894
4895 static bool
4896 gimple_can_remove_branch_p (const_edge e)
4897 {
4898 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
4899 return false;
4900
4901 return true;
4902 }
4903
4904 /* Simple wrapper, as we can always redirect fallthru edges. */
4905
4906 static basic_block
4907 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4908 {
4909 e = gimple_redirect_edge_and_branch (e, dest);
4910 gcc_assert (e);
4911
4912 return NULL;
4913 }
4914
4915
4916 /* Splits basic block BB after statement STMT (but at least after the
4917 labels). If STMT is NULL, BB is split just after the labels. */
4918
4919 static basic_block
4920 gimple_split_block (basic_block bb, void *stmt)
4921 {
4922 gimple_stmt_iterator gsi;
4923 gimple_stmt_iterator gsi_tgt;
4924 gimple act;
4925 gimple_seq list;
4926 basic_block new_bb;
4927 edge e;
4928 edge_iterator ei;
4929
4930 new_bb = create_empty_bb (bb);
4931
4932 /* Redirect the outgoing edges. */
4933 new_bb->succs = bb->succs;
4934 bb->succs = NULL;
4935 FOR_EACH_EDGE (e, ei, new_bb->succs)
4936 e->src = new_bb;
4937
4938 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
4939 stmt = NULL;
4940
4941 /* Move everything from GSI to the new basic block. */
4942 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4943 {
4944 act = gsi_stmt (gsi);
4945 if (gimple_code (act) == GIMPLE_LABEL)
4946 continue;
4947
4948 if (!stmt)
4949 break;
4950
4951 if (stmt == act)
4952 {
4953 gsi_next (&gsi);
4954 break;
4955 }
4956 }
4957
4958 if (gsi_end_p (gsi))
4959 return new_bb;
4960
4961 /* Split the statement list - avoid re-creating new containers as this
4962 brings ugly quadratic memory consumption in the inliner.
4963 (We are still quadratic since we need to update stmt BB pointers,
4964 sadly.) */
4965 list = gsi_split_seq_before (&gsi);
4966 set_bb_seq (new_bb, list);
4967 for (gsi_tgt = gsi_start (list);
4968 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
4969 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
4970
4971 return new_bb;
4972 }
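
/* E.g. (hand-written sketch): splitting a block holding
   "L0: a = b; c = d;" at statement "a = b" leaves "L0: a = b" in BB
   and moves "c = d", together with BB's former successor edges, into
   the returned block.  */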
4973
4974
4975 /* Moves basic block BB after block AFTER. */
4976
4977 static bool
4978 gimple_move_block_after (basic_block bb, basic_block after)
4979 {
4980 if (bb->prev_bb == after)
4981 return true;
4982
4983 unlink_block (bb);
4984 link_block (bb, after);
4985
4986 return true;
4987 }
4988
4989
4990 /* Return true if basic_block can be duplicated. */
4991
4992 static bool
4993 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
4994 {
4995 return true;
4996 }
4997
4998 /* Create a duplicate of the basic block BB. NOTE: This does not
4999 preserve SSA form. */
5000
5001 static basic_block
5002 gimple_duplicate_bb (basic_block bb)
5003 {
5004 basic_block new_bb;
5005 gimple_stmt_iterator gsi, gsi_tgt;
5006 gimple_seq phis = phi_nodes (bb);
5007 gimple phi, stmt, copy;
5008
5009 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5010
5011 /* Copy the PHI nodes. We ignore PHI node arguments here because
5012 the incoming edges have not been set up yet. */
5013 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5014 {
5015 phi = gsi_stmt (gsi);
5016 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5017 create_new_def_for (gimple_phi_result (copy), copy,
5018 gimple_phi_result_ptr (copy));
5019 }
5020
5021 gsi_tgt = gsi_start_bb (new_bb);
5022 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5023 {
5024 def_operand_p def_p;
5025 ssa_op_iter op_iter;
5026 int region;
5027
5028 stmt = gsi_stmt (gsi);
5029 if (gimple_code (stmt) == GIMPLE_LABEL)
5030 continue;
5031
5032 /* Create a new copy of STMT and duplicate STMT's virtual
5033 operands. */
5034 copy = gimple_copy (stmt);
5035 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5036 region = lookup_stmt_eh_region (stmt);
5037 if (region >= 0)
5038 add_stmt_to_eh_region (copy, region);
5039 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5040
5041 /* Create new names for all the definitions created by COPY and
5042 add replacement mappings for each new name. */
5043 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5044 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5045 }
5046
5047 return new_bb;
5048 }
5049
5050 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5051
5052 static void
5053 add_phi_args_after_copy_edge (edge e_copy)
5054 {
5055 basic_block bb, bb_copy = e_copy->src, dest;
5056 edge e;
5057 edge_iterator ei;
5058 gimple phi, phi_copy;
5059 tree def;
5060 gimple_stmt_iterator psi, psi_copy;
5061
5062 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5063 return;
5064
5065 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5066
5067 if (e_copy->dest->flags & BB_DUPLICATED)
5068 dest = get_bb_original (e_copy->dest);
5069 else
5070 dest = e_copy->dest;
5071
5072 e = find_edge (bb, dest);
5073 if (!e)
5074 {
5075 /* During loop unrolling the target of the latch edge is copied.
5076 In this case we are not looking for the edge to DEST, but for
5077 the edge to the duplicated block whose original was DEST. */
5078 FOR_EACH_EDGE (e, ei, bb->succs)
5079 {
5080 if ((e->dest->flags & BB_DUPLICATED)
5081 && get_bb_original (e->dest) == dest)
5082 break;
5083 }
5084
5085 gcc_assert (e != NULL);
5086 }
5087
5088 for (psi = gsi_start_phis (e->dest),
5089 psi_copy = gsi_start_phis (e_copy->dest);
5090 !gsi_end_p (psi);
5091 gsi_next (&psi), gsi_next (&psi_copy))
5092 {
5093 phi = gsi_stmt (psi);
5094 phi_copy = gsi_stmt (psi_copy);
5095 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5096 add_phi_arg (phi_copy, def, e_copy);
5097 }
5098 }
5099
5100
5101 /* Basic block BB_COPY was created by code duplication. Add phi node
5102 arguments for edges going out of BB_COPY. The blocks that were
5103 duplicated have BB_DUPLICATED set. */
5104
5105 void
5106 add_phi_args_after_copy_bb (basic_block bb_copy)
5107 {
5108 edge e_copy;
5109 edge_iterator ei;
5110
5111 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5112 {
5113 add_phi_args_after_copy_edge (e_copy);
5114 }
5115 }
5116
5117 /* Blocks in REGION_COPY array of length N_REGION were created by
5118 duplication of basic blocks. Add phi node arguments for edges
5119 going from these blocks. If E_COPY is not NULL, also add
5120 phi node arguments for its destination. */
5121
5122 void
5123 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5124 edge e_copy)
5125 {
5126 unsigned i;
5127
5128 for (i = 0; i < n_region; i++)
5129 region_copy[i]->flags |= BB_DUPLICATED;
5130
5131 for (i = 0; i < n_region; i++)
5132 add_phi_args_after_copy_bb (region_copy[i]);
5133 if (e_copy)
5134 add_phi_args_after_copy_edge (e_copy);
5135
5136 for (i = 0; i < n_region; i++)
5137 region_copy[i]->flags &= ~BB_DUPLICATED;
5138 }
5139
5140 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5141 important exit edge EXIT. By important we mean that no SSA name defined
5142 inside the region is live over the other exit edges of the region. All
5143 entry edges to the region must go to ENTRY->dest. The edge ENTRY is
5144 redirected to the duplicate of the region. SSA form, dominance and loop
5145 information are updated. The new basic blocks are stored to REGION_COPY
5146 in the same order as in REGION, provided that REGION_COPY is not NULL.
5147 The function returns false if it is unable to copy the region,
5148 true otherwise. */
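
/* As an illustration (hand-written sketch): for the primary use, loop
   header copying, duplicating the header region rotates

     while (cond) body;

   into the equivalent of

     if (cond) do body; while (cond);

   with ENTRY being the preheader edge and EXIT the edge from the
   header into the loop body.  */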
5149
5150 bool
5151 gimple_duplicate_sese_region (edge entry, edge exit,
5152 basic_block *region, unsigned n_region,
5153 basic_block *region_copy)
5154 {
5155 unsigned i;
5156 bool free_region_copy = false, copying_header = false;
5157 struct loop *loop = entry->dest->loop_father;
5158 edge exit_copy;
5159 VEC (basic_block, heap) *doms;
5160 edge redirected;
5161 int total_freq = 0, entry_freq = 0;
5162 gcov_type total_count = 0, entry_count = 0;
5163
5164 if (!can_copy_bbs_p (region, n_region))
5165 return false;
5166
5167 /* Some sanity checking. Note that we do not check for all possible
5168 misuses of these functions; e.g., if you ask to copy something weird,
5169 it will work, but the state of the structures probably will not be
5170 correct. */
5171 for (i = 0; i < n_region; i++)
5172 {
5173 /* We do not handle subloops, i.e. all the blocks must belong to the
5174 same loop. */
5175 if (region[i]->loop_father != loop)
5176 return false;
5177
5178 if (region[i] != entry->dest
5179 && region[i] == loop->header)
5180 return false;
5181 }
5182
5183 set_loop_copy (loop, loop);
5184
5185 /* In case the function is used for loop header copying (which is the primary
5186 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5187 if (loop->header == entry->dest)
5188 {
5189 copying_header = true;
5190 set_loop_copy (loop, loop_outer (loop));
5191
5192 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5193 return false;
5194
5195 for (i = 0; i < n_region; i++)
5196 if (region[i] != exit->src
5197 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5198 return false;
5199 }
5200
5201 if (!region_copy)
5202 {
5203 region_copy = XNEWVEC (basic_block, n_region);
5204 free_region_copy = true;
5205 }
5206
5207 gcc_assert (!need_ssa_update_p (cfun));
5208
5209 /* Record blocks outside the region that are dominated by something
5210 inside. */
5211 doms = NULL;
5212 initialize_original_copy_tables ();
5213
5214 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5215
5216 if (entry->dest->count)
5217 {
5218 total_count = entry->dest->count;
5219 entry_count = entry->count;
5220 /* Fix up corner cases, to avoid division by zero or creation of negative
5221 frequencies. */
5222 if (entry_count > total_count)
5223 entry_count = total_count;
5224 }
5225 else
5226 {
5227 total_freq = entry->dest->frequency;
5228 entry_freq = EDGE_FREQUENCY (entry);
5229 /* Fix up corner cases, to avoid division by zero or creation of negative
5230 frequencies. */
5231 if (total_freq == 0)
5232 total_freq = 1;
5233 else if (entry_freq > total_freq)
5234 entry_freq = total_freq;
5235 }
5236
5237 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5238 split_edge_bb_loc (entry));
5239 if (total_count)
5240 {
5241 scale_bbs_frequencies_gcov_type (region, n_region,
5242 total_count - entry_count,
5243 total_count);
5244 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5245 total_count);
5246 }
5247 else
5248 {
5249 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5250 total_freq);
5251 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5252 }
5253
5254 if (copying_header)
5255 {
5256 loop->header = exit->dest;
5257 loop->latch = exit->src;
5258 }
5259
5260 /* Redirect the entry and add the phi node arguments. */
5261 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5262 gcc_assert (redirected != NULL);
5263 flush_pending_stmts (entry);
5264
5265 /* Concerning updating of dominators: We must recount dominators
5266 for entry block and its copy. Anything that is outside of the
5267 region, but was dominated by something inside needs recounting as
5268 well. */
5269 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5270 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5271 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5272 VEC_free (basic_block, heap, doms);
5273
5274 /* Add the other PHI node arguments. */
5275 add_phi_args_after_copy (region_copy, n_region, NULL);
5276
5277 /* Update the SSA web. */
5278 update_ssa (TODO_update_ssa);
5279
5280 if (free_region_copy)
5281 free (region_copy);
5282
5283 free_original_copy_tables ();
5284 return true;
5285 }
5286
5287 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5288 are stored to REGION_COPY in the same order as they appear
5289 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5290 the region, EXIT an exit from it. The condition guarding EXIT
5291 is moved to ENTRY. Returns true if duplication succeeds, false
5292 otherwise.
5293
5294 For example,
5295
5296 some_code;
5297 if (cond)
5298 A;
5299 else
5300 B;
5301
5302 is transformed to
5303
5304 if (cond)
5305 {
5306 some_code;
5307 A;
5308 }
5309 else
5310 {
5311 some_code;
5312 B;
5313 }
5314 */
5315
5316 bool
5317 gimple_duplicate_sese_tail (edge entry, edge exit,
5318 basic_block *region, unsigned n_region,
5319 basic_block *region_copy)
5320 {
5321 unsigned i;
5322 bool free_region_copy = false;
5323 struct loop *loop = exit->dest->loop_father;
5324 struct loop *orig_loop = entry->dest->loop_father;
5325 basic_block switch_bb, entry_bb, nentry_bb;
5326 VEC (basic_block, heap) *doms;
5327 int total_freq = 0, exit_freq = 0;
5328 gcov_type total_count = 0, exit_count = 0;
5329 edge exits[2], nexits[2], e;
5330 gimple_stmt_iterator gsi;
5331 gimple cond_stmt;
5332 edge sorig, snew;
5333
5334 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5335 exits[0] = exit;
5336 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5337
5338 if (!can_copy_bbs_p (region, n_region))
5339 return false;
5340
5341 /* Some sanity checking. Note that we do not check for all possible
5342 misuses of these functions; e.g., if you ask to copy something weird
5343 (say, in the example above, if there is a jump from inside to the middle
5344 of some_code, or some_code defines some of the values used in cond),
5345 it will work, but the resulting code will not be correct. */
5346 for (i = 0; i < n_region; i++)
5347 {
5348 /* We do not handle subloops, i.e. all the blocks must belong to the
5349 same loop. */
5350 if (region[i]->loop_father != orig_loop)
5351 return false;
5352
5353 if (region[i] == orig_loop->latch)
5354 return false;
5355 }
5356
5357 initialize_original_copy_tables ();
5358 set_loop_copy (orig_loop, loop);
5359
5360 if (!region_copy)
5361 {
5362 region_copy = XNEWVEC (basic_block, n_region);
5363 free_region_copy = true;
5364 }
5365
5366 gcc_assert (!need_ssa_update_p (cfun));
5367
5368 /* Record blocks outside the region that are dominated by something
5369 inside. */
5370 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5371
5372 if (exit->src->count)
5373 {
5374 total_count = exit->src->count;
5375 exit_count = exit->count;
5376 /* Fix up corner cases, to avoid division by zero or creation of negative
5377 frequencies. */
5378 if (exit_count > total_count)
5379 exit_count = total_count;
5380 }
5381 else
5382 {
5383 total_freq = exit->src->frequency;
5384 exit_freq = EDGE_FREQUENCY (exit);
5385 /* Fix up corner cases, to avoid division by zero or creation of negative
5386 frequencies. */
5387 if (total_freq == 0)
5388 total_freq = 1;
5389 if (exit_freq > total_freq)
5390 exit_freq = total_freq;
5391 }
5392
5393 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5394 split_edge_bb_loc (exit));
5395 if (total_count)
5396 {
5397 scale_bbs_frequencies_gcov_type (region, n_region,
5398 total_count - exit_count,
5399 total_count);
5400 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5401 total_count);
5402 }
5403 else
5404 {
5405 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5406 total_freq);
5407 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5408 }
5409
5410 /* Create the switch block, and put the exit condition into it. */
5411 entry_bb = entry->dest;
5412 nentry_bb = get_bb_copy (entry_bb);
5413 if (!last_stmt (entry->src)
5414 || !stmt_ends_bb_p (last_stmt (entry->src)))
5415 switch_bb = entry->src;
5416 else
5417 switch_bb = split_edge (entry);
5418 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5419
5420 gsi = gsi_last_bb (switch_bb);
5421 cond_stmt = last_stmt (exit->src);
5422 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5423 cond_stmt = gimple_copy (cond_stmt);
5424 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5425 gimple_cond_set_rhs (cond_stmt, unshare_expr (gimple_cond_rhs (cond_stmt)));
5426 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5427
5428 sorig = single_succ_edge (switch_bb);
5429 sorig->flags = exits[1]->flags;
5430 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5431
5432 /* Register the new edge from SWITCH_BB in loop exit lists. */
5433 rescan_loop_exit (snew, true, false);
5434
5435 /* Add the PHI node arguments. */
5436 add_phi_args_after_copy (region_copy, n_region, snew);
5437
5438 /* Get rid of now superfluous conditions and associated edges (and phi node
5439 arguments). */
5440 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5441 PENDING_STMT (e) = NULL;
5442 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5443 PENDING_STMT (e) = NULL;
5444
5445 /* Anything that is outside of the region, but was dominated by something
5446 inside needs to update dominance info. */
5447 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5448 VEC_free (basic_block, heap, doms);
5449
5450 /* Update the SSA web. */
5451 update_ssa (TODO_update_ssa);
5452
5453 if (free_region_copy)
5454 free (region_copy);
5455
5456 free_original_copy_tables ();
5457 return true;
5458 }
5459
5460 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5461 adding blocks when the dominator traversal reaches EXIT. This
5462 function silently assumes that ENTRY strictly dominates EXIT. */
5463
5464 void
5465 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5466 VEC(basic_block,heap) **bbs_p)
5467 {
5468 basic_block son;
5469
5470 for (son = first_dom_son (CDI_DOMINATORS, entry);
5471 son;
5472 son = next_dom_son (CDI_DOMINATORS, son))
5473 {
5474 VEC_safe_push (basic_block, heap, *bbs_p, son);
5475 if (son != exit)
5476 gather_blocks_in_sese_region (son, exit, bbs_p);
5477 }
5478 }
5479
5480 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5481 The duplicates are recorded in VARS_MAP. */
5482
5483 static void
5484 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5485 tree to_context)
5486 {
5487 tree t = *tp, new_t;
5488 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5489 void **loc;
5490
5491 if (DECL_CONTEXT (t) == to_context)
5492 return;
5493
5494 loc = pointer_map_contains (vars_map, t);
5495
5496 if (!loc)
5497 {
5498 loc = pointer_map_insert (vars_map, t);
5499
5500 if (SSA_VAR_P (t))
5501 {
5502 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5503 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
5504 }
5505 else
5506 {
5507 gcc_assert (TREE_CODE (t) == CONST_DECL);
5508 new_t = copy_node (t);
5509 }
5510 DECL_CONTEXT (new_t) = to_context;
5511
5512 *loc = new_t;
5513 }
5514 else
5515 new_t = (tree) *loc;
5516
5517 *tp = new_t;
5518 }
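
/* E.g. (sketch): the first time a local "int x;" from the source
   function is seen, a fresh VAR_DECL with DECL_CONTEXT set to
   TO_CONTEXT is created, chained onto the destination's local_decls
   and recorded in VARS_MAP; later occurrences of the same decl reuse
   that cached copy.  */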
5519
5520
5521 /* Creates an SSA name in TO_CONTEXT equivalent to NAME.
5522 VARS_MAP maps old SSA names and var_decls to the new ones. */
5523
5524 static tree
5525 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5526 tree to_context)
5527 {
5528 void **loc;
5529 tree new_name, decl = SSA_NAME_VAR (name);
5530
5531 gcc_assert (is_gimple_reg (name));
5532
5533 loc = pointer_map_contains (vars_map, name);
5534
5535 if (!loc)
5536 {
5537 replace_by_duplicate_decl (&decl, vars_map, to_context);
5538
5539 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5540 if (gimple_in_ssa_p (cfun))
5541 add_referenced_var (decl);
5542
5543 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5544 if (SSA_NAME_IS_DEFAULT_DEF (name))
5545 set_default_def (decl, new_name);
5546 pop_cfun ();
5547
5548 loc = pointer_map_insert (vars_map, name);
5549 *loc = new_name;
5550 }
5551 else
5552 new_name = (tree) *loc;
5553
5554 return new_name;
5555 }
5556
5557 struct move_stmt_d
5558 {
5559 tree orig_block;
5560 tree new_block;
5561 tree from_context;
5562 tree to_context;
5563 struct pointer_map_t *vars_map;
5564 htab_t new_label_map;
5565 bool remap_decls_p;
5566 };
5567
5568 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5569 contained in *TP if it was ORIG_BLOCK previously, and change the
5570 DECL_CONTEXT of every local variable referenced in *TP. */
5571
5572 static tree
5573 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5574 {
5575 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5576 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5577 tree t = *tp;
5578
5579 if (EXPR_P (t))
5580 /* We should never have TREE_BLOCK set on non-statements. */
5581 gcc_assert (!TREE_BLOCK (t));
5582
5583 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5584 {
5585 if (TREE_CODE (t) == SSA_NAME)
5586 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5587 else if (TREE_CODE (t) == LABEL_DECL)
5588 {
5589 if (p->new_label_map)
5590 {
5591 struct tree_map in, *out;
5592 in.base.from = t;
5593 out = (struct tree_map *)
5594 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5595 if (out)
5596 *tp = t = out->to;
5597 }
5598
5599 DECL_CONTEXT (t) = p->to_context;
5600 }
5601 else if (p->remap_decls_p)
5602 {
5603 /* Replace T with its duplicate. T should no longer appear in the
5604 parent function, so this looks wasteful; however, it may appear
5605 in referenced_vars, and more importantly, as virtual operands of
5606 statements, and in alias lists of other variables. It would be
5607 quite difficult to expunge it from all those places. ??? It might
5608 suffice to do this for addressable variables. */
5609 if ((TREE_CODE (t) == VAR_DECL
5610 && !is_global_var (t))
5611 || TREE_CODE (t) == CONST_DECL)
5612 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5613
5614 if (SSA_VAR_P (t)
5615 && gimple_in_ssa_p (cfun))
5616 {
5617 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5618 add_referenced_var (*tp);
5619 pop_cfun ();
5620 }
5621 }
5622 *walk_subtrees = 0;
5623 }
5624 else if (TYPE_P (t))
5625 *walk_subtrees = 0;
5626
5627 return NULL_TREE;
5628 }
5629
5630 /* Like move_stmt_op, but for gimple statements.
5631
5632 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5633 contained in the current statement in *GSI_P and change the
5634 DECL_CONTEXT of every local variable referenced in the current
5635 statement. */
5636
5637 static tree
5638 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5639 struct walk_stmt_info *wi)
5640 {
5641 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5642 gimple stmt = gsi_stmt (*gsi_p);
5643 tree block = gimple_block (stmt);
5644
5645 if (p->orig_block == NULL_TREE
5646 || block == p->orig_block
5647 || block == NULL_TREE)
5648 gimple_set_block (stmt, p->new_block);
5649 #ifdef ENABLE_CHECKING
5650 else if (block != p->new_block)
5651 {
5652 while (block && block != p->orig_block)
5653 block = BLOCK_SUPERCONTEXT (block);
5654 gcc_assert (block);
5655 }
5656 #endif
5657
5658 if (is_gimple_omp (stmt)
5659 && gimple_code (stmt) != GIMPLE_OMP_RETURN
5660 && gimple_code (stmt) != GIMPLE_OMP_CONTINUE)
5661 {
5662 /* Do not remap variables inside OMP directives. Variables
5663 referenced in clauses and directive header belong to the
5664 parent function and should not be moved into the child
5665 function. */
5666 bool save_remap_decls_p = p->remap_decls_p;
5667 p->remap_decls_p = false;
5668 *handled_ops_p = true;
5669
5670 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r, move_stmt_op, wi);
5671
5672 p->remap_decls_p = save_remap_decls_p;
5673 }
5674
5675 return NULL_TREE;
5676 }
5677
5678 /* Marks virtual operands of all statements in basic block BB for
5679 renaming. */
5680
5681 void
5682 mark_virtual_ops_in_bb (basic_block bb)
5683 {
5684 gimple_stmt_iterator gsi;
5685
5686 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5687 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5688
5689 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5690 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5691 }
5692
5693 /* Move basic block BB from function CFUN to function DEST_CFUN. The
5694 block is moved out of the original linked list and placed after
5695 block AFTER in the new list. Also, the block is removed from the
5696 original array of blocks and placed in DEST_CFUN's array of blocks.
5697 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
5698 updated to reflect the moved edges.
5699
5700 The local variables are remapped to new instances, VARS_MAP is used
5701 to record the mapping. */
5702
5703 static void
5704 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5705 basic_block after, bool update_edge_count_p,
5706 struct move_stmt_d *d, int eh_offset)
5707 {
5708 struct control_flow_graph *cfg;
5709 edge_iterator ei;
5710 edge e;
5711 gimple_stmt_iterator si;
5712 unsigned old_len, new_len;
5713
5714 /* Remove BB from dominance structures. */
5715 delete_from_dominance_info (CDI_DOMINATORS, bb);
5716 if (current_loops)
5717 remove_bb_from_loops (bb);
5718
5719 /* Link BB to the new linked list. */
5720 move_block_after (bb, after);
5721
5722 /* Update the edge count in the corresponding flowgraphs. */
5723 if (update_edge_count_p)
5724 FOR_EACH_EDGE (e, ei, bb->succs)
5725 {
5726 cfun->cfg->x_n_edges--;
5727 dest_cfun->cfg->x_n_edges++;
5728 }
5729
5730 /* Remove BB from the original basic block array. */
5731 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5732 cfun->cfg->x_n_basic_blocks--;
5733
5734 /* Grow DEST_CFUN's basic block array if needed. */
5735 cfg = dest_cfun->cfg;
5736 cfg->x_n_basic_blocks++;
5737 if (bb->index >= cfg->x_last_basic_block)
5738 cfg->x_last_basic_block = bb->index + 1;
5739
5740 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5741 if ((unsigned) cfg->x_last_basic_block >= old_len)
5742 {
5743 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5744 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5745 new_len);
5746 }
5747
5748 VEC_replace (basic_block, cfg->x_basic_block_info,
5749 bb->index, bb);
5750
5751 /* Remap the variables in phi nodes. */
5752 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5753 {
5754 gimple phi = gsi_stmt (si);
5755 use_operand_p use;
5756 tree op = PHI_RESULT (phi);
5757 ssa_op_iter oi;
5758
5759 if (!is_gimple_reg (op))
5760 {
5761 /* Remove the phi nodes for virtual operands (alias analysis will be
5762 run for the new function, anyway). */
5763 remove_phi_node (&si, true);
5764 continue;
5765 }
5766
5767 SET_PHI_RESULT (phi,
5768 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5769 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5770 {
5771 op = USE_FROM_PTR (use);
5772 if (TREE_CODE (op) == SSA_NAME)
5773 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5774 }
5775
5776 gsi_next (&si);
5777 }
5778
5779 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5780 {
5781 gimple stmt = gsi_stmt (si);
5782 int region;
5783 struct walk_stmt_info wi;
5784
5785 memset (&wi, 0, sizeof (wi));
5786 wi.info = d;
5787 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5788
5789 if (gimple_code (stmt) == GIMPLE_LABEL)
5790 {
5791 tree label = gimple_label_label (stmt);
5792 int uid = LABEL_DECL_UID (label);
5793
5794 gcc_assert (uid > -1);
5795
5796 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5797 if (old_len <= (unsigned) uid)
5798 {
5799 new_len = 3 * uid / 2 + 1;
5800 VEC_safe_grow_cleared (basic_block, gc,
5801 cfg->x_label_to_block_map, new_len);
5802 }
5803
5804 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5805 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5806
5807 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5808
5809 if (uid >= dest_cfun->cfg->last_label_uid)
5810 dest_cfun->cfg->last_label_uid = uid + 1;
5811 }
5812 else if (gimple_code (stmt) == GIMPLE_RESX && eh_offset != 0)
5813 gimple_resx_set_region (stmt, gimple_resx_region (stmt) + eh_offset);
5814
5815 region = lookup_stmt_eh_region (stmt);
5816 if (region >= 0)
5817 {
5818 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5819 remove_stmt_from_eh_region (stmt);
5820 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5821 gimple_remove_stmt_histograms (cfun, stmt);
5822 }
5823
5824 /* We cannot leave any operands allocated from the operand caches of
5825 the current function. */
5826 free_stmt_operands (stmt);
5827 push_cfun (dest_cfun);
5828 update_stmt (stmt);
5829 pop_cfun ();
5830 }
5831
5832 FOR_EACH_EDGE (e, ei, bb->succs)
5833 if (e->goto_locus)
5834 {
5835 tree block = e->goto_block;
5836 if (d->orig_block == NULL_TREE
5837 || block == d->orig_block)
5838 e->goto_block = d->new_block;
5839 #ifdef ENABLE_CHECKING
5840 else if (block != d->new_block)
5841 {
5842 while (block && block != d->orig_block)
5843 block = BLOCK_SUPERCONTEXT (block);
5844 gcc_assert (block);
5845 }
5846 #endif
5847 }
5848 }
5849
5850 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5851 the outermost EH region. Use REGION as the incoming base EH region. */
5852
5853 static int
5854 find_outermost_region_in_block (struct function *src_cfun,
5855 basic_block bb, int region)
5856 {
5857 gimple_stmt_iterator si;
5858
5859 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5860 {
5861 gimple stmt = gsi_stmt (si);
5862 int stmt_region;
5863
5864 if (gimple_code (stmt) == GIMPLE_RESX)
5865 stmt_region = gimple_resx_region (stmt);
5866 else
5867 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
5868 if (stmt_region > 0)
5869 {
5870 if (region < 0)
5871 region = stmt_region;
5872 else if (stmt_region != region)
5873 {
5874 region = eh_region_outermost (src_cfun, stmt_region, region);
5875 gcc_assert (region != -1);
5876 }
5877 }
5878 }
5879
5880 return region;
5881 }
5882
5883 static tree
5884 new_label_mapper (tree decl, void *data)
5885 {
5886 htab_t hash = (htab_t) data;
5887 struct tree_map *m;
5888 void **slot;
5889
5890 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5891
5892 m = XNEW (struct tree_map);
5893 m->hash = DECL_UID (decl);
5894 m->base.from = decl;
5895 m->to = create_artificial_label ();
5896 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
5897 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
5898 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
5899
5900 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5901 gcc_assert (*slot == NULL);
5902
5903 *slot = m;
5904
5905 return m->to;
5906 }
5907
5908 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
5909 subblocks. */
5910
5911 static void
5912 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
5913 tree to_context)
5914 {
5915 tree *tp, t;
5916
5917 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
5918 {
5919 t = *tp;
5920 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
5921 continue;
5922 replace_by_duplicate_decl (&t, vars_map, to_context);
5923 if (t != *tp)
5924 {
5925 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
5926 {
5927 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
5928 DECL_HAS_VALUE_EXPR_P (t) = 1;
5929 }
5930 TREE_CHAIN (t) = TREE_CHAIN (*tp);
5931 *tp = t;
5932 }
5933 }
5934
5935 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
5936 replace_block_vars_by_duplicates (block, vars_map, to_context);
5937 }
5938
5939 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
5940 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
5941 single basic block in the original CFG and the new basic block is
5942 returned. DEST_CFUN must not have a CFG yet.
5943
5944 Note that the region need not be a pure SESE region. Blocks inside
5945 the region may contain calls to abort/exit. The only restriction
5946 is that ENTRY_BB should be the only entry point and it must
5947 dominate EXIT_BB.
5948
5949 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
5950 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
5951 to the new function.
5952
5953 All local variables referenced in the region are assumed to be in
5954 the corresponding BLOCK_VARS and unexpanded variable lists
5955 associated with DEST_CFUN. */
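
/* (For instance, OpenMP expansion outlines the body of a parallel
   region through this function: the region's blocks move wholesale
   into the child function's freshly initialized CFG, leaving a single
   replacement block behind in the parent.)  */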
5956
5957 basic_block
5958 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
5959 basic_block exit_bb, tree orig_block)
5960 {
5961 VEC(basic_block,heap) *bbs, *dom_bbs;
5962 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
5963 basic_block after, bb, *entry_pred, *exit_succ, abb;
5964 struct function *saved_cfun = cfun;
5965 int *entry_flag, *exit_flag, eh_offset;
5966 unsigned *entry_prob, *exit_prob;
5967 unsigned i, num_entry_edges, num_exit_edges;
5968 edge e;
5969 edge_iterator ei;
5970 htab_t new_label_map;
5971 struct pointer_map_t *vars_map;
5972 struct loop *loop = entry_bb->loop_father;
5973 struct move_stmt_d d;
5974
5975 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
5976 region. */
5977 gcc_assert (entry_bb != exit_bb
5978 && (!exit_bb
5979 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
5980
5981 /* Collect all the blocks in the region. Manually add ENTRY_BB
5982 because it won't be added by dfs_enumerate_from. */
5983 bbs = NULL;
5984 VEC_safe_push (basic_block, heap, bbs, entry_bb);
5985 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
5986
5987 /* The blocks that used to be dominated by something in BBS will now be
5988 dominated by the new block. */
5989 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
5990 VEC_address (basic_block, bbs),
5991 VEC_length (basic_block, bbs));
5992
5993 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
5994 the predecessor edges to ENTRY_BB and the successor edges to
5995 EXIT_BB so that we can re-attach them to the new basic block that
5996 will replace the region. */
5997 num_entry_edges = EDGE_COUNT (entry_bb->preds);
5998 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
5999 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6000 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6001 i = 0;
6002 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6003 {
6004 entry_prob[i] = e->probability;
6005 entry_flag[i] = e->flags;
6006 entry_pred[i++] = e->src;
6007 remove_edge (e);
6008 }
6009
6010 if (exit_bb)
6011 {
6012 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6013 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6014 sizeof (basic_block));
6015 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6016 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6017 i = 0;
6018 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6019 {
6020 exit_prob[i] = e->probability;
6021 exit_flag[i] = e->flags;
6022 exit_succ[i++] = e->dest;
6023 remove_edge (e);
6024 }
6025 }
6026 else
6027 {
6028 num_exit_edges = 0;
6029 exit_succ = NULL;
6030 exit_flag = NULL;
6031 exit_prob = NULL;
6032 }
6033
6034   /* Switch context to the child function to initialize DEST_CFUN's CFG.  */
6035 gcc_assert (dest_cfun->cfg == NULL);
6036 push_cfun (dest_cfun);
6037
6038 init_empty_tree_cfg ();
6039
6040 /* Initialize EH information for the new function. */
6041 eh_offset = 0;
6042 new_label_map = NULL;
6043 if (saved_cfun->eh)
6044 {
6045 int region = -1;
6046
6047 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6048 region = find_outermost_region_in_block (saved_cfun, bb, region);
6049
6050 init_eh_for_function ();
6051 if (region != -1)
6052 {
6053 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6054 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
6055 new_label_map, region, 0);
6056 }
6057 }
6058
6059 pop_cfun ();
6060
6061 /* Move blocks from BBS into DEST_CFUN. */
6062 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6063 after = dest_cfun->cfg->x_entry_block_ptr;
6064 vars_map = pointer_map_create ();
6065
6066 memset (&d, 0, sizeof (d));
6067 d.vars_map = vars_map;
6068 d.from_context = cfun->decl;
6069 d.to_context = dest_cfun->decl;
6070 d.new_label_map = new_label_map;
6071 d.remap_decls_p = true;
6072 d.orig_block = orig_block;
6073 d.new_block = DECL_INITIAL (dest_cfun->decl);
6074
6075 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6076 {
6077       /* No need to update edge counts on the last block.  They have
6078 	 already been updated earlier when we detached the region from
6079 	 the original CFG.  */
6080 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d, eh_offset);
6081 after = bb;
6082 }
6083
6084 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6085 if (orig_block)
6086 {
6087 tree block;
6088 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6089 == NULL_TREE);
6090 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6091 = BLOCK_SUBBLOCKS (orig_block);
6092 for (block = BLOCK_SUBBLOCKS (orig_block);
6093 block; block = BLOCK_CHAIN (block))
6094 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6095 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6096 }
6097
6098 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6099 vars_map, dest_cfun->decl);
6100
6101 if (new_label_map)
6102 htab_delete (new_label_map);
6103 pointer_map_destroy (vars_map);
6104
6105   /* Rewire the entry and exit blocks.  ENTRY_BB becomes the
6106      successor of DEST_CFUN's ENTRY_BLOCK_PTR in the child function,
6107      and EXIT_BB becomes the predecessor of DEST_CFUN's
6108      EXIT_BLOCK_PTR.  We need to switch CFUN between DEST_CFUN and
6109      SAVED_CFUN so that the various CFG manipulation functions get
6110      to the right CFG.
6111
6112 FIXME, this is silly. The CFG ought to become a parameter to
6113 these helpers. */
6114 push_cfun (dest_cfun);
6115 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6116 if (exit_bb)
6117 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6118 pop_cfun ();
6119
6120   /* Back in the original function, the SESE region has disappeared, so
6121      create a new basic block in its place.  */
6122 bb = create_empty_bb (entry_pred[0]);
6123 if (current_loops)
6124 add_bb_to_loop (bb, loop);
6125 for (i = 0; i < num_entry_edges; i++)
6126 {
6127 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6128 e->probability = entry_prob[i];
6129 }
6130
6131 for (i = 0; i < num_exit_edges; i++)
6132 {
6133 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6134 e->probability = exit_prob[i];
6135 }
6136
6137 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6138 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6139 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6140 VEC_free (basic_block, heap, dom_bbs);
6141
6142 if (exit_bb)
6143 {
6144 free (exit_prob);
6145 free (exit_flag);
6146 free (exit_succ);
6147 }
6148 free (entry_prob);
6149 free (entry_flag);
6150 free (entry_pred);
6151 VEC_free (basic_block, heap, bbs);
6152
6153 return bb;
6154 }
6155
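/* For illustration only (a hypothetical sketch, not code from this
   file): a caller that has carved out a region delimited by ENTRY_BB
   and EXIT_BB, e.g. when outlining an OpenMP body into CHILD_FN, might
   use the function above roughly as

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block merge_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb,
				 orig_block);

   after which MERGE_BB stands in for the whole region in the original
   CFG, with the region's former entry and exit edges re-attached to it.  */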
6156
6157 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6158    tree-pass.h).  */
6159
6160 void
6161 dump_function_to_file (tree fn, FILE *file, int flags)
6162 {
6163 tree arg, vars, var;
6164 struct function *dsf;
6165 bool ignore_topmost_bind = false, any_var = false;
6166 basic_block bb;
6167 tree chain;
6168
6169 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6170
6171 arg = DECL_ARGUMENTS (fn);
6172 while (arg)
6173 {
6174 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6175 fprintf (file, " ");
6176 print_generic_expr (file, arg, dump_flags);
6177 if (flags & TDF_VERBOSE)
6178 print_node (file, "", arg, 4);
6179 if (TREE_CHAIN (arg))
6180 fprintf (file, ", ");
6181 arg = TREE_CHAIN (arg);
6182 }
6183 fprintf (file, ")\n");
6184
6185 if (flags & TDF_VERBOSE)
6186 print_node (file, "", fn, 2);
6187
6188 dsf = DECL_STRUCT_FUNCTION (fn);
6189 if (dsf && (flags & TDF_DETAILS))
6190 dump_eh_tree (file, dsf);
6191
6192 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6193 {
6194 dump_node (fn, TDF_SLIM | flags, file);
6195 return;
6196 }
6197
6198 /* Switch CFUN to point to FN. */
6199 push_cfun (DECL_STRUCT_FUNCTION (fn));
6200
6201 /* When GIMPLE is lowered, the variables are no longer available in
6202 BIND_EXPRs, so display them separately. */
6203 if (cfun && cfun->decl == fn && cfun->local_decls)
6204 {
6205 ignore_topmost_bind = true;
6206
6207 fprintf (file, "{\n");
6208 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6209 {
6210 var = TREE_VALUE (vars);
6211
6212 print_generic_decl (file, var, flags);
6213 if (flags & TDF_VERBOSE)
6214 print_node (file, "", var, 4);
6215 fprintf (file, "\n");
6216
6217 any_var = true;
6218 }
6219 }
6220
6221 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6222 {
6223 /* If the CFG has been built, emit a CFG-based dump. */
6224 check_bb_profile (ENTRY_BLOCK_PTR, file);
6225 if (!ignore_topmost_bind)
6226 fprintf (file, "{\n");
6227
6228 if (any_var && n_basic_blocks)
6229 fprintf (file, "\n");
6230
6231 FOR_EACH_BB (bb)
6232 gimple_dump_bb (bb, file, 2, flags);
6233
6234 fprintf (file, "}\n");
6235 check_bb_profile (EXIT_BLOCK_PTR, file);
6236 }
6237 else if (DECL_SAVED_TREE (fn) == NULL)
6238 {
6239 /* The function is now in GIMPLE form but the CFG has not been
6240 built yet. Emit the single sequence of GIMPLE statements
6241 that make up its body. */
6242 gimple_seq body = gimple_body (fn);
6243
6244 if (gimple_seq_first_stmt (body)
6245 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6246 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6247 print_gimple_seq (file, body, 0, flags);
6248 else
6249 {
6250 if (!ignore_topmost_bind)
6251 fprintf (file, "{\n");
6252
6253 if (any_var)
6254 fprintf (file, "\n");
6255
6256 print_gimple_seq (file, body, 2, flags);
6257 fprintf (file, "}\n");
6258 }
6259 }
6260 else
6261 {
6262 int indent;
6263
6264 /* Make a tree based dump. */
6265 chain = DECL_SAVED_TREE (fn);
6266
6267 if (chain && TREE_CODE (chain) == BIND_EXPR)
6268 {
6269 if (ignore_topmost_bind)
6270 {
6271 chain = BIND_EXPR_BODY (chain);
6272 indent = 2;
6273 }
6274 else
6275 indent = 0;
6276 }
6277 else
6278 {
6279 if (!ignore_topmost_bind)
6280 fprintf (file, "{\n");
6281 indent = 2;
6282 }
6283
6284 if (any_var)
6285 fprintf (file, "\n");
6286
6287 print_generic_stmt_indented (file, chain, flags, indent);
6288 if (ignore_topmost_bind)
6289 fprintf (file, "}\n");
6290 }
6291
6292 fprintf (file, "\n\n");
6293
6294 /* Restore CFUN. */
6295 pop_cfun ();
6296 }
6297
6298
6299 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree-pass.h).  */
6300
6301 void
6302 debug_function (tree fn, int flags)
6303 {
6304 dump_function_to_file (fn, stderr, flags);
6305 }
6306
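/* As a usage sketch (assuming a debug build of the compiler), this is
   convenient from within gdb:

     (gdb) call debug_function (current_function_decl, TDF_DETAILS)

   current_function_decl is the usual GCC global; any FUNCTION_DECL and
   any combination of TDF_* flags works.  */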
6307
6308 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6309
6310 static void
6311 print_pred_bbs (FILE *file, basic_block bb)
6312 {
6313 edge e;
6314 edge_iterator ei;
6315
6316 FOR_EACH_EDGE (e, ei, bb->preds)
6317 fprintf (file, "bb_%d ", e->src->index);
6318 }
6319
6320
6321 /* Print on FILE the indexes for the successors of basic_block BB. */
6322
6323 static void
6324 print_succ_bbs (FILE *file, basic_block bb)
6325 {
6326 edge e;
6327 edge_iterator ei;
6328
6329 FOR_EACH_EDGE (e, ei, bb->succs)
6330 fprintf (file, "bb_%d ", e->dest->index);
6331 }
6332
6333 /* Print to FILE the basic block BB according to the VERBOSITY level.  */
6334
6335 void
6336 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6337 {
6338 char *s_indent = (char *) alloca ((size_t) indent + 1);
6339 memset ((void *) s_indent, ' ', (size_t) indent);
6340 s_indent[indent] = '\0';
6341
6342 /* Print basic_block's header. */
6343 if (verbosity >= 2)
6344 {
6345 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6346 print_pred_bbs (file, bb);
6347 fprintf (file, "}, succs = {");
6348 print_succ_bbs (file, bb);
6349 fprintf (file, "})\n");
6350 }
6351
6352 /* Print basic_block's body. */
6353 if (verbosity >= 3)
6354 {
6355 fprintf (file, "%s {\n", s_indent);
6356 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6357 fprintf (file, "%s }\n", s_indent);
6358 }
6359 }
6360
6361 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6362
6363 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on the
6364    VERBOSITY level, this outputs the contents of the loop, or just its
6365    structure.  */
6366
6367 static void
6368 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6369 {
6370 char *s_indent;
6371 basic_block bb;
6372
6373 if (loop == NULL)
6374 return;
6375
6376 s_indent = (char *) alloca ((size_t) indent + 1);
6377 memset ((void *) s_indent, ' ', (size_t) indent);
6378 s_indent[indent] = '\0';
6379
6380 /* Print loop's header. */
6381 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6382 loop->num, loop->header->index, loop->latch->index);
6383 fprintf (file, ", niter = ");
6384 print_generic_expr (file, loop->nb_iterations, 0);
6385
6386 if (loop->any_upper_bound)
6387 {
6388 fprintf (file, ", upper_bound = ");
6389 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6390 }
6391
6392 if (loop->any_estimate)
6393 {
6394 fprintf (file, ", estimate = ");
6395 dump_double_int (file, loop->nb_iterations_estimate, true);
6396 }
6397 fprintf (file, ")\n");
6398
6399 /* Print loop's body. */
6400 if (verbosity >= 1)
6401 {
6402 fprintf (file, "%s{\n", s_indent);
6403 FOR_EACH_BB (bb)
6404 if (bb->loop_father == loop)
6405 print_loops_bb (file, bb, indent, verbosity);
6406
6407 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6408 fprintf (file, "%s}\n", s_indent);
6409 }
6410 }
6411
6412 /* Print LOOP and its sibling loops on FILE, indented INDENT spaces.
6413    Depending on the VERBOSITY level, this outputs the contents of the
6414    loops, or just their structure.  */
6415
6416 static void
6417 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6418 {
6419 if (loop == NULL)
6420 return;
6421
6422 print_loop (file, loop, indent, verbosity);
6423 print_loop_and_siblings (file, loop->next, indent, verbosity);
6424 }
6425
6426 /* Starting from the CFG's entry block, pretty print on FILE the loop
6427    structure of the current function, at the given VERBOSITY level.  */
6428
6429 void
6430 print_loops (FILE *file, int verbosity)
6431 {
6432 basic_block bb;
6433
6434 bb = ENTRY_BLOCK_PTR;
6435 if (bb && bb->loop_father)
6436 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6437 }
6438
6439
6440 /* Debug the loop structure at tree level, at some VERBOSITY level.  */
6441
6442 void
6443 debug_loops (int verbosity)
6444 {
6445 print_loops (stderr, verbosity);
6446 }
6447
6448 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6449
6450 void
6451 debug_loop (struct loop *loop, int verbosity)
6452 {
6453 print_loop (stderr, loop, 0, verbosity);
6454 }
6455
6456 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6457 level. */
6458
6459 void
6460 debug_loop_num (unsigned num, int verbosity)
6461 {
6462 debug_loop (get_loop (num), verbosity);
6463 }
6464
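/* As a usage sketch, the debugging entry points above are convenient
   from within gdb:

     (gdb) call debug_loops (2)
     (gdb) call debug_loop_num (1, 3)

   which print, respectively, the whole loop tree at verbosity 2 and
   loop number 1 with its body at verbosity 3.  */
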
6465 /* Return true if BB ends with a call, possibly followed by some
6466    instructions that must stay with the call.  Return false
6467    otherwise.  */
6468
6469 static bool
6470 gimple_block_ends_with_call_p (basic_block bb)
6471 {
6472 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6473 return is_gimple_call (gsi_stmt (gsi));
6474 }
6475
6476
6477 /* Return true if BB ends with a conditional branch.  Return false
6478    otherwise.  */
6479
6480 static bool
6481 gimple_block_ends_with_condjump_p (const_basic_block bb)
6482 {
6483 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6484 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6485 }
6486
6487
6488 /* Return true if we need to add a fake edge to the exit at statement T.
6489 Helper function for gimple_flow_call_edges_add. */
6490
6491 static bool
6492 need_fake_edge_p (gimple t)
6493 {
6494 tree fndecl = NULL_TREE;
6495 int call_flags = 0;
6496
6497 /* NORETURN and LONGJMP calls already have an edge to exit.
6498 CONST and PURE calls do not need one.
6499 We don't currently check for CONST and PURE here, although
6500 it would be a good idea, because those attributes are
6501 figured out from the RTL in mark_constant_function, and
6502    the counter-incrementing code from -fprofile-arcs
6503 leads to different results from -fbranch-probabilities. */
6504 if (is_gimple_call (t))
6505 {
6506 fndecl = gimple_call_fndecl (t);
6507 call_flags = gimple_call_flags (t);
6508 }
6509
6510 if (is_gimple_call (t)
6511 && fndecl
6512 && DECL_BUILT_IN (fndecl)
6513 && (call_flags & ECF_NOTHROW)
6514 && !(call_flags & ECF_RETURNS_TWICE)
6515 /* fork() doesn't really return twice, but the effect of
6516 wrapping it in __gcov_fork() which calls __gcov_flush()
6517 and clears the counters before forking has the same
6518 effect as returning twice. Force a fake edge. */
6519 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6520 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6521 return false;
6522
6523 if (is_gimple_call (t)
6524 && !(call_flags & ECF_NORETURN))
6525 return true;
6526
6527 if (gimple_code (t) == GIMPLE_ASM
6528 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6529 return true;
6530
6531 return false;
6532 }
6533
6534
6535 /* Add fake edges to the function exit for any non-constant and
6536    non-noreturn calls, and for volatile inline assembly, in the bitmap
6537    of blocks specified by BLOCKS, or in the whole CFG if BLOCKS is
6538    zero.  Return the number of blocks that were split.
6539
6540 The goal is to expose cases in which entering a basic block does
6541 not imply that all subsequent instructions must be executed. */
6542
6543 static int
6544 gimple_flow_call_edges_add (sbitmap blocks)
6545 {
6546 int i;
6547 int blocks_split = 0;
6548 int last_bb = last_basic_block;
6549 bool check_last_block = false;
6550
6551 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6552 return 0;
6553
6554 if (! blocks)
6555 check_last_block = true;
6556 else
6557 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6558
6559 /* In the last basic block, before epilogue generation, there will be
6560 a fallthru edge to EXIT. Special care is required if the last insn
6561 of the last basic block is a call because make_edge folds duplicate
6562 edges, which would result in the fallthru edge also being marked
6563 fake, which would result in the fallthru edge being removed by
6564 remove_fake_edges, which would result in an invalid CFG.
6565
6566 Moreover, we can't elide the outgoing fake edge, since the block
6567 profiler needs to take this into account in order to solve the minimal
6568 spanning tree in the case that the call doesn't return.
6569
6570 Handle this by adding a dummy instruction in a new last basic block. */
6571 if (check_last_block)
6572 {
6573 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6574 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6575 gimple t = NULL;
6576
6577 if (!gsi_end_p (gsi))
6578 t = gsi_stmt (gsi);
6579
6580 if (t && need_fake_edge_p (t))
6581 {
6582 edge e;
6583
6584 e = find_edge (bb, EXIT_BLOCK_PTR);
6585 if (e)
6586 {
6587 gsi_insert_on_edge (e, gimple_build_nop ());
6588 gsi_commit_edge_inserts ();
6589 }
6590 }
6591 }
6592
6593   /* Now add fake edges to the function exit for any non-constant
6594      calls, since there is no way that we can determine whether they
6595      will return or not.  */
6596 for (i = 0; i < last_bb; i++)
6597 {
6598 basic_block bb = BASIC_BLOCK (i);
6599 gimple_stmt_iterator gsi;
6600 gimple stmt, last_stmt;
6601
6602 if (!bb)
6603 continue;
6604
6605 if (blocks && !TEST_BIT (blocks, i))
6606 continue;
6607
6608 gsi = gsi_last_bb (bb);
6609 if (!gsi_end_p (gsi))
6610 {
6611 last_stmt = gsi_stmt (gsi);
6612 do
6613 {
6614 stmt = gsi_stmt (gsi);
6615 if (need_fake_edge_p (stmt))
6616 {
6617 edge e;
6618
6619 /* The handling above of the final block before the
6620 epilogue should be enough to verify that there is
6621 no edge to the exit block in CFG already.
6622 Calling make_edge in such case would cause us to
6623 mark that edge as fake and remove it later. */
6624 #ifdef ENABLE_CHECKING
6625 if (stmt == last_stmt)
6626 {
6627 e = find_edge (bb, EXIT_BLOCK_PTR);
6628 gcc_assert (e == NULL);
6629 }
6630 #endif
6631
6632 /* Note that the following may create a new basic block
6633 and renumber the existing basic blocks. */
6634 if (stmt != last_stmt)
6635 {
6636 e = split_block (bb, stmt);
6637 if (e)
6638 blocks_split++;
6639 }
6640 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6641 }
6642 gsi_prev (&gsi);
6643 }
6644 while (!gsi_end_p (gsi));
6645 }
6646 }
6647
6648 if (blocks_split)
6649 verify_flow_info ();
6650
6651 return blocks_split;
6652 }
6653
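/* Callers reach the function above through the flow_call_edges_add
   CFG hook (see gimple_cfg_hooks below).  As a rough sketch, a
   profiling-style client wanting every block that can leave the
   function mid-stream to be split could do

     int n_split = gimple_flow_call_edges_add (NULL);

   where the null bitmap requests, as described above, that the whole
   CFG be processed.  */
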
6654 /* Purge dead abnormal call edges from basic block BB. */
6655
6656 bool
6657 gimple_purge_dead_abnormal_call_edges (basic_block bb)
6658 {
6659 bool changed = gimple_purge_dead_eh_edges (bb);
6660
6661 if (cfun->has_nonlocal_label)
6662 {
6663 gimple stmt = last_stmt (bb);
6664 edge_iterator ei;
6665 edge e;
6666
6667 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
6668 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6669 {
6670 if (e->flags & EDGE_ABNORMAL)
6671 {
6672 remove_edge (e);
6673 changed = true;
6674 }
6675 else
6676 ei_next (&ei);
6677 }
6678
6679 /* See gimple_purge_dead_eh_edges below. */
6680 if (changed)
6681 free_dominance_info (CDI_DOMINATORS);
6682 }
6683
6684 return changed;
6685 }
6686
6687 /* Removes edge E and all the blocks dominated by it, and updates dominance
6688 information. The IL in E->src needs to be updated separately.
6689    If dominance info is not available, only the edge E is removed.  */
6690
6691 void
6692 remove_edge_and_dominated_blocks (edge e)
6693 {
6694 VEC (basic_block, heap) *bbs_to_remove = NULL;
6695 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6696 bitmap df, df_idom;
6697 edge f;
6698 edge_iterator ei;
6699 bool none_removed = false;
6700 unsigned i;
6701 basic_block bb, dbb;
6702 bitmap_iterator bi;
6703
6704 if (!dom_info_available_p (CDI_DOMINATORS))
6705 {
6706 remove_edge (e);
6707 return;
6708 }
6709
6710 /* No updating is needed for edges to exit. */
6711 if (e->dest == EXIT_BLOCK_PTR)
6712 {
6713 if (cfgcleanup_altered_bbs)
6714 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6715 remove_edge (e);
6716 return;
6717 }
6718
6719 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6720 that is not dominated by E->dest, then this set is empty. Otherwise,
6721 all the basic blocks dominated by E->dest are removed.
6722
6723 Also, to DF_IDOM we store the immediate dominators of the blocks in
6724 the dominance frontier of E (i.e., of the successors of the
6725 removed blocks, if there are any, and of E->dest otherwise). */
6726 FOR_EACH_EDGE (f, ei, e->dest->preds)
6727 {
6728 if (f == e)
6729 continue;
6730
6731 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6732 {
6733 none_removed = true;
6734 break;
6735 }
6736 }
6737
6738 df = BITMAP_ALLOC (NULL);
6739 df_idom = BITMAP_ALLOC (NULL);
6740
6741 if (none_removed)
6742 bitmap_set_bit (df_idom,
6743 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6744 else
6745 {
6746 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
6747 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6748 {
6749 FOR_EACH_EDGE (f, ei, bb->succs)
6750 {
6751 if (f->dest != EXIT_BLOCK_PTR)
6752 bitmap_set_bit (df, f->dest->index);
6753 }
6754 }
6755 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6756 bitmap_clear_bit (df, bb->index);
6757
6758 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6759 {
6760 bb = BASIC_BLOCK (i);
6761 bitmap_set_bit (df_idom,
6762 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6763 }
6764 }
6765
6766 if (cfgcleanup_altered_bbs)
6767 {
6768 /* Record the set of the altered basic blocks. */
6769 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6770 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6771 }
6772
6773 /* Remove E and the cancelled blocks. */
6774 if (none_removed)
6775 remove_edge (e);
6776 else
6777 {
6778 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6779 delete_basic_block (bb);
6780 }
6781
6782 /* Update the dominance information. The immediate dominator may change only
6783 for blocks whose immediate dominator belongs to DF_IDOM:
6784
6785 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6786      removal.  Let Z be an arbitrary block such that idom(Z) = Y and
6787 Z dominates X after the removal. Before removal, there exists a path P
6788 from Y to X that avoids Z. Let F be the last edge on P that is
6789 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6790 dominates W, and because of P, Z does not dominate W), and W belongs to
6791 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6792 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6793 {
6794 bb = BASIC_BLOCK (i);
6795 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6796 dbb;
6797 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6798 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6799 }
6800
6801 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6802
6803 BITMAP_FREE (df);
6804 BITMAP_FREE (df_idom);
6805 VEC_free (basic_block, heap, bbs_to_remove);
6806 VEC_free (basic_block, heap, bbs_to_fix_dom);
6807 }
6808
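/* As an illustrative sketch (hypothetical, not from this file): a
   cleanup that has proved that edge E can never be taken may simply
   call

     remove_edge_and_dominated_blocks (e);

   and rely on the iterate_fix_dominators call above to repair the
   dominance information, instead of recomputing it from scratch.  */
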
6809 /* Purge dead EH edges from basic block BB. */
6810
6811 bool
6812 gimple_purge_dead_eh_edges (basic_block bb)
6813 {
6814 bool changed = false;
6815 edge e;
6816 edge_iterator ei;
6817 gimple stmt = last_stmt (bb);
6818
6819 if (stmt && stmt_can_throw_internal (stmt))
6820 return false;
6821
6822 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6823 {
6824 if (e->flags & EDGE_EH)
6825 {
6826 remove_edge_and_dominated_blocks (e);
6827 changed = true;
6828 }
6829 else
6830 ei_next (&ei);
6831 }
6832
6833 return changed;
6834 }
6835
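/* Purge dead EH edges from every basic block whose index is set in the
   bitmap BLOCKS.  Return true if any edge was removed.  */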
6836 bool
6837 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
6838 {
6839 bool changed = false;
6840 unsigned i;
6841 bitmap_iterator bi;
6842
6843 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6844 {
6845 basic_block bb = BASIC_BLOCK (i);
6846
6847 /* Earlier gimple_purge_dead_eh_edges could have removed
6848 this basic block already. */
6849 gcc_assert (bb || changed);
6850 if (bb != NULL)
6851 changed |= gimple_purge_dead_eh_edges (bb);
6852 }
6853
6854 return changed;
6855 }
6856
6857 /* This function is called whenever a new edge is created or
6858 redirected. */
6859
6860 static void
6861 gimple_execute_on_growing_pred (edge e)
6862 {
6863 basic_block bb = e->dest;
6864
6865 if (phi_nodes (bb))
6866 reserve_phi_args_for_new_edge (bb);
6867 }
6868
6869 /* This function is called immediately before edge E is removed from
6870 the edge vector E->dest->preds. */
6871
6872 static void
6873 gimple_execute_on_shrinking_pred (edge e)
6874 {
6875 if (phi_nodes (e->dest))
6876 remove_phi_args (e);
6877 }
6878
6879 /*---------------------------------------------------------------------------
6880   Helper functions for loop versioning
6881 ---------------------------------------------------------------------------*/
6882
6883 /* Adjust the PHI nodes for basic block FIRST.  Basic block SECOND is
6884    a copy of FIRST, and both are dominated by basic block NEW_HEAD.
6885    NEW_HEAD was created by splitting SECOND's incoming edge, so the
6886    edge from NEW_HEAD to SECOND already received PHI arguments from
6887    split_edge ().  Later, an additional edge E was created to connect
6888    NEW_HEAD and FIRST.  This routine now adds, on edge E, the PHI
6889    arguments that the NEW_HEAD-to-SECOND edge received from the split.  */
6890
6891 static void
6892 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6893 basic_block new_head, edge e)
6894 {
6895 gimple phi1, phi2;
6896 gimple_stmt_iterator psi1, psi2;
6897 tree def;
6898 edge e2 = find_edge (new_head, second);
6899
6900 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6901 edge, we should always have an edge from NEW_HEAD to SECOND. */
6902 gcc_assert (e2 != NULL);
6903
6904 /* Browse all 'second' basic block phi nodes and add phi args to
6905 edge 'e' for 'first' head. PHI args are always in correct order. */
6906
6907 for (psi2 = gsi_start_phis (second),
6908 psi1 = gsi_start_phis (first);
6909 !gsi_end_p (psi2) && !gsi_end_p (psi1);
6910 gsi_next (&psi2), gsi_next (&psi1))
6911 {
6912 phi1 = gsi_stmt (psi1);
6913 phi2 = gsi_stmt (psi2);
6914 def = PHI_ARG_DEF (phi2, e2->dest_idx);
6915 add_phi_arg (phi1, def, e);
6916 }
6917 }
6918
6919
6920 /* Add an if-else statement to COND_BB with condition COND_EXPR.
6921    SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
6922    the destination of the ELSE part.  */
6923
6924 static void
6925 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6926 basic_block second_head ATTRIBUTE_UNUSED,
6927 basic_block cond_bb, void *cond_e)
6928 {
6929 gimple_stmt_iterator gsi;
6930 gimple new_cond_expr;
6931 tree cond_expr = (tree) cond_e;
6932 edge e0;
6933
6934   /* Build the new conditional expression.  */
6935 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
6936 NULL_TREE, NULL_TREE);
6937
6938 /* Add new cond in cond_bb. */
6939 gsi = gsi_last_bb (cond_bb);
6940 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
6941
6942 /* Adjust edges appropriately to connect new head with first head
6943 as well as second head. */
6944 e0 = single_succ_edge (cond_bb);
6945 e0->flags &= ~EDGE_FALLTHRU;
6946 e0->flags |= EDGE_FALSE_VALUE;
6947 }
6948
6949 struct cfg_hooks gimple_cfg_hooks = {
6950 "gimple",
6951 gimple_verify_flow_info,
6952 gimple_dump_bb, /* dump_bb */
6953 create_bb, /* create_basic_block */
6954 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
6955 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
6956 gimple_can_remove_branch_p, /* can_remove_branch_p */
6957 remove_bb, /* delete_basic_block */
6958 gimple_split_block, /* split_block */
6959 gimple_move_block_after, /* move_block_after */
6960 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
6961 gimple_merge_blocks, /* merge_blocks */
6962 gimple_predict_edge, /* predict_edge */
6963 gimple_predicted_by_p, /* predicted_by_p */
6964 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
6965 gimple_duplicate_bb, /* duplicate_block */
6966 gimple_split_edge, /* split_edge */
6967   gimple_make_forwarder_block, /* make_forwarder_block */
6968 NULL, /* tidy_fallthru_edge */
6969 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
6970 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
6971 gimple_flow_call_edges_add, /* flow_call_edges_add */
6972 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
6973 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
6974 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
6975 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
6976 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
6977 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
6978 flush_pending_stmts /* flush_pending_stmts */
6979 };
6980
6981
6982 /* Split all critical edges. */
6983
6984 static unsigned int
6985 split_critical_edges (void)
6986 {
6987 basic_block bb;
6988 edge e;
6989 edge_iterator ei;
6990
6991 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
6992 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
6993 mappings around the calls to split_edge. */
6994 start_recording_case_labels ();
6995 FOR_ALL_BB (bb)
6996 {
6997 FOR_EACH_EDGE (e, ei, bb->succs)
6998 {
6999 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7000 split_edge (e);
7001 	  /* PRE inserts statements on edges and expects that, since
7002 	     split_critical_edges was done beforehand, committing edge
7003 	     insertions will not split more edges.  In addition to critical
7004 	     edges we must therefore split edges whose source block ends in
7005 	     a control flow statement, such as a RESX, and whose destination
7006 	     has multiple predecessors.  Go ahead and split them too.  This
7007 	     matches the logic in gimple_find_edge_insert_loc.  */
7008 else if ((!single_pred_p (e->dest)
7009 || phi_nodes (e->dest)
7010 || e->dest == EXIT_BLOCK_PTR)
7011 && e->src != ENTRY_BLOCK_PTR
7012 && !(e->flags & EDGE_ABNORMAL))
7013 {
7014 gimple_stmt_iterator gsi;
7015
7016 gsi = gsi_last_bb (e->src);
7017 if (!gsi_end_p (gsi)
7018 && stmt_ends_bb_p (gsi_stmt (gsi))
7019 && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
7020 split_edge (e);
7021 }
7022 }
7023 }
7024 end_recording_case_labels ();
7025 return 0;
7026 }
7027
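/* A sketch of why passes want this property (illustrative only): once
   PROP_no_crit_edges is provided, a pass may queue statements on any
   edge and commit them without the commit splitting further edges:

     gsi_insert_on_edge (e, stmt);
     gsi_commit_edge_inserts ();

   exactly the pattern used for the nop insertion in
   gimple_flow_call_edges_add above.  */
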
7028 struct gimple_opt_pass pass_split_crit_edges =
7029 {
7030 {
7031 GIMPLE_PASS,
7032 "crited", /* name */
7033 NULL, /* gate */
7034 split_critical_edges, /* execute */
7035 NULL, /* sub */
7036 NULL, /* next */
7037 0, /* static_pass_number */
7038 TV_TREE_SPLIT_EDGES, /* tv_id */
7039     PROP_cfg,				/* properties_required */
7040 PROP_no_crit_edges, /* properties_provided */
7041 0, /* properties_destroyed */
7042 0, /* todo_flags_start */
7043 TODO_dump_func /* todo_flags_finish */
7044 }
7045 };
7046
7047
7048 /* Build a ternary operation and gimplify it. Emit code before GSI.
7049 Return the gimple_val holding the result. */
7050
7051 tree
7052 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7053 tree type, tree a, tree b, tree c)
7054 {
7055 tree ret;
7056
7057 ret = fold_build3 (code, type, a, b, c);
7058 STRIP_NOPS (ret);
7059
7060 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7061 GSI_SAME_STMT);
7062 }
7063
7064 /* Build a binary operation and gimplify it. Emit code before GSI.
7065 Return the gimple_val holding the result. */
7066
7067 tree
7068 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7069 tree type, tree a, tree b)
7070 {
7071 tree ret;
7072
7073 ret = fold_build2 (code, type, a, b);
7074 STRIP_NOPS (ret);
7075
7076 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7077 GSI_SAME_STMT);
7078 }
7079
7080 /* Build a unary operation and gimplify it. Emit code before GSI.
7081 Return the gimple_val holding the result. */
7082
7083 tree
7084 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7085 tree a)
7086 {
7087 tree ret;
7088
7089 ret = fold_build1 (code, type, a);
7090 STRIP_NOPS (ret);
7091
7092 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7093 GSI_SAME_STMT);
7094 }
7095
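/* For example (an illustrative sketch), a pass that wants the gimple
   value for (A + B) * C emitted before the statement at GSI could
   write:

     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
     tree prod = gimplify_build2 (&gsi, MULT_EXPR, type, sum, c);

   Each call folds its operands where possible and forces the result
   into a gimple value inserted before GSI.  */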
7096
7097 \f
7098 /* Emit return warnings. */
7099
7100 static unsigned int
7101 execute_warn_function_return (void)
7102 {
7103 source_location location;
7104 gimple last;
7105 edge e;
7106 edge_iterator ei;
7107
7108 /* If we have a path to EXIT, then we do return. */
7109 if (TREE_THIS_VOLATILE (cfun->decl)
7110 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7111 {
7112 location = UNKNOWN_LOCATION;
7113 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7114 {
7115 last = last_stmt (e->src);
7116 if (gimple_code (last) == GIMPLE_RETURN
7117 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7118 break;
7119 }
7120 if (location == UNKNOWN_LOCATION)
7121 location = cfun->function_end_locus;
7122 warning (0, "%H%<noreturn%> function does return", &location);
7123 }
7124
7125 /* If we see "return;" in some basic block, then we do reach the end
7126 without returning a value. */
7127 else if (warn_return_type
7128 && !TREE_NO_WARNING (cfun->decl)
7129 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7130 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7131 {
7132 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7133 {
7134 gimple last = last_stmt (e->src);
7135 if (gimple_code (last) == GIMPLE_RETURN
7136 && gimple_return_retval (last) == NULL
7137 && !gimple_no_warning_p (last))
7138 {
7139 location = gimple_location (last);
7140 if (location == UNKNOWN_LOCATION)
7141 location = cfun->function_end_locus;
7142 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7143 TREE_NO_WARNING (cfun->decl) = 1;
7144 break;
7145 }
7146 }
7147 }
7148 return 0;
7149 }
7150
7151
7152 /* Given a basic block B which ends with a conditional and has
7153 precisely two successors, determine which of the edges is taken if
7154 the conditional is true and which is taken if the conditional is
7155 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7156
7157 void
7158 extract_true_false_edges_from_block (basic_block b,
7159 edge *true_edge,
7160 edge *false_edge)
7161 {
7162 edge e = EDGE_SUCC (b, 0);
7163
7164 if (e->flags & EDGE_TRUE_VALUE)
7165 {
7166 *true_edge = e;
7167 *false_edge = EDGE_SUCC (b, 1);
7168 }
7169 else
7170 {
7171 *false_edge = e;
7172 *true_edge = EDGE_SUCC (b, 1);
7173 }
7174 }
7175
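/* A typical use (hypothetical sketch): given a block COND_BB known to
   end in a GIMPLE_COND, a pass can find where each outcome leads:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (cond_bb, &true_edge,
					  &false_edge);

   after which true_edge->dest is the block executed when the condition
   holds, and false_edge->dest the one executed when it does not.  */
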
7176 struct gimple_opt_pass pass_warn_function_return =
7177 {
7178 {
7179 GIMPLE_PASS,
7180 NULL, /* name */
7181 NULL, /* gate */
7182 execute_warn_function_return, /* execute */
7183 NULL, /* sub */
7184 NULL, /* next */
7185 0, /* static_pass_number */
7186 TV_NONE, /* tv_id */
7187 PROP_cfg, /* properties_required */
7188 0, /* properties_provided */
7189 0, /* properties_destroyed */
7190 0, /* todo_flags_start */
7191 0 /* todo_flags_finish */
7192 }
7193 };
7194
7195 /* Emit noreturn warnings. */
7196
7197 static unsigned int
7198 execute_warn_function_noreturn (void)
7199 {
7200 if (warn_missing_noreturn
7201 && !TREE_THIS_VOLATILE (cfun->decl)
7202 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
7203 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
7204 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
7205 "for attribute %<noreturn%>",
7206 cfun->decl);
7207 return 0;
7208 }
7209
7210 struct gimple_opt_pass pass_warn_function_noreturn =
7211 {
7212 {
7213 GIMPLE_PASS,
7214 NULL, /* name */
7215 NULL, /* gate */
7216 execute_warn_function_noreturn, /* execute */
7217 NULL, /* sub */
7218 NULL, /* next */
7219 0, /* static_pass_number */
7220 TV_NONE, /* tv_id */
7221 PROP_cfg, /* properties_required */
7222 0, /* properties_provided */
7223 0, /* properties_destroyed */
7224 0, /* todo_flags_start */
7225 0 /* todo_flags_finish */
7226 }
7227 };