/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
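
/* As an illustrative sketch (not real data): for

     switch (x) { case 1: case 2: goto L1; case 3: goto L2; }

   the edge leading to L1's block maps to the chain CASE_1 -> CASE_2
   (linked through TREE_CHAIN), and the edge leading to L2's block
   maps to CASE_3 alone.  */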

static struct pointer_map_t *edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  make_edges ();
  cleanup_dead_labels ();

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",                                /* name */
  NULL,                                 /* gate */
  execute_build_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CFG,                          /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func                      /* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */
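
/* A minimal sketch of the transformation (the variable and label names
   are the ones created below): two computed gotos

       goto *p_1;                  goto *q_2;

   become

       gotovar = p_1;              gotovar = q_2;
       goto <factored_label>;      goto <factored_label>;

   with a single factored block

     <factored_label>:
       goto *gotovar;

   so that only one block carries the many computed-goto successor
   edges.  */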

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for its goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label ();
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            seq = gsi_split_seq_before (&i);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
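          /* E.g. (a sketch): "x = setjmp (buf)" becomes

                 tmp = setjmp (buf);
                 x = tmp;

             so the old value of x remains available on the abnormal
             edge out of this block.  */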
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
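  /* The array grows by roughly 25%; e.g. (sketch) with last_basic_block
     at 20, new_size below comes out to 25.  */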
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */
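
/* E.g. (a sketch): for an integral condition such as "if (x_1 != x_1)",
   fold_binary below returns integer zero, and the condition is rewritten
   via gimple_cond_make_false into the constant-false form "if (0)".  */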

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary (gimple_cond_code (stmt), boolean_type_node,
                              gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* Some calls are known not to return.  */
              fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                {
                  make_eh_edges (last);
                }
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     successor edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}


/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = TREE_CHAIN (t);
      TREE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return false;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}


/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.  */
static void
update_eh_label (struct eh_region *region)
{
  tree old_label = get_eh_region_tree_label (region);
  if (old_label)
    {
      tree new_label;
      basic_block bb = label_to_block (old_label);

      /* ??? After optimizing, there may be EH regions with labels
         that have already been removed from the function body, so
         there is no basic block for them.  */
      if (! bb)
        return;

      new_label = label_for_bb[bb->index].label;
      label_for_bb[bb->index].used = true;
      set_eh_region_tree_label (region, new_label);
    }
}


/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */
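
/* A small sketch of the effect: if a block starts with

     <D.1234>:   (artificial)
     mylab:      (user-defined)

   then "mylab" becomes the leading label, every reference to <D.1234>
   (gotos, switch cases, EH region labels) is redirected to "mylab",
   and the now-unused artificial label is deleted.  */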

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            tree true_label = gimple_cond_true_label (stmt);
            tree false_label = gimple_cond_false_label (stmt);

            if (true_label)
              gimple_cond_set_true_label (stmt, main_block_label (true_label));
            if (false_label)
              gimple_cond_set_false_label (stmt,
                                           main_block_label (false_label));
            break;
          }

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                tree label = main_block_label (CASE_LABEL (case_label));
                CASE_LABEL (case_label) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              tree new_dest = main_block_label (gimple_goto_dest (stmt));
              gimple_goto_set_dest (stmt, new_dest);
              break;
            }

        default:
          break;
        }
    }

  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        {
          int old_size = gimple_switch_num_labels (stmt);
          int i, j, new_size = old_size;
          tree default_case = NULL_TREE;
          tree default_label = NULL_TREE;
          bool has_default;

          /* The default label is always the first case in a switch
             statement after gimplification if it was not optimized
             away.  */
          if (!CASE_LOW (gimple_switch_default_label (stmt))
              && !CASE_HIGH (gimple_switch_default_label (stmt)))
            {
              default_case = gimple_switch_default_label (stmt);
              default_label = CASE_LABEL (default_case);
              has_default = true;
            }
          else
            has_default = false;

          /* Look for possible opportunities to merge cases.  */
          if (has_default)
            i = 1;
          else
            i = 0;
          while (i < old_size)
            {
              tree base_case, base_label, base_high;
              base_case = gimple_switch_label (stmt, i);

              gcc_assert (base_case);
              base_label = CASE_LABEL (base_case);

              /* Discard cases that have the same destination as the
                 default case.  */
              if (base_label == default_label)
                {
                  gimple_switch_set_label (stmt, i, NULL_TREE);
                  i++;
                  new_size--;
                  continue;
                }

              base_high = CASE_HIGH (base_case)
                          ? CASE_HIGH (base_case)
                          : CASE_LOW (base_case);
              i++;

              /* Try to merge case labels.  Break out when we reach the end
                 of the label vector or when we cannot merge the next case
                 label with the current one.  */
              while (i < old_size)
                {
                  tree merge_case = gimple_switch_label (stmt, i);
                  tree merge_label = CASE_LABEL (merge_case);
                  tree t = int_const_binop (PLUS_EXPR, base_high,
                                            integer_one_node, 1);

                  /* Merge the cases if they jump to the same place,
                     and their ranges are consecutive.  */
                  if (merge_label == base_label
                      && tree_int_cst_equal (CASE_LOW (merge_case), t))
                    {
                      base_high = CASE_HIGH (merge_case)
                                  ? CASE_HIGH (merge_case)
                                  : CASE_LOW (merge_case);
                      CASE_HIGH (base_case) = base_high;
                      gimple_switch_set_label (stmt, i, NULL_TREE);
                      new_size--;
                      i++;
                    }
                  else
                    break;
                }
            }

          /* Compress the case labels in the label vector, and adjust the
             length of the vector.  */
          for (i = 0, j = 0; i < new_size; i++)
            {
              while (! gimple_switch_label (stmt, j))
                j++;
              gimple_switch_set_label (stmt, i,
                                       gimple_switch_label (stmt, j++));
            }

          gcc_assert (new_size <= old_size);
          gimple_switch_set_num_labels (stmt, new_size);
        }
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date, we cannot eliminate any phis; however, if only
     some symbols as whole are marked for renaming, this is not a problem,
     as phi nodes for those symbols are irrelevant in updating anyway.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis))
    {
      gimple_stmt_iterator i;

      if (name_mappings_registered_p ())
        return false;

      for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi))
              && !may_propagate_copy (gimple_phi_result (phi),
                                      gimple_phi_arg_def (phi, 0)))
            return false;
        }
    }

  /* Do not remove user labels.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      if (!DECL_ARTIFICIAL (gimple_label_label (stmt)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops
      && b->loop_father->latch == b)
    return false;

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      if (gimple_code (stmt) != GIMPLE_PHI)
        push_stmt_changes (&stmt);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          size_t i;

          fold_stmt_inplace (stmt);
          if (cfgcleanup_altered_bbs)
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  This should go in pop_stmt_changes.  */
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              tree op = gimple_op (stmt, i);
              /* Operands may be empty here.  For example, the labels
                 of a GIMPLE_COND are nulled out following the creation
                 of the corresponding CFG edges.  */
              if (op && TREE_CODE (op) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (op);
            }

          maybe_clean_or_replace_eh_stmt (stmt, stmt);

          pop_stmt_changes (&stmt);
        }
    }

  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (!is_gimple_reg (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
        {
          gimple label = gsi_stmt (gsi);

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (gimple_label_label (label)))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, label, GSI_NEW_STMT);
            }
        }
      else
        {
          gimple_set_bb (gsi_stmt (gsi), a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reached by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}


/* Walk the function tree removing unnecessary statements.

     * Empty statement nodes are removed

     * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed

     * Unnecessary COND_EXPRs are removed

     * Some unnecessary BIND_EXPRs are removed

     * GOTO_EXPRs immediately preceding destination are removed.

   Clearly more work could be done.  The trick is doing the analysis
   and removal fast enough to be a net improvement in compile times.

   Note that when we remove a control structure such as a COND_EXPR,
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
   to ensure we eliminate all the useless code.  */

struct rus_data
{
  bool repeat;
  bool may_throw;
  bool may_branch;
  bool has_label;
  bool last_was_goto;
  gimple_stmt_iterator last_goto_gsi;
};


static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);

/* Given a statement sequence, find the first executable statement with
   location information, and warn that it is unreachable.  When searching,
   descend into containers in execution order.  */

static bool
remove_useless_stmts_warn_notreached (gimple_seq stmts)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_has_location (stmt))
        {
          location_t loc = gimple_location (stmt);
          if (LOCATION_LINE (loc) > 0)
            {
              warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
              return true;
            }
        }

      switch (gimple_code (stmt))
        {
        /* Unfortunately, we need the CFG now to detect unreachable
           branches in a conditional, so conditionals are not handled here.  */

        case GIMPLE_TRY:
          if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
            return true;
          if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
            return true;
          break;

        case GIMPLE_CATCH:
          return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));

        case GIMPLE_EH_FILTER:
          return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));

        case GIMPLE_BIND:
          return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));

        default:
          break;
        }
    }

  return false;
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_COND statements.  */
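
/* E.g. (a sketch): after folding, "if (1) goto L1; else goto L2;" is
   replaced below by the plain "goto L1;", and a conditional whose two
   arms share one destination label becomes a direct goto as well.  */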

static void
remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  /* The folded result must still be a conditional statement.  */
  fold_stmt (gsi);
  gcc_assert (gsi_stmt (*gsi) == stmt);

  data->may_branch = true;

  /* Replace trivial conditionals with gotos.  */
  if (gimple_cond_true_p (stmt))
    {
      /* Goto THEN label.  */
      tree then_label = gimple_cond_true_label (stmt);

      gsi_replace (gsi, gimple_build_goto (then_label), false);
      data->last_goto_gsi = *gsi;
      data->last_was_goto = true;
      data->repeat = true;
    }
  else if (gimple_cond_false_p (stmt))
    {
      /* Goto ELSE label.  */
      tree else_label = gimple_cond_false_label (stmt);

      gsi_replace (gsi, gimple_build_goto (else_label), false);
      data->last_goto_gsi = *gsi;
      data->last_was_goto = true;
      data->repeat = true;
    }
  else
    {
      tree then_label = gimple_cond_true_label (stmt);
      tree else_label = gimple_cond_false_label (stmt);

      if (then_label == else_label)
        {
          /* Goto common destination.  */
          gsi_replace (gsi, gimple_build_goto (then_label), false);
          data->last_goto_gsi = *gsi;
          data->last_was_goto = true;
          data->repeat = true;
        }
    }

  gsi_next (gsi);

  data->last_was_goto = false;
}

/* Helper for remove_useless_stmts_1.
   Handle the try-finally case for GIMPLE_TRY statements.  */
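
/* Sketch of the simplifications below: "try {} finally { B }" becomes
   just "B"; "try { A } finally {}" becomes just "A"; and when A can
   neither throw nor branch, "try { A } finally { B }" becomes the
   straight-line sequence "A; B;".  */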

static void
remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  bool save_may_branch, save_may_throw;
  bool this_may_branch, this_may_throw;

  gimple_seq eval_seq, cleanup_seq;
  gimple_stmt_iterator eval_gsi, cleanup_gsi;

  gimple stmt = gsi_stmt (*gsi);

  /* Collect may_branch and may_throw information for the body only.  */
  save_may_branch = data->may_branch;
  save_may_throw = data->may_throw;
  data->may_branch = false;
  data->may_throw = false;
  data->last_was_goto = false;

  eval_seq = gimple_try_eval (stmt);
  eval_gsi = gsi_start (eval_seq);
  remove_useless_stmts_1 (&eval_gsi, data);

  this_may_branch = data->may_branch;
  this_may_throw = data->may_throw;
  data->may_branch |= save_may_branch;
  data->may_throw |= save_may_throw;
  data->last_was_goto = false;

  cleanup_seq = gimple_try_cleanup (stmt);
  cleanup_gsi = gsi_start (cleanup_seq);
  remove_useless_stmts_1 (&cleanup_gsi, data);

  /* If the body is empty, then we can emit the FINALLY block without
     the enclosing TRY_FINALLY_EXPR.  */
  if (gimple_seq_empty_p (eval_seq))
    {
      gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }

  /* If the handler is empty, then we can emit the TRY block without
     the enclosing TRY_FINALLY_EXPR.  */
  else if (gimple_seq_empty_p (cleanup_seq))
    {
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }

  /* If the body neither throws, nor branches, then we can safely
     string the TRY and FINALLY blocks together.  */
  else if (!this_may_branch && !this_may_throw)
    {
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }
  else
    gsi_next (gsi);
}

/* Helper for remove_useless_stmts_1.
   Handle the try-catch case for GIMPLE_TRY statements.  */
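
/* Sketch: when the body of "try { A } catch (...) { B }" cannot throw,
   the whole construct is replaced below by just "A" (optionally warning
   that B is unreachable); a catch-all handler likewise tells us that no
   exception propagates past this point.  */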

static void
remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  bool save_may_throw, this_may_throw;

  gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
  gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;

  gimple stmt = gsi_stmt (*gsi);

  /* Collect may_throw information for the body only.  */
  save_may_throw = data->may_throw;
  data->may_throw = false;
  data->last_was_goto = false;

  eval_seq = gimple_try_eval (stmt);
  eval_gsi = gsi_start (eval_seq);
  remove_useless_stmts_1 (&eval_gsi, data);

  this_may_throw = data->may_throw;
  data->may_throw = save_may_throw;

  cleanup_seq = gimple_try_cleanup (stmt);

  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
  if (!this_may_throw)
    {
      if (warn_notreached)
        {
          remove_useless_stmts_warn_notreached (cleanup_seq);
        }
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
      return;
    }

  /* Process the catch clause specially.  We may be able to tell that
     no exceptions propagate past this point.  */

  this_may_throw = true;
  cleanup_gsi = gsi_start (cleanup_seq);
  stmt = gsi_stmt (cleanup_gsi);
  data->last_was_goto = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_CATCH:
      /* If the first element is a catch, they all must be.  */
      while (!gsi_end_p (cleanup_gsi))
        {
          stmt = gsi_stmt (cleanup_gsi);
          /* If we catch all exceptions, then the body does not
             propagate exceptions past this point.  */
          if (gimple_catch_types (stmt) == NULL)
            this_may_throw = false;
          data->last_was_goto = false;
          handler_seq = gimple_catch_handler (stmt);
          handler_gsi = gsi_start (handler_seq);
          remove_useless_stmts_1 (&handler_gsi, data);
          gsi_next (&cleanup_gsi);
        }
      gsi_next (gsi);
      break;

    case GIMPLE_EH_FILTER:
      /* If the first element is an eh_filter, it should stand alone.  */
      if (gimple_eh_filter_must_not_throw (stmt))
        this_may_throw = false;
      else if (gimple_eh_filter_types (stmt) == NULL)
        this_may_throw = false;
      failure_seq = gimple_eh_filter_failure (stmt);
      failure_gsi = gsi_start (failure_seq);
      remove_useless_stmts_1 (&failure_gsi, data);
      gsi_next (gsi);
      break;

    default:
      /* Otherwise this is a list of cleanup statements.  */
      remove_useless_stmts_1 (&cleanup_gsi, data);

      /* If the cleanup is empty, then we can emit the TRY block without
         the enclosing TRY_CATCH_EXPR.  */
      if (gimple_seq_empty_p (cleanup_seq))
        {
          gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
          gsi_remove (gsi, false);
          data->repeat = true;
        }
      else
        gsi_next (gsi);
      break;
    }

  data->may_throw |= this_may_throw;
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_BIND statements.  */
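
/* Sketch: a GIMPLE_BIND that declares no variables, i.e. a bare scope
   "{ S }", is flattened below: S is spliced into the enclosing sequence
   and the bind itself is removed (except for the outermost bind of the
   function or of an inlined function, which must stay).  */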

static void
remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data ATTRIBUTE_UNUSED)
{
  tree block;
  gimple_seq body_seq, fn_body_seq;
  gimple_stmt_iterator body_gsi;

  gimple stmt = gsi_stmt (*gsi);

  /* First remove anything underneath the BIND_EXPR.  */

  body_seq = gimple_bind_body (stmt);
  body_gsi = gsi_start (body_seq);
  remove_useless_stmts_1 (&body_gsi, data);

  /* If the GIMPLE_BIND has no variables, then we can pull everything
     up one level and remove the GIMPLE_BIND, unless this is the toplevel
     GIMPLE_BIND for the current function or an inlined function.

     When this situation occurs we will want to apply this
     optimization again.  */
  block = gimple_bind_block (stmt);
  fn_body_seq = gimple_body (current_function_decl);
  if (gimple_bind_vars (stmt) == NULL_TREE
      && (gimple_seq_empty_p (fn_body_seq)
          || stmt != gimple_seq_first_stmt (fn_body_seq))
      && (! block
          || ! BLOCK_ABSTRACT_ORIGIN (block)
          || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
              != FUNCTION_DECL)))
    {
      tree var = NULL_TREE;
      /* Even if there are no gimple_bind_vars, there might be other
         decls in BLOCK_VARS rendering the GIMPLE_BIND not useless.  */
      if (block && !BLOCK_NUM_NONLOCALIZED_VARS (block))
        for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
          if (TREE_CODE (var) == IMPORTED_DECL)
            break;
      if (var || (block && BLOCK_NUM_NONLOCALIZED_VARS (block)))
        gsi_next (gsi);
      else
        {
          gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
          gsi_remove (gsi, false);
          data->repeat = true;
        }
    }
  else
    gsi_next (gsi);
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_GOTO statements.  */

static void
remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  tree dest = gimple_goto_dest (stmt);

  data->may_branch = true;
  data->last_was_goto = false;

  /* Record iterator for last goto expr, so that we can delete it if
     unnecessary.  */
  if (TREE_CODE (dest) == LABEL_DECL)
    {
      data->last_goto_gsi = *gsi;
      data->last_was_goto = true;
    }

  gsi_next (gsi);
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_LABEL statements.  */
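
/* Sketch of the goto-to-adjacent-label case handled below:

       goto L;          becomes          L:
     L:                                    ...
       ...

   i.e. a goto whose destination label immediately follows it is
   replaced by a GIMPLE_NOP and later swept away.  */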
1865
1866 static void
1867 remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
1868 {
1869 gimple stmt = gsi_stmt (*gsi);
1870
1871 tree label = gimple_label_label (stmt);
1872
1873 data->has_label = true;
1874
1875 /* We do want to jump across non-local label receiver code. */
1876 if (DECL_NONLOCAL (label))
1877 data->last_was_goto = false;
1878
1879 else if (data->last_was_goto
1880 && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
1881 {
1882 /* Replace the preceding GIMPLE_GOTO statement with
1883 a GIMPLE_NOP, which will be subsequently removed.
1884 In this way, we avoid invalidating other iterators
1885 active on the statement sequence. */
1886 gsi_replace(&data->last_goto_gsi, gimple_build_nop(), false);
1887 data->last_was_goto = false;
1888 data->repeat = true;
1889 }
1890
1891 /* ??? Add something here to delete unused labels. */
1892
1893 gsi_next (gsi);
1894 }
1895
1896
1897 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1898
1899 void
1900 notice_special_calls (gimple call)
1901 {
1902 int flags = gimple_call_flags (call);
1903
1904 if (flags & ECF_MAY_BE_ALLOCA)
1905 cfun->calls_alloca = true;
1906 if (flags & ECF_RETURNS_TWICE)
1907 cfun->calls_setjmp = true;
1908 }
1909
1910
1911 /* Clear flags set by notice_special_calls. Used by dead code removal
1912 to update the flags. */
1913
1914 void
1915 clear_special_calls (void)
1916 {
1917 cfun->calls_alloca = false;
1918 cfun->calls_setjmp = false;
1919 }
1920
1921 /* Remove useless statements from a statement sequence, and perform
1922 some preliminary simplifications. */
1923
1924 static void
1925 remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
1926 {
1927 while (!gsi_end_p (*gsi))
1928 {
1929 gimple stmt = gsi_stmt (*gsi);
1930
1931 switch (gimple_code (stmt))
1932 {
1933 case GIMPLE_COND:
1934 remove_useless_stmts_cond (gsi, data);
1935 break;
1936
1937 case GIMPLE_GOTO:
1938 remove_useless_stmts_goto (gsi, data);
1939 break;
1940
1941 case GIMPLE_LABEL:
1942 remove_useless_stmts_label (gsi, data);
1943 break;
1944
1945 case GIMPLE_ASSIGN:
1946 fold_stmt (gsi);
1947 stmt = gsi_stmt (*gsi);
1948 data->last_was_goto = false;
1949 if (stmt_could_throw_p (stmt))
1950 data->may_throw = true;
1951 gsi_next (gsi);
1952 break;
1953
1954 case GIMPLE_ASM:
1955 fold_stmt (gsi);
1956 data->last_was_goto = false;
1957 gsi_next (gsi);
1958 break;
1959
1960 case GIMPLE_CALL:
1961 fold_stmt (gsi);
1962 stmt = gsi_stmt (*gsi);
1963 data->last_was_goto = false;
1964 if (is_gimple_call (stmt))
1965 notice_special_calls (stmt);
1966
1967 /* We used to call update_gimple_call_flags here,
1968 which copied side-effects and nothrows status
1969 from the function decl to the call. In the new
1970 tuplified GIMPLE, the accessors for this information
1971 always consult the function decl, so this copying
1972 is no longer necessary. */
1973 if (stmt_could_throw_p (stmt))
1974 data->may_throw = true;
1975 gsi_next (gsi);
1976 break;
1977
1978 case GIMPLE_RETURN:
1979 fold_stmt (gsi);
1980 data->last_was_goto = false;
1981 data->may_branch = true;
1982 gsi_next (gsi);
1983 break;
1984
1985 case GIMPLE_BIND:
1986 remove_useless_stmts_bind (gsi, data);
1987 break;
1988
1989 case GIMPLE_TRY:
1990 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
1991 remove_useless_stmts_tc (gsi, data);
1992 else if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
1993 remove_useless_stmts_tf (gsi, data);
1994 else
1995 gcc_unreachable ();
1996 break;
1997
1998 case GIMPLE_CATCH:
1999 gcc_unreachable ();
2000 break;
2001
2002 case GIMPLE_NOP:
2003 gsi_remove (gsi, false);
2004 break;
2005
2006 case GIMPLE_OMP_FOR:
2007 {
2008 gimple_seq pre_body_seq = gimple_omp_for_pre_body (stmt);
2009 gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);
2010
2011 remove_useless_stmts_1 (&pre_body_gsi, data);
2012 data->last_was_goto = false;
2013 }
2014 /* FALLTHROUGH */
2015 case GIMPLE_OMP_CRITICAL:
2016 case GIMPLE_OMP_CONTINUE:
2017 case GIMPLE_OMP_MASTER:
2018 case GIMPLE_OMP_ORDERED:
2019 case GIMPLE_OMP_SECTION:
2020 case GIMPLE_OMP_SECTIONS:
2021 case GIMPLE_OMP_SINGLE:
2022 {
2023 gimple_seq body_seq = gimple_omp_body (stmt);
2024 gimple_stmt_iterator body_gsi = gsi_start (body_seq);
2025
2026 remove_useless_stmts_1 (&body_gsi, data);
2027 data->last_was_goto = false;
2028 gsi_next (gsi);
2029 }
2030 break;
2031
2032 case GIMPLE_OMP_PARALLEL:
2033 case GIMPLE_OMP_TASK:
2034 {
2035 /* Make sure the outermost GIMPLE_BIND isn't removed
2036 as useless. */
2037 gimple_seq body_seq = gimple_omp_body (stmt);
2038 gimple bind = gimple_seq_first_stmt (body_seq);
2039 gimple_seq bind_seq = gimple_bind_body (bind);
2040 gimple_stmt_iterator bind_gsi = gsi_start (bind_seq);
2041
2042 remove_useless_stmts_1 (&bind_gsi, data);
2043 data->last_was_goto = false;
2044 gsi_next (gsi);
2045 }
2046 break;
2047
2048 case GIMPLE_CHANGE_DYNAMIC_TYPE:
2049 	  /* If we are not optimizing, remove GIMPLE_CHANGE_DYNAMIC_TYPE here,
2050 	     since expansion is confused by these statements; otherwise they
2051 	     are only removed during alias computation. */
2052 if (!optimize)
2053 {
2054 data->last_was_goto = false;
2055 gsi_remove (gsi, false);
2056 break;
2057 }
2058 /* Fallthru. */
2059
2060 default:
2061 data->last_was_goto = false;
2062 gsi_next (gsi);
2063 break;
2064 }
2065 }
2066 }
2067
2068 /* Walk the function tree, removing useless statements and performing
2069 some preliminary simplifications. */
2070
2071 static unsigned int
2072 remove_useless_stmts (void)
2073 {
2074 struct rus_data data;
2075
2076 clear_special_calls ();
2077
2078 do
2079 {
2080 gimple_stmt_iterator gsi;
2081
2082 gsi = gsi_start (gimple_body (current_function_decl));
2083 memset (&data, 0, sizeof (data));
2084 remove_useless_stmts_1 (&gsi, &data);
2085 }
2086 while (data.repeat);
2087
2088 #ifdef ENABLE_TYPES_CHECKING
2089 verify_types_in_gimple_seq (gimple_body (current_function_decl));
2090 #endif
2091
2092 return 0;
2093 }
2094
2095
2096 struct gimple_opt_pass pass_remove_useless_stmts =
2097 {
2098 {
2099 GIMPLE_PASS,
2100 "useless", /* name */
2101 NULL, /* gate */
2102 remove_useless_stmts, /* execute */
2103 NULL, /* sub */
2104 NULL, /* next */
2105 0, /* static_pass_number */
2106 TV_NONE, /* tv_id */
2107 PROP_gimple_any, /* properties_required */
2108 0, /* properties_provided */
2109 0, /* properties_destroyed */
2110 0, /* todo_flags_start */
2111 TODO_dump_func /* todo_flags_finish */
2112 }
2113 };
2114
2115 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2116
2117 static void
2118 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2119 {
2120 /* Since this block is no longer reachable, we can just delete all
2121 of its PHI nodes. */
2122 remove_phi_nodes (bb);
2123
2124 /* Remove edges to BB's successors. */
2125 while (EDGE_COUNT (bb->succs) > 0)
2126 remove_edge (EDGE_SUCC (bb, 0));
2127 }
2128
2129
2130 /* Remove statements of basic block BB. */
2131
2132 static void
2133 remove_bb (basic_block bb)
2134 {
2135 gimple_stmt_iterator i;
2136 source_location loc = UNKNOWN_LOCATION;
2137
2138 if (dump_file)
2139 {
2140 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2141 if (dump_flags & TDF_DETAILS)
2142 {
2143 dump_bb (bb, dump_file, 0);
2144 fprintf (dump_file, "\n");
2145 }
2146 }
2147
2148 if (current_loops)
2149 {
2150 struct loop *loop = bb->loop_father;
2151
2152 /* If a loop gets removed, clean up the information associated
2153 with it. */
2154 if (loop->latch == bb
2155 || loop->header == bb)
2156 free_numbers_of_iterations_estimates_loop (loop);
2157 }
2158
2159 /* Remove all the instructions in the block. */
2160 if (bb_seq (bb) != NULL)
2161 {
2162 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2163 {
2164 gimple stmt = gsi_stmt (i);
2165 if (gimple_code (stmt) == GIMPLE_LABEL
2166 && (FORCED_LABEL (gimple_label_label (stmt))
2167 || DECL_NONLOCAL (gimple_label_label (stmt))))
2168 {
2169 basic_block new_bb;
2170 gimple_stmt_iterator new_gsi;
2171
2172 /* A non-reachable non-local label may still be referenced.
2173 But it no longer needs to carry the extra semantics of
2174 non-locality. */
2175 if (DECL_NONLOCAL (gimple_label_label (stmt)))
2176 {
2177 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2178 FORCED_LABEL (gimple_label_label (stmt)) = 1;
2179 }
2180
2181 new_bb = bb->prev_bb;
2182 new_gsi = gsi_start_bb (new_bb);
2183 gsi_remove (&i, false);
2184 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2185 }
2186 else
2187 {
2188 /* Release SSA definitions if we are in SSA. Note that we
2189 may be called when not in SSA. For example,
2190 final_cleanup calls this function via
2191 cleanup_tree_cfg. */
2192 if (gimple_in_ssa_p (cfun))
2193 release_defs (stmt);
2194
2195 gsi_remove (&i, true);
2196 }
2197
2198 /* Don't warn for removed gotos. Gotos are often removed due to
2199 jump threading, thus resulting in bogus warnings. Not great,
2200 since this way we lose warnings for gotos in the original
2201 program that are indeed unreachable. */
2202 if (gimple_code (stmt) != GIMPLE_GOTO
2203 && gimple_has_location (stmt)
2204 && !loc)
2205 loc = gimple_location (stmt);
2206 }
2207 }
2208
2209   /* If requested, give a warning that the first statement in the
2210      block is unreachable. The loop above walks statements forwards
2211      and LOC is only set once, so it refers to the first statement
2212      in the block that carries a location. */
2213 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
2214 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2215
2216 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2217 bb->il.gimple = NULL;
2218 }
2219
2220
2221 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2222 predicate VAL, return the edge that will be taken out of the block.
2223 If VAL does not match a unique edge, NULL is returned. */
2224
2225 edge
2226 find_taken_edge (basic_block bb, tree val)
2227 {
2228 gimple stmt;
2229
2230 stmt = last_stmt (bb);
2231
2232 gcc_assert (stmt);
2233 gcc_assert (is_ctrl_stmt (stmt));
2234
2235 if (val == NULL)
2236 return NULL;
2237
2238 if (!is_gimple_min_invariant (val))
2239 return NULL;
2240
2241 if (gimple_code (stmt) == GIMPLE_COND)
2242 return find_taken_edge_cond_expr (bb, val);
2243
2244 if (gimple_code (stmt) == GIMPLE_SWITCH)
2245 return find_taken_edge_switch_expr (bb, val);
2246
2247 if (computed_goto_p (stmt))
2248 {
2249       /* Only optimize if the argument is a label; if the argument is
2250 	 not a label then we cannot construct a proper CFG.
2251
2252 It may be the case that we only need to allow the LABEL_REF to
2253 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2254 appear inside a LABEL_EXPR just to be safe. */
2255 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2256 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2257 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2258 return NULL;
2259 }
2260
2261 gcc_unreachable ();
2262 }
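
/* Usage sketch (hypothetical helper): a caller that has folded the
   controlling predicate of BB to the constant VAL can ask for the
   statically known successor block; NULL means the edge is not unique.  */

static basic_block
taken_successor (basic_block bb, tree val)
{
  edge e = find_taken_edge (bb, val);
  return e ? e->dest : NULL;
}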
2263
2264 /* Given a constant value VAL and the basic block BB that ends with a
2265    computed GIMPLE_GOTO, determine which of the outgoing edges will be
2266    taken out of the block. Return NULL if the edge cannot be determined. */
2267
2268 static edge
2269 find_taken_edge_computed_goto (basic_block bb, tree val)
2270 {
2271 basic_block dest;
2272 edge e = NULL;
2273
2274 dest = label_to_block (val);
2275 if (dest)
2276 {
2277 e = find_edge (bb, dest);
2278 gcc_assert (e != NULL);
2279 }
2280
2281 return e;
2282 }
2283
2284 /* Given a constant value VAL and the basic block BB that ends with a
2285    GIMPLE_COND, determine which of the two outgoing edges will be
2286    taken out of the block. VAL must be an INTEGER_CST. */
2287
2288 static edge
2289 find_taken_edge_cond_expr (basic_block bb, tree val)
2290 {
2291 edge true_edge, false_edge;
2292
2293 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2294
2295 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2296 return (integer_zerop (val) ? false_edge : true_edge);
2297 }
2298
2299 /* Given an INTEGER_CST VAL and the basic block BB that ends with a
2300    GIMPLE_SWITCH, determine which edge will be taken out of the
2301    block. */
2302
2303 static edge
2304 find_taken_edge_switch_expr (basic_block bb, tree val)
2305 {
2306 basic_block dest_bb;
2307 edge e;
2308 gimple switch_stmt;
2309 tree taken_case;
2310
2311 switch_stmt = last_stmt (bb);
2312 taken_case = find_case_label_for_value (switch_stmt, val);
2313 dest_bb = label_to_block (CASE_LABEL (taken_case));
2314
2315 e = find_edge (bb, dest_bb);
2316 gcc_assert (e);
2317 return e;
2318 }
2319
2320
2321 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will branch to for VAL.
2322    We can make optimal use here of the fact that the case labels are
2323    sorted: we can do a binary search for a case matching VAL. */
2324
2325 static tree
2326 find_case_label_for_value (gimple switch_stmt, tree val)
2327 {
2328 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2329 tree default_case = gimple_switch_default_label (switch_stmt);
2330
2331 for (low = 0, high = n; high - low > 1; )
2332 {
2333 size_t i = (high + low) / 2;
2334 tree t = gimple_switch_label (switch_stmt, i);
2335 int cmp;
2336
2337 /* Cache the result of comparing CASE_LOW and val. */
2338 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2339
2340 if (cmp > 0)
2341 high = i;
2342 else
2343 low = i;
2344
2345 if (CASE_HIGH (t) == NULL)
2346 {
2347 	  /* A single-valued case label. */
2348 if (cmp == 0)
2349 return t;
2350 }
2351 else
2352 {
2353 /* A case range. We can only handle integer ranges. */
2354 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2355 return t;
2356 }
2357 }
2358
2359 return default_case;
2360 }
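
/* Checking sketch (hypothetical, for illustration only): a linear rescan
   that should agree with the binary search above.  It relies on the same
   assumptions: label 0 is the default, and labels 1..n-1 are sorted by
   CASE_LOW.  */

static tree
find_case_label_linear (gimple switch_stmt, tree val)
{
  size_t i, n = gimple_switch_num_labels (switch_stmt);

  for (i = 1; i < n; i++)
    {
      tree t = gimple_switch_label (switch_stmt, i);
      /* A single-valued label covers just CASE_LOW; a range covers
	 CASE_LOW..CASE_HIGH.  */
      tree high = CASE_HIGH (t) ? CASE_HIGH (t) : CASE_LOW (t);

      if (tree_int_cst_compare (CASE_LOW (t), val) <= 0
	  && tree_int_cst_compare (high, val) >= 0)
	return t;
    }

  return gimple_switch_default_label (switch_stmt);
}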
2361
2362
2363 /* Dump a basic block on stderr. */
2364
2365 void
2366 gimple_debug_bb (basic_block bb)
2367 {
2368 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2369 }
2370
2371
2372 /* Dump basic block with index N on stderr. */
2373
2374 basic_block
2375 gimple_debug_bb_n (int n)
2376 {
2377 gimple_debug_bb (BASIC_BLOCK (n));
2378 return BASIC_BLOCK (n);
2379 }
2380
2381
2382 /* Dump the CFG on stderr.
2383
2384 FLAGS are the same used by the tree dumping functions
2385 (see TDF_* in tree-pass.h). */
2386
2387 void
2388 gimple_debug_cfg (int flags)
2389 {
2390 gimple_dump_cfg (stderr, flags);
2391 }
2392
2393
2394 /* Dump the program showing basic block boundaries on the given FILE.
2395
2396 FLAGS are the same used by the tree dumping functions (see TDF_* in
2397 tree.h). */
2398
2399 void
2400 gimple_dump_cfg (FILE *file, int flags)
2401 {
2402 if (flags & TDF_DETAILS)
2403 {
2404 const char *funcname
2405 = lang_hooks.decl_printable_name (current_function_decl, 2);
2406
2407 fputc ('\n', file);
2408 fprintf (file, ";; Function %s\n\n", funcname);
2409 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2410 n_basic_blocks, n_edges, last_basic_block);
2411
2412 brief_dump_cfg (file);
2413 fprintf (file, "\n");
2414 }
2415
2416 if (flags & TDF_STATS)
2417 dump_cfg_stats (file);
2418
2419 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2420 }
2421
2422
2423 /* Dump CFG statistics on FILE. */
2424
2425 void
2426 dump_cfg_stats (FILE *file)
2427 {
2428 static long max_num_merged_labels = 0;
2429 unsigned long size, total = 0;
2430 long num_edges;
2431 basic_block bb;
2432 const char * const fmt_str = "%-30s%-13s%12s\n";
2433 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2434 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2435 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2436 const char *funcname
2437 = lang_hooks.decl_printable_name (current_function_decl, 2);
2438
2439
2440 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2441
2442 fprintf (file, "---------------------------------------------------------\n");
2443 fprintf (file, fmt_str, "", " Number of ", "Memory");
2444 fprintf (file, fmt_str, "", " instances ", "used ");
2445 fprintf (file, "---------------------------------------------------------\n");
2446
2447 size = n_basic_blocks * sizeof (struct basic_block_def);
2448 total += size;
2449 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2450 SCALE (size), LABEL (size));
2451
2452 num_edges = 0;
2453 FOR_EACH_BB (bb)
2454 num_edges += EDGE_COUNT (bb->succs);
2455 size = num_edges * sizeof (struct edge_def);
2456 total += size;
2457 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2458
2459 fprintf (file, "---------------------------------------------------------\n");
2460 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2461 LABEL (total));
2462 fprintf (file, "---------------------------------------------------------\n");
2463 fprintf (file, "\n");
2464
2465 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2466 max_num_merged_labels = cfg_stats.num_merged_labels;
2467
2468 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2469 cfg_stats.num_merged_labels, max_num_merged_labels);
2470
2471 fprintf (file, "\n");
2472 }
2473
2474
2475 /* Dump CFG statistics on stderr. Keep extern so that it's always
2476 linked in the final executable. */
2477
2478 void
2479 debug_cfg_stats (void)
2480 {
2481 dump_cfg_stats (stderr);
2482 }
2483
2484
2485 /* Dump the flowgraph to a .vcg FILE. */
2486
2487 static void
2488 gimple_cfg2vcg (FILE *file)
2489 {
2490 edge e;
2491 edge_iterator ei;
2492 basic_block bb;
2493 const char *funcname
2494 = lang_hooks.decl_printable_name (current_function_decl, 2);
2495
2496 /* Write the file header. */
2497 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2498 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2499 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2500
2501 /* Write blocks and edges. */
2502 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2503 {
2504 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2505 e->dest->index);
2506
2507 if (e->flags & EDGE_FAKE)
2508 fprintf (file, " linestyle: dotted priority: 10");
2509 else
2510 fprintf (file, " linestyle: solid priority: 100");
2511
2512 fprintf (file, " }\n");
2513 }
2514 fputc ('\n', file);
2515
2516 FOR_EACH_BB (bb)
2517 {
2518 enum gimple_code head_code, end_code;
2519 const char *head_name, *end_name;
2520 int head_line = 0;
2521 int end_line = 0;
2522 gimple first = first_stmt (bb);
2523 gimple last = last_stmt (bb);
2524
2525 if (first)
2526 {
2527 head_code = gimple_code (first);
2528 head_name = gimple_code_name[head_code];
2529 head_line = get_lineno (first);
2530 }
2531 else
2532 head_name = "no-statement";
2533
2534 if (last)
2535 {
2536 end_code = gimple_code (last);
2537 end_name = gimple_code_name[end_code];
2538 end_line = get_lineno (last);
2539 }
2540 else
2541 end_name = "no-statement";
2542
2543 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2544 bb->index, bb->index, head_name, head_line, end_name,
2545 end_line);
2546
2547 FOR_EACH_EDGE (e, ei, bb->succs)
2548 {
2549 if (e->dest == EXIT_BLOCK_PTR)
2550 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2551 else
2552 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2553
2554 if (e->flags & EDGE_FAKE)
2555 fprintf (file, " priority: 10 linestyle: dotted");
2556 else
2557 fprintf (file, " priority: 100 linestyle: solid");
2558
2559 fprintf (file, " }\n");
2560 }
2561
2562 if (bb->next_bb != EXIT_BLOCK_PTR)
2563 fputc ('\n', file);
2564 }
2565
2566 fputs ("}\n\n", file);
2567 }
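
/* Usage sketch (hypothetical wrapper): write the current function's
   flowgraph to PATH so it can be inspected with a VCG viewer.  */

static void
debug_cfg2vcg (const char *path)
{
  FILE *file = fopen (path, "w");

  if (file)
    {
      gimple_cfg2vcg (file);
      fclose (file);
    }
}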
2568
2569
2570
2571 /*---------------------------------------------------------------------------
2572 Miscellaneous helpers
2573 ---------------------------------------------------------------------------*/
2574
2575 /* Return true if T represents a stmt that always transfers control. */
2576
2577 bool
2578 is_ctrl_stmt (gimple t)
2579 {
2580 return gimple_code (t) == GIMPLE_COND
2581 || gimple_code (t) == GIMPLE_SWITCH
2582 || gimple_code (t) == GIMPLE_GOTO
2583 || gimple_code (t) == GIMPLE_RETURN
2584 || gimple_code (t) == GIMPLE_RESX;
2585 }
2586
2587
2588 /* Return true if T is a statement that may alter the flow of control
2589 (e.g., a call to a non-returning function). */
2590
2591 bool
2592 is_ctrl_altering_stmt (gimple t)
2593 {
2594 gcc_assert (t);
2595
2596 if (is_gimple_call (t))
2597 {
2598 int flags = gimple_call_flags (t);
2599
2600       /* A non-pure/const call alters control flow if the current
2601 	 function has nonlocal labels. */
2602 if (!(flags & (ECF_CONST | ECF_PURE))
2603 && cfun->has_nonlocal_label)
2604 return true;
2605
2606 /* A call also alters control flow if it does not return. */
2607       if (flags & ECF_NORETURN)
2608 return true;
2609 }
2610
2611 /* OpenMP directives alter control flow. */
2612 if (is_gimple_omp (t))
2613 return true;
2614
2615 /* If a statement can throw, it alters control flow. */
2616 return stmt_can_throw_internal (t);
2617 }
2618
2619
2620 /* Return true if T is a simple local goto. */
2621
2622 bool
2623 simple_goto_p (gimple t)
2624 {
2625 return (gimple_code (t) == GIMPLE_GOTO
2626 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2627 }
2628
2629
2630 /* Return true if T can make an abnormal transfer of control flow.
2631 Transfers of control flow associated with EH are excluded. */
2632
2633 bool
2634 stmt_can_make_abnormal_goto (gimple t)
2635 {
2636 if (computed_goto_p (t))
2637 return true;
2638 if (is_gimple_call (t))
2639 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2640 return false;
2641 }
2642
2643
2644 /* Return true if STMT should start a new basic block. PREV_STMT is
2645 the statement preceding STMT. It is used when STMT is a label or a
2646 case label. Labels should only start a new basic block if their
2647    previous statement wasn't a label. Otherwise, a sequence of labels
2648 would generate unnecessary basic blocks that only contain a single
2649 label. */
2650
2651 static inline bool
2652 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2653 {
2654 if (stmt == NULL)
2655 return false;
2656
2657 /* Labels start a new basic block only if the preceding statement
2658 wasn't a label of the same type. This prevents the creation of
2659 consecutive blocks that have nothing but a single label. */
2660 if (gimple_code (stmt) == GIMPLE_LABEL)
2661 {
2662 /* Nonlocal and computed GOTO targets always start a new block. */
2663 if (DECL_NONLOCAL (gimple_label_label (stmt))
2664 || FORCED_LABEL (gimple_label_label (stmt)))
2665 return true;
2666
2667 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2668 {
2669 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2670 return true;
2671
2672 cfg_stats.num_merged_labels++;
2673 return false;
2674 }
2675 else
2676 return true;
2677 }
2678
2679 return false;
2680 }
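
/* Worked example for the rule above (assuming none of these labels is
   nonlocal or forced):

	 L1:
	 L2:
	 x = 1;
	 L3:
	 y = 2;

   Only L1 and L3 start basic blocks; L2 follows another label, so it is
   merged into L1's block and counted in cfg_stats.num_merged_labels.  */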
2681
2682
2683 /* Return true if T should end a basic block. */
2684
2685 bool
2686 stmt_ends_bb_p (gimple t)
2687 {
2688 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2689 }
2690
2691 /* Remove block annotations and other data structures. */
2692
2693 void
2694 delete_tree_cfg_annotations (void)
2695 {
2696 label_to_block_map = NULL;
2697 }
2698
2699
2700 /* Return the first statement in basic block BB. */
2701
2702 gimple
2703 first_stmt (basic_block bb)
2704 {
2705 gimple_stmt_iterator i = gsi_start_bb (bb);
2706 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2707 }
2708
2709 /* Return the last statement in basic block BB. */
2710
2711 gimple
2712 last_stmt (basic_block bb)
2713 {
2714 gimple_stmt_iterator b = gsi_last_bb (bb);
2715 return !gsi_end_p (b) ? gsi_stmt (b) : NULL;
2716 }
2717
2718 /* Return the last statement of an otherwise empty block. Return NULL
2719 if the block is totally empty, or if it contains more than one
2720 statement. */
2721
2722 gimple
2723 last_and_only_stmt (basic_block bb)
2724 {
2725 gimple_stmt_iterator i = gsi_last_bb (bb);
2726 gimple last, prev;
2727
2728 if (gsi_end_p (i))
2729 return NULL;
2730
2731 last = gsi_stmt (i);
2732 gsi_prev (&i);
2733 if (gsi_end_p (i))
2734 return last;
2735
2736 /* Empty statements should no longer appear in the instruction stream.
2737 Everything that might have appeared before should be deleted by
2738 remove_useless_stmts, and the optimizers should just gsi_remove
2739 instead of smashing with build_empty_stmt.
2740
2741 Thus the only thing that should appear here in a block containing
2742 one executable statement is a label. */
2743 prev = gsi_stmt (i);
2744 if (gimple_code (prev) == GIMPLE_LABEL)
2745 return last;
2746 else
2747 return NULL;
2748 }
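
/* Usage sketch (hypothetical helper): detect a block whose only real
   content is an unconditional jump, a typical forwarding candidate.  */

static bool
bb_is_lone_goto (basic_block bb)
{
  gimple stmt = last_and_only_stmt (bb);
  return stmt != NULL && gimple_code (stmt) == GIMPLE_GOTO;
}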
2749
2750 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2751
2752 static void
2753 reinstall_phi_args (edge new_edge, edge old_edge)
2754 {
2755 edge_var_map_vector v;
2756 edge_var_map *vm;
2757 int i;
2758 gimple_stmt_iterator phis;
2759
2760 v = redirect_edge_var_map_vector (old_edge);
2761 if (!v)
2762 return;
2763
2764 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2765 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2766 i++, gsi_next (&phis))
2767 {
2768 gimple phi = gsi_stmt (phis);
2769 tree result = redirect_edge_var_map_result (vm);
2770 tree arg = redirect_edge_var_map_def (vm);
2771
2772 gcc_assert (result == gimple_phi_result (phi));
2773
2774 add_phi_arg (phi, arg, new_edge);
2775 }
2776
2777 redirect_edge_var_map_clear (old_edge);
2778 }
2779
2780 /* Returns the basic block after which the new basic block created
2781 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2782 near its "logical" location. This is of most help to humans looking
2783 at debugging dumps. */
2784
2785 static basic_block
2786 split_edge_bb_loc (edge edge_in)
2787 {
2788 basic_block dest = edge_in->dest;
2789
2790 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
2791 return edge_in->src;
2792 else
2793 return dest->prev_bb;
2794 }
2795
2796 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2797 Abort on abnormal edges. */
2798
2799 static basic_block
2800 gimple_split_edge (edge edge_in)
2801 {
2802 basic_block new_bb, after_bb, dest;
2803 edge new_edge, e;
2804
2805 /* Abnormal edges cannot be split. */
2806 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2807
2808 dest = edge_in->dest;
2809
2810 after_bb = split_edge_bb_loc (edge_in);
2811
2812 new_bb = create_empty_bb (after_bb);
2813 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2814 new_bb->count = edge_in->count;
2815 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2816 new_edge->probability = REG_BR_PROB_BASE;
2817 new_edge->count = edge_in->count;
2818
2819 e = redirect_edge_and_branch (edge_in, new_bb);
2820 gcc_assert (e == edge_in);
2821 reinstall_phi_args (new_edge, e);
2822
2823 return new_bb;
2824 }
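
/* Usage sketch (hypothetical helper): split E only when it is critical,
   i.e. when its source has several successors and its destination has
   several predecessors, so that statements can safely be inserted on it.
   The abnormal-edge check mirrors the assertion in gimple_split_edge.  */

static basic_block
split_edge_if_critical (edge e)
{
  if (EDGE_COUNT (e->src->succs) > 1
      && EDGE_COUNT (e->dest->preds) > 1
      && !(e->flags & EDGE_ABNORMAL))
    return gimple_split_edge (e);

  return NULL;
}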
2825
2826 /* Callback for walk_tree, check that all elements with address taken are
2827    properly noticed as such. The DATA argument is currently unused;
2828    it is only present to match the walk_tree callback signature. */
2829
2830 static tree
2831 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2832 {
2833 tree t = *tp, x;
2834
2835 if (TYPE_P (t))
2836 *walk_subtrees = 0;
2837
2838 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2839 #define CHECK_OP(N, MSG) \
2840 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2841 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2842
2843 switch (TREE_CODE (t))
2844 {
2845 case SSA_NAME:
2846 if (SSA_NAME_IN_FREE_LIST (t))
2847 {
2848 error ("SSA name in freelist but still referenced");
2849 return *tp;
2850 }
2851 break;
2852
2853 case INDIRECT_REF:
2854 x = TREE_OPERAND (t, 0);
2855 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2856 {
2857 error ("Indirect reference's operand is not a register or a constant.");
2858 return x;
2859 }
2860 break;
2861
2862 case ASSERT_EXPR:
2863 x = fold (ASSERT_EXPR_COND (t));
2864 if (x == boolean_false_node)
2865 {
2866 error ("ASSERT_EXPR with an always-false condition");
2867 return *tp;
2868 }
2869 break;
2870
2871 case MODIFY_EXPR:
2872 error ("MODIFY_EXPR not expected while having tuples.");
2873 return *tp;
2874
2875 case ADDR_EXPR:
2876 {
2877 bool old_constant;
2878 bool old_side_effects;
2879 bool new_constant;
2880 bool new_side_effects;
2881
2882 gcc_assert (is_gimple_address (t));
2883
2884 old_constant = TREE_CONSTANT (t);
2885 old_side_effects = TREE_SIDE_EFFECTS (t);
2886
2887 recompute_tree_invariant_for_addr_expr (t);
2888 new_side_effects = TREE_SIDE_EFFECTS (t);
2889 new_constant = TREE_CONSTANT (t);
2890
2891 if (old_constant != new_constant)
2892 {
2893 error ("constant not recomputed when ADDR_EXPR changed");
2894 return t;
2895 }
2896 if (old_side_effects != new_side_effects)
2897 {
2898 error ("side effects not recomputed when ADDR_EXPR changed");
2899 return t;
2900 }
2901
2902 /* Skip any references (they will be checked when we recurse down the
2903 tree) and ensure that any variable used as a prefix is marked
2904 addressable. */
2905 for (x = TREE_OPERAND (t, 0);
2906 handled_component_p (x);
2907 x = TREE_OPERAND (x, 0))
2908 ;
2909
2910 if (!(TREE_CODE (x) == VAR_DECL
2911 || TREE_CODE (x) == PARM_DECL
2912 || TREE_CODE (x) == RESULT_DECL))
2913 return NULL;
2914 if (!TREE_ADDRESSABLE (x))
2915 {
2916 error ("address taken, but ADDRESSABLE bit not set");
2917 return x;
2918 }
2919 if (DECL_GIMPLE_REG_P (x))
2920 {
2921 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2922 return x;
2923 }
2924
2925 break;
2926 }
2927
2928 case COND_EXPR:
2929 x = COND_EXPR_COND (t);
2930 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2931 {
2932 error ("non-integral used in condition");
2933 return x;
2934 }
2935 if (!is_gimple_condexpr (x))
2936 {
2937 error ("invalid conditional operand");
2938 return x;
2939 }
2940 break;
2941
2942 case NON_LVALUE_EXPR:
2943 gcc_unreachable ();
2944
2945 CASE_CONVERT:
2946 case FIX_TRUNC_EXPR:
2947 case FLOAT_EXPR:
2948 case NEGATE_EXPR:
2949 case ABS_EXPR:
2950 case BIT_NOT_EXPR:
2951 case TRUTH_NOT_EXPR:
2952 CHECK_OP (0, "invalid operand to unary operator");
2953 break;
2954
2955 case REALPART_EXPR:
2956 case IMAGPART_EXPR:
2957 case COMPONENT_REF:
2958 case ARRAY_REF:
2959 case ARRAY_RANGE_REF:
2960 case BIT_FIELD_REF:
2961 case VIEW_CONVERT_EXPR:
2962 /* We have a nest of references. Verify that each of the operands
2963 that determine where to reference is either a constant or a variable,
2964 verify that the base is valid, and then show we've already checked
2965 the subtrees. */
2966 while (handled_component_p (t))
2967 {
2968 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2969 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2970 else if (TREE_CODE (t) == ARRAY_REF
2971 || TREE_CODE (t) == ARRAY_RANGE_REF)
2972 {
2973 CHECK_OP (1, "invalid array index");
2974 if (TREE_OPERAND (t, 2))
2975 CHECK_OP (2, "invalid array lower bound");
2976 if (TREE_OPERAND (t, 3))
2977 CHECK_OP (3, "invalid array stride");
2978 }
2979 else if (TREE_CODE (t) == BIT_FIELD_REF)
2980 {
2981 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2982 || !host_integerp (TREE_OPERAND (t, 2), 1))
2983 {
2984 error ("invalid position or size operand to BIT_FIELD_REF");
2985 return t;
2986 }
2987 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2988 && (TYPE_PRECISION (TREE_TYPE (t))
2989 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2990 {
2991 error ("integral result type precision does not match "
2992 "field size of BIT_FIELD_REF");
2993 return t;
2994 }
2995 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2996 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2997 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2998 {
2999 error ("mode precision of non-integral result does not "
3000 "match field size of BIT_FIELD_REF");
3001 return t;
3002 }
3003 }
3004
3005 t = TREE_OPERAND (t, 0);
3006 }
3007
3008 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3009 {
3010 error ("invalid reference prefix");
3011 return t;
3012 }
3013 *walk_subtrees = 0;
3014 break;
3015 case PLUS_EXPR:
3016 case MINUS_EXPR:
3017       /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
3018 	 arithmetic must use POINTER_PLUS_EXPR instead. */
3019 if (POINTER_TYPE_P (TREE_TYPE (t)))
3020 {
3021 error ("invalid operand to plus/minus, type is a pointer");
3022 return t;
3023 }
3024 CHECK_OP (0, "invalid operand to binary operator");
3025 CHECK_OP (1, "invalid operand to binary operator");
3026 break;
3027
3028 case POINTER_PLUS_EXPR:
3029 /* Check to make sure the first operand is a pointer or reference type. */
3030 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3031 {
3032 error ("invalid operand to pointer plus, first operand is not a pointer");
3033 return t;
3034 }
3035       /* Check to make sure the second operand is an integer of type
3036 	 sizetype. */
3037 if (!useless_type_conversion_p (sizetype,
3038 TREE_TYPE (TREE_OPERAND (t, 1))))
3039 {
3040 error ("invalid operand to pointer plus, second operand is not an "
3041 "integer with type of sizetype.");
3042 return t;
3043 }
3044 /* FALLTHROUGH */
3045 case LT_EXPR:
3046 case LE_EXPR:
3047 case GT_EXPR:
3048 case GE_EXPR:
3049 case EQ_EXPR:
3050 case NE_EXPR:
3051 case UNORDERED_EXPR:
3052 case ORDERED_EXPR:
3053 case UNLT_EXPR:
3054 case UNLE_EXPR:
3055 case UNGT_EXPR:
3056 case UNGE_EXPR:
3057 case UNEQ_EXPR:
3058 case LTGT_EXPR:
3059 case MULT_EXPR:
3060 case TRUNC_DIV_EXPR:
3061 case CEIL_DIV_EXPR:
3062 case FLOOR_DIV_EXPR:
3063 case ROUND_DIV_EXPR:
3064 case TRUNC_MOD_EXPR:
3065 case CEIL_MOD_EXPR:
3066 case FLOOR_MOD_EXPR:
3067 case ROUND_MOD_EXPR:
3068 case RDIV_EXPR:
3069 case EXACT_DIV_EXPR:
3070 case MIN_EXPR:
3071 case MAX_EXPR:
3072 case LSHIFT_EXPR:
3073 case RSHIFT_EXPR:
3074 case LROTATE_EXPR:
3075 case RROTATE_EXPR:
3076 case BIT_IOR_EXPR:
3077 case BIT_XOR_EXPR:
3078 case BIT_AND_EXPR:
3079 CHECK_OP (0, "invalid operand to binary operator");
3080 CHECK_OP (1, "invalid operand to binary operator");
3081 break;
3082
3083 case CONSTRUCTOR:
3084 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3085 *walk_subtrees = 0;
3086 break;
3087
3088 default:
3089 break;
3090 }
3091 return NULL;
3092
3093 #undef CHECK_OP
3094 }
3095
3096
3097 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3098 Returns true if there is an error, otherwise false. */
3099
3100 static bool
3101 verify_types_in_gimple_min_lval (tree expr)
3102 {
3103 tree op;
3104
3105 if (is_gimple_id (expr))
3106 return false;
3107
3108 if (!INDIRECT_REF_P (expr)
3109 && TREE_CODE (expr) != TARGET_MEM_REF)
3110 {
3111 error ("invalid expression for min lvalue");
3112 return true;
3113 }
3114
3115 /* TARGET_MEM_REFs are strange beasts. */
3116 if (TREE_CODE (expr) == TARGET_MEM_REF)
3117 return false;
3118
3119 op = TREE_OPERAND (expr, 0);
3120 if (!is_gimple_val (op))
3121 {
3122 error ("invalid operand in indirect reference");
3123 debug_generic_stmt (op);
3124 return true;
3125 }
3126 if (!useless_type_conversion_p (TREE_TYPE (expr),
3127 TREE_TYPE (TREE_TYPE (op))))
3128 {
3129 error ("type mismatch in indirect reference");
3130 debug_generic_stmt (TREE_TYPE (expr));
3131 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3132 return true;
3133 }
3134
3135 return false;
3136 }
3137
3138 /* Verify if EXPR is a valid GIMPLE reference expression. If
3139 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3140 if there is an error, otherwise false. */
3141
3142 static bool
3143 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3144 {
3145 while (handled_component_p (expr))
3146 {
3147 tree op = TREE_OPERAND (expr, 0);
3148
3149 if (TREE_CODE (expr) == ARRAY_REF
3150 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3151 {
3152 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3153 || (TREE_OPERAND (expr, 2)
3154 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3155 || (TREE_OPERAND (expr, 3)
3156 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3157 {
3158 error ("invalid operands to array reference");
3159 debug_generic_stmt (expr);
3160 return true;
3161 }
3162 }
3163
3164 /* Verify if the reference array element types are compatible. */
3165 if (TREE_CODE (expr) == ARRAY_REF
3166 && !useless_type_conversion_p (TREE_TYPE (expr),
3167 TREE_TYPE (TREE_TYPE (op))))
3168 {
3169 error ("type mismatch in array reference");
3170 debug_generic_stmt (TREE_TYPE (expr));
3171 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3172 return true;
3173 }
3174 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3175 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3176 TREE_TYPE (TREE_TYPE (op))))
3177 {
3178 error ("type mismatch in array range reference");
3179 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3180 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3181 return true;
3182 }
3183
3184 if ((TREE_CODE (expr) == REALPART_EXPR
3185 || TREE_CODE (expr) == IMAGPART_EXPR)
3186 && !useless_type_conversion_p (TREE_TYPE (expr),
3187 TREE_TYPE (TREE_TYPE (op))))
3188 {
3189 error ("type mismatch in real/imagpart reference");
3190 debug_generic_stmt (TREE_TYPE (expr));
3191 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3192 return true;
3193 }
3194
3195 if (TREE_CODE (expr) == COMPONENT_REF
3196 && !useless_type_conversion_p (TREE_TYPE (expr),
3197 TREE_TYPE (TREE_OPERAND (expr, 1))))
3198 {
3199 error ("type mismatch in component reference");
3200 debug_generic_stmt (TREE_TYPE (expr));
3201 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3202 return true;
3203 }
3204
3205 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3206 is nothing to verify. Gross mismatches at most invoke
3207 undefined behavior. */
3208 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3209 && !handled_component_p (op))
3210 return false;
3211
3212 expr = op;
3213 }
3214
3215 return ((require_lvalue || !is_gimple_min_invariant (expr))
3216 && verify_types_in_gimple_min_lval (expr));
3217 }
3218
3219 /* Returns true if the TYPE_POINTER_TO (SRC_OBJ) list of pointer-to
3220    types is empty or contains a type trivially convertible to DEST. */
3221
3222 static bool
3223 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3224 {
3225 tree src;
3226
3227 if (!TYPE_POINTER_TO (src_obj))
3228 return true;
3229
3230 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3231 if (useless_type_conversion_p (dest, src))
3232 return true;
3233
3234 return false;
3235 }
3236
3237 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3238 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3239
3240 static bool
3241 valid_fixed_convert_types_p (tree type1, tree type2)
3242 {
3243 return (FIXED_POINT_TYPE_P (type1)
3244 && (INTEGRAL_TYPE_P (type2)
3245 || SCALAR_FLOAT_TYPE_P (type2)
3246 || FIXED_POINT_TYPE_P (type2)));
3247 }
3248
3249 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3250 is a problem, otherwise false. */
3251
3252 static bool
3253 verify_gimple_call (gimple stmt)
3254 {
3255 tree fn = gimple_call_fn (stmt);
3256 tree fntype;
3257
3258 if (!POINTER_TYPE_P (TREE_TYPE (fn))
3259 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3260 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
3261 {
3262 error ("non-function in gimple call");
3263 return true;
3264 }
3265
3266 if (gimple_call_lhs (stmt)
3267 && !is_gimple_lvalue (gimple_call_lhs (stmt)))
3268 {
3269 error ("invalid LHS in gimple call");
3270 return true;
3271 }
3272
3273 fntype = TREE_TYPE (TREE_TYPE (fn));
3274 if (gimple_call_lhs (stmt)
3275 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3276 TREE_TYPE (fntype))
3277 /* ??? At least C++ misses conversions at assignments from
3278 void * call results.
3279 ??? Java is completely off. Especially with functions
3280 returning java.lang.Object.
3281 For now simply allow arbitrary pointer type conversions. */
3282 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3283 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3284 {
3285 error ("invalid conversion in gimple call");
3286 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3287 debug_generic_stmt (TREE_TYPE (fntype));
3288 return true;
3289 }
3290
3291 /* ??? The C frontend passes unpromoted arguments in case it
3292 didn't see a function declaration before the call. So for now
3293 leave the call arguments unverified. Once we gimplify
3294 unit-at-a-time we have a chance to fix this. */
3295
3296 return false;
3297 }
3298
3299 /* Verifies the gimple comparison with the result type TYPE and
3300 the operands OP0 and OP1. */
3301
3302 static bool
3303 verify_gimple_comparison (tree type, tree op0, tree op1)
3304 {
3305 tree op0_type = TREE_TYPE (op0);
3306 tree op1_type = TREE_TYPE (op1);
3307
3308 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3309 {
3310 error ("invalid operands in gimple comparison");
3311 return true;
3312 }
3313
3314   /* For comparisons we do not have the operation's type as the
3315 effective type the comparison is carried out in. Instead
3316 we require that either the first operand is trivially
3317 convertible into the second, or the other way around.
3318 The resulting type of a comparison may be any integral type.
3319 Because we special-case pointers to void we allow
3320 comparisons of pointers with the same mode as well. */
3321 if ((!useless_type_conversion_p (op0_type, op1_type)
3322 && !useless_type_conversion_p (op1_type, op0_type)
3323 && (!POINTER_TYPE_P (op0_type)
3324 || !POINTER_TYPE_P (op1_type)
3325 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3326 || !INTEGRAL_TYPE_P (type))
3327 {
3328 error ("type mismatch in comparison expression");
3329 debug_generic_expr (type);
3330 debug_generic_expr (op0_type);
3331 debug_generic_expr (op1_type);
3332 return true;
3333 }
3334
3335 return false;
3336 }
3337
3338 /* Verify a gimple assignment statement STMT with an unary rhs.
3339 Returns true if anything is wrong. */
3340
3341 static bool
3342 verify_gimple_assign_unary (gimple stmt)
3343 {
3344 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3345 tree lhs = gimple_assign_lhs (stmt);
3346 tree lhs_type = TREE_TYPE (lhs);
3347 tree rhs1 = gimple_assign_rhs1 (stmt);
3348 tree rhs1_type = TREE_TYPE (rhs1);
3349
3350 if (!is_gimple_reg (lhs)
3351 && !(optimize == 0
3352 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3353 {
3354 error ("non-register as LHS of unary operation");
3355 return true;
3356 }
3357
3358 if (!is_gimple_val (rhs1))
3359 {
3360 error ("invalid operand in unary operation");
3361 return true;
3362 }
3363
3364 /* First handle conversions. */
3365 switch (rhs_code)
3366 {
3367 CASE_CONVERT:
3368 {
3369 /* Allow conversions between integral types and pointers only if
3370 there is no sign or zero extension involved.
3371 	 For targets where the precision of sizetype doesn't match that
3372 of pointers we need to allow arbitrary conversions from and
3373 to sizetype. */
3374 if ((POINTER_TYPE_P (lhs_type)
3375 && INTEGRAL_TYPE_P (rhs1_type)
3376 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3377 || rhs1_type == sizetype))
3378 || (POINTER_TYPE_P (rhs1_type)
3379 && INTEGRAL_TYPE_P (lhs_type)
3380 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3381 || lhs_type == sizetype)))
3382 return false;
3383
3384 /* Allow conversion from integer to offset type and vice versa. */
3385 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3386 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3387 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3388 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3389 return false;
3390
3391 /* Otherwise assert we are converting between types of the
3392 same kind. */
3393 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3394 {
3395 error ("invalid types in nop conversion");
3396 debug_generic_expr (lhs_type);
3397 debug_generic_expr (rhs1_type);
3398 return true;
3399 }
3400
3401 return false;
3402 }
3403
3404 case FIXED_CONVERT_EXPR:
3405 {
3406 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3407 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3408 {
3409 error ("invalid types in fixed-point conversion");
3410 debug_generic_expr (lhs_type);
3411 debug_generic_expr (rhs1_type);
3412 return true;
3413 }
3414
3415 return false;
3416 }
3417
3418 case FLOAT_EXPR:
3419 {
3420 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3421 {
3422 error ("invalid types in conversion to floating point");
3423 debug_generic_expr (lhs_type);
3424 debug_generic_expr (rhs1_type);
3425 return true;
3426 }
3427
3428 return false;
3429 }
3430
3431 case FIX_TRUNC_EXPR:
3432 {
3433 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3434 {
3435 error ("invalid types in conversion to integer");
3436 debug_generic_expr (lhs_type);
3437 debug_generic_expr (rhs1_type);
3438 return true;
3439 }
3440
3441 return false;
3442 }
3443
3444 case VEC_UNPACK_HI_EXPR:
3445 case VEC_UNPACK_LO_EXPR:
3446 case REDUC_MAX_EXPR:
3447 case REDUC_MIN_EXPR:
3448 case REDUC_PLUS_EXPR:
3449 case VEC_UNPACK_FLOAT_HI_EXPR:
3450 case VEC_UNPACK_FLOAT_LO_EXPR:
3451 /* FIXME. */
3452 return false;
3453
3454 case TRUTH_NOT_EXPR:
3455 case NEGATE_EXPR:
3456 case ABS_EXPR:
3457 case BIT_NOT_EXPR:
3458 case PAREN_EXPR:
3459 case NON_LVALUE_EXPR:
3460 case CONJ_EXPR:
3461 break;
3462
3463 default:
3464 gcc_unreachable ();
3465 }
3466
3467 /* For the remaining codes assert there is no conversion involved. */
3468 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3469 {
3470 error ("non-trivial conversion in unary operation");
3471 debug_generic_expr (lhs_type);
3472 debug_generic_expr (rhs1_type);
3473 return true;
3474 }
3475
3476 return false;
3477 }
3478
3479 /* Verify a gimple assignment statement STMT with a binary rhs.
3480 Returns true if anything is wrong. */
3481
3482 static bool
3483 verify_gimple_assign_binary (gimple stmt)
3484 {
3485 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3486 tree lhs = gimple_assign_lhs (stmt);
3487 tree lhs_type = TREE_TYPE (lhs);
3488 tree rhs1 = gimple_assign_rhs1 (stmt);
3489 tree rhs1_type = TREE_TYPE (rhs1);
3490 tree rhs2 = gimple_assign_rhs2 (stmt);
3491 tree rhs2_type = TREE_TYPE (rhs2);
3492
3493 if (!is_gimple_reg (lhs)
3494 && !(optimize == 0
3495 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3496 {
3497 error ("non-register as LHS of binary operation");
3498 return true;
3499 }
3500
3501 if (!is_gimple_val (rhs1)
3502 || !is_gimple_val (rhs2))
3503 {
3504 error ("invalid operands in binary operation");
3505 return true;
3506 }
3507
3508 /* First handle operations that involve different types. */
3509 switch (rhs_code)
3510 {
3511 case COMPLEX_EXPR:
3512 {
3513 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3514 || !(INTEGRAL_TYPE_P (rhs1_type)
3515 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3516 || !(INTEGRAL_TYPE_P (rhs2_type)
3517 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3518 {
3519 error ("type mismatch in complex expression");
3520 debug_generic_expr (lhs_type);
3521 debug_generic_expr (rhs1_type);
3522 debug_generic_expr (rhs2_type);
3523 return true;
3524 }
3525
3526 return false;
3527 }
3528
3529 case LSHIFT_EXPR:
3530 case RSHIFT_EXPR:
3531 case LROTATE_EXPR:
3532 case RROTATE_EXPR:
3533 {
3534 /* Shifts and rotates are ok on integral types, fixed point
3535 types and integer vector types. */
3536 if ((!INTEGRAL_TYPE_P (rhs1_type)
3537 && !FIXED_POINT_TYPE_P (rhs1_type)
3538 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3539 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE))
3540 || (!INTEGRAL_TYPE_P (rhs2_type)
3541 /* Vector shifts of vectors are also ok. */
3542 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3543 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE
3544 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3545 && TREE_CODE (TREE_TYPE (rhs2_type)) == INTEGER_TYPE))
3546 || !useless_type_conversion_p (lhs_type, rhs1_type))
3547 {
3548 error ("type mismatch in shift expression");
3549 debug_generic_expr (lhs_type);
3550 debug_generic_expr (rhs1_type);
3551 debug_generic_expr (rhs2_type);
3552 return true;
3553 }
3554
3555 return false;
3556 }
3557
3558 case VEC_LSHIFT_EXPR:
3559 case VEC_RSHIFT_EXPR:
3560 {
3561 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3562 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3563 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3564 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3565 || (!INTEGRAL_TYPE_P (rhs2_type)
3566 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3567 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3568 || !useless_type_conversion_p (lhs_type, rhs1_type))
3569 {
3570 error ("type mismatch in vector shift expression");
3571 debug_generic_expr (lhs_type);
3572 debug_generic_expr (rhs1_type);
3573 debug_generic_expr (rhs2_type);
3574 return true;
3575 }
3576 /* For shifting a vector of floating point components we
3577 only allow shifting by a constant multiple of the element size. */
3578 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3579 && (TREE_CODE (rhs2) != INTEGER_CST
3580 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3581 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3582 {
3583 error ("non-element sized vector shift of floating point vector");
3584 return true;
3585 }
3586
3587 return false;
3588 }
3589
3590 case PLUS_EXPR:
3591 {
3592 /* We use regular PLUS_EXPR for vectors.
3593 ??? This just makes the checker happy and may not be what is
3594 intended. */
3595 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3596 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3597 {
3598 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3599 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3600 {
3601 error ("invalid non-vector operands to vector valued plus");
3602 return true;
3603 }
3604 lhs_type = TREE_TYPE (lhs_type);
3605 rhs1_type = TREE_TYPE (rhs1_type);
3606 rhs2_type = TREE_TYPE (rhs2_type);
3607 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3608 	       the pointer into the second operand. */
3609 if (POINTER_TYPE_P (rhs2_type))
3610 {
3611 tree tem = rhs1_type;
3612 rhs1_type = rhs2_type;
3613 rhs2_type = tem;
3614 }
3615 goto do_pointer_plus_expr_check;
3616 }
3617 }
3618 /* Fallthru. */
3619 case MINUS_EXPR:
3620 {
3621 if (POINTER_TYPE_P (lhs_type)
3622 || POINTER_TYPE_P (rhs1_type)
3623 || POINTER_TYPE_P (rhs2_type))
3624 {
3625 error ("invalid (pointer) operands to plus/minus");
3626 return true;
3627 }
3628
3629 /* Continue with generic binary expression handling. */
3630 break;
3631 }
3632
3633 case POINTER_PLUS_EXPR:
3634 {
3635 do_pointer_plus_expr_check:
3636 if (!POINTER_TYPE_P (rhs1_type)
3637 || !useless_type_conversion_p (lhs_type, rhs1_type)
3638 || !useless_type_conversion_p (sizetype, rhs2_type))
3639 {
3640 error ("type mismatch in pointer plus expression");
3641 debug_generic_stmt (lhs_type);
3642 debug_generic_stmt (rhs1_type);
3643 debug_generic_stmt (rhs2_type);
3644 return true;
3645 }
3646
3647 return false;
3648 }
3649
3650 case TRUTH_ANDIF_EXPR:
3651 case TRUTH_ORIF_EXPR:
3652 gcc_unreachable ();
3653
3654 case TRUTH_AND_EXPR:
3655 case TRUTH_OR_EXPR:
3656 case TRUTH_XOR_EXPR:
3657 {
3658 /* We allow any kind of integral typed argument and result. */
3659 if (!INTEGRAL_TYPE_P (rhs1_type)
3660 || !INTEGRAL_TYPE_P (rhs2_type)
3661 || !INTEGRAL_TYPE_P (lhs_type))
3662 {
3663 error ("type mismatch in binary truth expression");
3664 debug_generic_expr (lhs_type);
3665 debug_generic_expr (rhs1_type);
3666 debug_generic_expr (rhs2_type);
3667 return true;
3668 }
3669
3670 return false;
3671 }
3672
3673 case LT_EXPR:
3674 case LE_EXPR:
3675 case GT_EXPR:
3676 case GE_EXPR:
3677 case EQ_EXPR:
3678 case NE_EXPR:
3679 case UNORDERED_EXPR:
3680 case ORDERED_EXPR:
3681 case UNLT_EXPR:
3682 case UNLE_EXPR:
3683 case UNGT_EXPR:
3684 case UNGE_EXPR:
3685 case UNEQ_EXPR:
3686 case LTGT_EXPR:
3687 /* Comparisons are also binary, but the result type is not
3688 connected to the operand types. */
3689 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3690
3691 case WIDEN_SUM_EXPR:
3692 case WIDEN_MULT_EXPR:
3693 case VEC_WIDEN_MULT_HI_EXPR:
3694 case VEC_WIDEN_MULT_LO_EXPR:
3695 case VEC_PACK_TRUNC_EXPR:
3696 case VEC_PACK_SAT_EXPR:
3697 case VEC_PACK_FIX_TRUNC_EXPR:
3698 case VEC_EXTRACT_EVEN_EXPR:
3699 case VEC_EXTRACT_ODD_EXPR:
3700 case VEC_INTERLEAVE_HIGH_EXPR:
3701 case VEC_INTERLEAVE_LOW_EXPR:
3702 /* FIXME. */
3703 return false;
3704
3705 case MULT_EXPR:
3706 case TRUNC_DIV_EXPR:
3707 case CEIL_DIV_EXPR:
3708 case FLOOR_DIV_EXPR:
3709 case ROUND_DIV_EXPR:
3710 case TRUNC_MOD_EXPR:
3711 case CEIL_MOD_EXPR:
3712 case FLOOR_MOD_EXPR:
3713 case ROUND_MOD_EXPR:
3714 case RDIV_EXPR:
3715 case EXACT_DIV_EXPR:
3716 case MIN_EXPR:
3717 case MAX_EXPR:
3718 case BIT_IOR_EXPR:
3719 case BIT_XOR_EXPR:
3720 case BIT_AND_EXPR:
3721 /* Continue with generic binary expression handling. */
3722 break;
3723
3724 default:
3725 gcc_unreachable ();
3726 }
3727
3728 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3729 || !useless_type_conversion_p (lhs_type, rhs2_type))
3730 {
3731 error ("type mismatch in binary expression");
3732 debug_generic_stmt (lhs_type);
3733 debug_generic_stmt (rhs1_type);
3734 debug_generic_stmt (rhs2_type);
3735 return true;
3736 }
3737
3738 return false;
3739 }
3740
3741 /* Verify a gimple assignment statement STMT with a single rhs.
3742 Returns true if anything is wrong. */
3743
3744 static bool
3745 verify_gimple_assign_single (gimple stmt)
3746 {
3747 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3748 tree lhs = gimple_assign_lhs (stmt);
3749 tree lhs_type = TREE_TYPE (lhs);
3750 tree rhs1 = gimple_assign_rhs1 (stmt);
3751 tree rhs1_type = TREE_TYPE (rhs1);
3752 bool res = false;
3753
3754 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3755 {
3756 error ("non-trivial conversion at assignment");
3757 debug_generic_expr (lhs_type);
3758 debug_generic_expr (rhs1_type);
3759 return true;
3760 }
3761
3762 if (handled_component_p (lhs))
3763 res |= verify_types_in_gimple_reference (lhs, true);
3764
3765 /* Special codes we cannot handle via their class. */
3766 switch (rhs_code)
3767 {
3768 case ADDR_EXPR:
3769 {
3770 tree op = TREE_OPERAND (rhs1, 0);
3771 if (!is_gimple_addressable (op))
3772 {
3773 error ("invalid operand in unary expression");
3774 return true;
3775 }
3776
3777 if (!one_pointer_to_useless_type_conversion_p (lhs_type,
3778 TREE_TYPE (op)))
3779 {
3780 error ("type mismatch in address expression");
3781 debug_generic_stmt (lhs_type);
3782 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
3783 return true;
3784 }
3785
3786 return verify_types_in_gimple_reference (op, true);
3787 }
3788
3789 /* tcc_reference */
3790 case COMPONENT_REF:
3791 case BIT_FIELD_REF:
3792 case INDIRECT_REF:
3793 case ALIGN_INDIRECT_REF:
3794 case MISALIGNED_INDIRECT_REF:
3795 case ARRAY_REF:
3796 case ARRAY_RANGE_REF:
3797 case VIEW_CONVERT_EXPR:
3798 case REALPART_EXPR:
3799 case IMAGPART_EXPR:
3800 case TARGET_MEM_REF:
3801 if (!is_gimple_reg (lhs)
3802 && is_gimple_reg_type (TREE_TYPE (lhs)))
3803 {
3804 error ("invalid rhs for gimple memory store");
3805 debug_generic_stmt (lhs);
3806 debug_generic_stmt (rhs1);
3807 return true;
3808 }
3809 return res || verify_types_in_gimple_reference (rhs1, false);
3810
3811 /* tcc_constant */
3812 case SSA_NAME:
3813 case INTEGER_CST:
3814 case REAL_CST:
3815 case FIXED_CST:
3816 case COMPLEX_CST:
3817 case VECTOR_CST:
3818 case STRING_CST:
3819 return res;
3820
3821 /* tcc_declaration */
3822 case CONST_DECL:
3823 return res;
3824 case VAR_DECL:
3825 case PARM_DECL:
3826 if (!is_gimple_reg (lhs)
3827 && !is_gimple_reg (rhs1)
3828 && is_gimple_reg_type (TREE_TYPE (lhs)))
3829 {
3830 error ("invalid rhs for gimple memory store");
3831 debug_generic_stmt (lhs);
3832 debug_generic_stmt (rhs1);
3833 return true;
3834 }
3835 return res;
3836
3837 case COND_EXPR:
3838 case CONSTRUCTOR:
3839 case OBJ_TYPE_REF:
3840 case ASSERT_EXPR:
3841 case WITH_SIZE_EXPR:
3842 case EXC_PTR_EXPR:
3843 case FILTER_EXPR:
3844 case POLYNOMIAL_CHREC:
3845 case DOT_PROD_EXPR:
3846 case VEC_COND_EXPR:
3847 case REALIGN_LOAD_EXPR:
3848 /* FIXME. */
3849 return res;
3850
3851 default:;
3852 }
3853
3854 return res;
3855 }
3856
3857 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3858 is a problem, otherwise false. */
3859
3860 static bool
3861 verify_gimple_assign (gimple stmt)
3862 {
3863 switch (gimple_assign_rhs_class (stmt))
3864 {
3865 case GIMPLE_SINGLE_RHS:
3866 return verify_gimple_assign_single (stmt);
3867
3868 case GIMPLE_UNARY_RHS:
3869 return verify_gimple_assign_unary (stmt);
3870
3871 case GIMPLE_BINARY_RHS:
3872 return verify_gimple_assign_binary (stmt);
3873
3874 default:
3875 gcc_unreachable ();
3876 }
3877 }
3878
3879 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3880 is a problem, otherwise false. */
3881
3882 static bool
3883 verify_gimple_return (gimple stmt)
3884 {
3885 tree op = gimple_return_retval (stmt);
3886 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3887
3888 /* We cannot test for present return values as we do not fix up missing
3889 return values from the original source. */
3890 if (op == NULL)
3891 return false;
3892
3893 if (!is_gimple_val (op)
3894 && TREE_CODE (op) != RESULT_DECL)
3895 {
3896 error ("invalid operand in return statement");
3897 debug_generic_stmt (op);
3898 return true;
3899 }
3900
3901 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
3902 /* ??? With C++ we can have the situation that the result
3903 decl is a reference type while the return type is an aggregate. */
3904 && !(TREE_CODE (op) == RESULT_DECL
3905 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
3906 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
3907 {
3908 error ("invalid conversion in return statement");
3909 debug_generic_stmt (restype);
3910 debug_generic_stmt (TREE_TYPE (op));
3911 return true;
3912 }
3913
3914 return false;
3915 }
3916
3917
3918 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3919 is a problem, otherwise false. */
3920
3921 static bool
3922 verify_gimple_goto (gimple stmt)
3923 {
3924 tree dest = gimple_goto_dest (stmt);
3925
3926 /* ??? We have two canonical forms of direct goto destinations, a
3927 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3928 if (TREE_CODE (dest) != LABEL_DECL
3929 && (!is_gimple_val (dest)
3930 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3931 {
3932 error ("goto destination is neither a label nor a pointer");
3933 return true;
3934 }
3935
3936 return false;
3937 }
3938
3939 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3940 is a problem, otherwise false. */
3941
3942 static bool
3943 verify_gimple_switch (gimple stmt)
3944 {
3945 if (!is_gimple_val (gimple_switch_index (stmt)))
3946 {
3947 error ("invalid operand to switch statement");
3948 debug_generic_stmt (gimple_switch_index (stmt));
3949 return true;
3950 }
3951
3952 return false;
3953 }
3954
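/* For illustration (hypothetical GIMPLE): "switch (i_2) <...>" passes
   because the SSA name i_2 is a GIMPLE value, whereas an index such as
   "a[i_2]" fails is_gimple_val and is reported above.  */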
3955
3956 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
3957 and false otherwise. */
3958
3959 static bool
3960 verify_gimple_phi (gimple stmt)
3961 {
3962 tree type = TREE_TYPE (gimple_phi_result (stmt));
3963 unsigned i;
3964
3965 if (!is_gimple_variable (gimple_phi_result (stmt)))
3966 {
3967 error ("invalid PHI result");
3968 return true;
3969 }
3970
3971 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3972 {
3973 tree arg = gimple_phi_arg_def (stmt, i);
3974 if ((is_gimple_reg (gimple_phi_result (stmt))
3975 && !is_gimple_val (arg))
3976 || (!is_gimple_reg (gimple_phi_result (stmt))
3977 && !is_gimple_addressable (arg)))
3978 {
3979 error ("invalid PHI argument");
3980 debug_generic_stmt (arg);
3981 return true;
3982 }
3983 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3984 {
3985 error ("incompatible types in PHI argument %u", i);
3986 debug_generic_stmt (type);
3987 debug_generic_stmt (TREE_TYPE (arg));
3988 return true;
3989 }
3990 }
3991
3992 return false;
3993 }
3994
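/* For illustration (hypothetical GIMPLE): in

     # x_3 = PHI <x_1(2), 5(4)>

   each argument (x_1, 5) must be a GIMPLE value because the result x_3
   is a register, and each argument type must be trivially convertible
   to the type of x_3; otherwise the errors above are raised.  */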
3995
3996 /* Verify the GIMPLE statement STMT. Returns true if there is an
3997 error, otherwise false. */
3998
3999 static bool
4000 verify_types_in_gimple_stmt (gimple stmt)
4001 {
4002 if (is_gimple_omp (stmt))
4003 {
4004 /* OpenMP directives are validated by the FE and never operated
4005 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4006 non-gimple expressions when the main index variable has had
4007 its address taken. This does not affect the loop itself
4008 because the header of a GIMPLE_OMP_FOR is merely used to determine
4009 how to set up the parallel iteration. */
4010 return false;
4011 }
4012
4013 switch (gimple_code (stmt))
4014 {
4015 case GIMPLE_ASSIGN:
4016 return verify_gimple_assign (stmt);
4017
4018 case GIMPLE_LABEL:
4019 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
4020
4021 case GIMPLE_CALL:
4022 return verify_gimple_call (stmt);
4023
4024 case GIMPLE_COND:
4025 return verify_gimple_comparison (boolean_type_node,
4026 gimple_cond_lhs (stmt),
4027 gimple_cond_rhs (stmt));
4028
4029 case GIMPLE_GOTO:
4030 return verify_gimple_goto (stmt);
4031
4032 case GIMPLE_SWITCH:
4033 return verify_gimple_switch (stmt);
4034
4035 case GIMPLE_RETURN:
4036 return verify_gimple_return (stmt);
4037
4038 case GIMPLE_ASM:
4039 return false;
4040
4041 case GIMPLE_CHANGE_DYNAMIC_TYPE:
4042 return (!is_gimple_val (gimple_cdt_location (stmt))
4043 || !POINTER_TYPE_P (TREE_TYPE (gimple_cdt_location (stmt))));
4044
4045 case GIMPLE_PHI:
4046 return verify_gimple_phi (stmt);
4047
4048 /* Tuples that do not have tree operands. */
4049 case GIMPLE_NOP:
4050 case GIMPLE_RESX:
4051 case GIMPLE_PREDICT:
4052 return false;
4053
4054 default:
4055 gcc_unreachable ();
4056 }
4057 }
4058
4059 /* Verify the GIMPLE statements inside the sequence STMTS. */
4060
4061 static bool
4062 verify_types_in_gimple_seq_2 (gimple_seq stmts)
4063 {
4064 gimple_stmt_iterator ittr;
4065 bool err = false;
4066
4067 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4068 {
4069 gimple stmt = gsi_stmt (ittr);
4070
4071 switch (gimple_code (stmt))
4072 {
4073 case GIMPLE_BIND:
4074 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
4075 break;
4076
4077 case GIMPLE_TRY:
4078 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
4079 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
4080 break;
4081
4082 case GIMPLE_EH_FILTER:
4083 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
4084 break;
4085
4086 case GIMPLE_CATCH:
4087 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
4088 break;
4089
4090 default:
4091 {
4092 bool err2 = verify_types_in_gimple_stmt (stmt);
4093 if (err2)
4094 debug_gimple_stmt (stmt);
4095 err |= err2;
4096 }
4097 }
4098 }
4099
4100 return err;
4101 }
4102
4103
4104 /* Verify the GIMPLE statements inside the statement list STMTS. */
4105
4106 void
4107 verify_types_in_gimple_seq (gimple_seq stmts)
4108 {
4109 if (verify_types_in_gimple_seq_2 (stmts))
4110 internal_error ("verify_gimple failed");
4111 }
4112
4113
4114 /* Verify STMT, return true if STMT is not in GIMPLE form.
4115 TODO: Implement type checking. */
4116
4117 static bool
4118 verify_stmt (gimple_stmt_iterator *gsi)
4119 {
4120 tree addr;
4121 struct walk_stmt_info wi;
4122 bool last_in_block = gsi_one_before_end_p (*gsi);
4123 gimple stmt = gsi_stmt (*gsi);
4124
4125 if (is_gimple_omp (stmt))
4126 {
4127 /* OpenMP directives are validated by the FE and never operated
4128 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4129 non-gimple expressions when the main index variable has had
4130 its address taken. This does not affect the loop itself
4131 because the header of a GIMPLE_OMP_FOR is merely used to determine
4132 how to set up the parallel iteration. */
4133 return false;
4134 }
4135
4136 /* FIXME. The C frontend passes unpromoted arguments in case it
4137 didn't see a function declaration before the call. */
4138 if (is_gimple_call (stmt))
4139 {
4140 tree decl;
4141
4142 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4143 {
4144 error ("invalid function in call statement");
4145 return true;
4146 }
4147
4148 decl = gimple_call_fndecl (stmt);
4149 if (decl
4150 && TREE_CODE (decl) == FUNCTION_DECL
4151 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4152 && (!DECL_PURE_P (decl))
4153 && (!TREE_READONLY (decl)))
4154 {
4155 error ("invalid pure const state for function");
4156 return true;
4157 }
4158 }
4159
4160 memset (&wi, 0, sizeof (wi));
4161 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
4162 if (addr)
4163 {
4164 debug_generic_expr (addr);
4165 inform (input_location, "in statement");
4166 debug_gimple_stmt (stmt);
4167 return true;
4168 }
4169
4170 /* If the statement is marked as part of an EH region, then it is
4171 expected that the statement could throw. Verify that when we
4172 have optimizations that simplify statements such that we prove
4173 that they cannot throw, that we update other data structures
4174 to match. */
4175 if (lookup_stmt_eh_region (stmt) >= 0)
4176 {
4177 /* During IPA passes, ipa-pure-const sets nothrow flags on calls
4178 and they are updated on statements only after fixup_cfg
4179 is executed at the beginning of the expansion stage. */
4180 if (!stmt_could_throw_p (stmt) && cgraph_state != CGRAPH_STATE_IPA_SSA)
4181 {
4182 error ("statement marked for throw, but doesn%'t");
4183 goto fail;
4184 }
4185 if (!last_in_block && stmt_can_throw_internal (stmt))
4186 {
4187 error ("statement marked for throw in middle of block");
4188 goto fail;
4189 }
4190 }
4191
4192 return false;
4193
4194 fail:
4195 debug_gimple_stmt (stmt);
4196 return true;
4197 }
4198
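/* For illustration (hypothetical): a call that is recorded in the EH
   table but has since been simplified so that it can no longer throw
   trips the first check above; the exception for CGRAPH_STATE_IPA_SSA
   covers the window where ipa-pure-const has set nothrow flags that
   fixup_cfg has not yet propagated to the statements.  */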
4199
4200 /* Return true when T can be shared. */
4201
4202 static bool
4203 tree_node_can_be_shared (tree t)
4204 {
4205 if (IS_TYPE_OR_DECL_P (t)
4206 || is_gimple_min_invariant (t)
4207 || TREE_CODE (t) == SSA_NAME
4208 || t == error_mark_node
4209 || TREE_CODE (t) == IDENTIFIER_NODE)
4210 return true;
4211
4212 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4213 return true;
4214
4215 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4216 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4217 || TREE_CODE (t) == COMPONENT_REF
4218 || TREE_CODE (t) == REALPART_EXPR
4219 || TREE_CODE (t) == IMAGPART_EXPR)
4220 t = TREE_OPERAND (t, 0);
4221
4222 if (DECL_P (t))
4223 return true;
4224
4225 return false;
4226 }
4227
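/* As an illustration (hypothetical C source): for "a.b[3]" the loop
   above peels the ARRAY_REF (its index is invariant) and then the
   COMPONENT_REF and reaches the VAR_DECL "a", so the node may be
   shared; for "a.b[i]" the ARRAY_REF index is not invariant, the loop
   stops there, and sharing is rejected.  */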
4228
4229 /* Called via walk_gimple_stmt. Verify tree sharing. */
4230
4231 static tree
4232 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4233 {
4234 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4235 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4236
4237 if (tree_node_can_be_shared (*tp))
4238 {
4239 *walk_subtrees = false;
4240 return NULL;
4241 }
4242
4243 if (pointer_set_insert (visited, *tp))
4244 return *tp;
4245
4246 return NULL;
4247 }
4248
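/* For illustration (hypothetical): if one COMPONENT_REF node for "a.b"
   were referenced from the operands of two different statements, the
   second walk would find it already in VISITED and return it, which
   makes the caller report "incorrect sharing of tree nodes".  */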
4249
4250 static bool eh_error_found;
4251 static int
4252 verify_eh_throw_stmt_node (void **slot, void *data)
4253 {
4254 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4255 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4256
4257 if (!pointer_set_contains (visited, node->stmt))
4258 {
4259 error ("dead STMT in EH table");
4260 debug_gimple_stmt (node->stmt);
4261 eh_error_found = true;
4262 }
4263 return 1;
4264 }
4265
4266
4267 /* Verify the GIMPLE statements in every basic block. */
4268
4269 void
4270 verify_stmts (void)
4271 {
4272 basic_block bb;
4273 gimple_stmt_iterator gsi;
4274 bool err = false;
4275 struct pointer_set_t *visited, *visited_stmts;
4276 tree addr;
4277 struct walk_stmt_info wi;
4278
4279 timevar_push (TV_TREE_STMT_VERIFY);
4280 visited = pointer_set_create ();
4281 visited_stmts = pointer_set_create ();
4282
4283 memset (&wi, 0, sizeof (wi));
4284 wi.info = (void *) visited;
4285
4286 FOR_EACH_BB (bb)
4287 {
4288 gimple phi;
4289 size_t i;
4290
4291 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4292 {
4293 phi = gsi_stmt (gsi);
4294 pointer_set_insert (visited_stmts, phi);
4295 if (gimple_bb (phi) != bb)
4296 {
4297 error ("gimple_bb (phi) is set to a wrong basic block");
4298 err |= true;
4299 }
4300
4301 for (i = 0; i < gimple_phi_num_args (phi); i++)
4302 {
4303 tree t = gimple_phi_arg_def (phi, i);
4304 tree addr;
4305
4306 if (!t)
4307 {
4308 error ("missing PHI def");
4309 debug_gimple_stmt (phi);
4310 err |= true;
4311 continue;
4312 }
4313 /* Addressable variables do have SSA_NAMEs but they
4314 are not considered gimple values. */
4315 else if (TREE_CODE (t) != SSA_NAME
4316 && TREE_CODE (t) != FUNCTION_DECL
4317 && !is_gimple_min_invariant (t))
4318 {
4319 error ("PHI argument is not a GIMPLE value");
4320 debug_gimple_stmt (phi);
4321 debug_generic_expr (t);
4322 err |= true;
4323 }
4324
4325 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4326 if (addr)
4327 {
4328 error ("incorrect sharing of tree nodes");
4329 debug_gimple_stmt (phi);
4330 debug_generic_expr (addr);
4331 err |= true;
4332 }
4333 }
4334
4335 #ifdef ENABLE_TYPES_CHECKING
4336 if (verify_gimple_phi (phi))
4337 {
4338 debug_gimple_stmt (phi);
4339 err |= true;
4340 }
4341 #endif
4342 }
4343
4344 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4345 {
4346 gimple stmt = gsi_stmt (gsi);
4347
4348 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4349 || gimple_code (stmt) == GIMPLE_BIND)
4350 {
4351 error ("invalid GIMPLE statement");
4352 debug_gimple_stmt (stmt);
4353 err |= true;
4354 }
4355
4356 pointer_set_insert (visited_stmts, stmt);
4357
4358 if (gimple_bb (stmt) != bb)
4359 {
4360 error ("gimple_bb (stmt) is set to a wrong basic block");
4361 err |= true;
4362 }
4363
4364 if (gimple_code (stmt) == GIMPLE_LABEL)
4365 {
4366 tree decl = gimple_label_label (stmt);
4367 int uid = LABEL_DECL_UID (decl);
4368
4369 if (uid == -1
4370 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4371 {
4372 error ("incorrect entry in label_to_block_map");
4373 err |= true;
4374 }
4375 }
4376
4377 err |= verify_stmt (&gsi);
4378
4379 #ifdef ENABLE_TYPES_CHECKING
4380 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4381 {
4382 debug_gimple_stmt (stmt);
4383 err |= true;
4384 }
4385 #endif
4386 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4387 if (addr)
4388 {
4389 error ("incorrect sharing of tree nodes");
4390 debug_gimple_stmt (stmt);
4391 debug_generic_expr (addr);
4392 err |= true;
4393 }
4394 gsi_next (&gsi);
4395 }
4396 }
4397
4398 eh_error_found = false;
4399 if (get_eh_throw_stmt_table (cfun))
4400 htab_traverse (get_eh_throw_stmt_table (cfun),
4401 verify_eh_throw_stmt_node,
4402 visited_stmts);
4403
4404 if (err | eh_error_found)
4405 internal_error ("verify_stmts failed");
4406
4407 pointer_set_destroy (visited);
4408 pointer_set_destroy (visited_stmts);
4409 verify_histograms ();
4410 timevar_pop (TV_TREE_STMT_VERIFY);
4411 }
4412
4413
4414 /* Verifies that the flow information is OK. */
4415
4416 static int
4417 gimple_verify_flow_info (void)
4418 {
4419 int err = 0;
4420 basic_block bb;
4421 gimple_stmt_iterator gsi;
4422 gimple stmt;
4423 edge e;
4424 edge_iterator ei;
4425
4426 if (ENTRY_BLOCK_PTR->il.gimple)
4427 {
4428 error ("ENTRY_BLOCK has IL associated with it");
4429 err = 1;
4430 }
4431
4432 if (EXIT_BLOCK_PTR->il.gimple)
4433 {
4434 error ("EXIT_BLOCK has IL associated with it");
4435 err = 1;
4436 }
4437
4438 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4439 if (e->flags & EDGE_FALLTHRU)
4440 {
4441 error ("fallthru to exit from bb %d", e->src->index);
4442 err = 1;
4443 }
4444
4445 FOR_EACH_BB (bb)
4446 {
4447 bool found_ctrl_stmt = false;
4448
4449 stmt = NULL;
4450
4451 /* Skip labels at the start of the basic block. */
4452 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4453 {
4454 tree label;
4455 gimple prev_stmt = stmt;
4456
4457 stmt = gsi_stmt (gsi);
4458
4459 if (gimple_code (stmt) != GIMPLE_LABEL)
4460 break;
4461
4462 label = gimple_label_label (stmt);
4463 if (prev_stmt && DECL_NONLOCAL (label))
4464 {
4465 error ("nonlocal label ");
4466 print_generic_expr (stderr, label, 0);
4467 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4468 bb->index);
4469 err = 1;
4470 }
4471
4472 if (label_to_block (label) != bb)
4473 {
4474 error ("label ");
4475 print_generic_expr (stderr, label, 0);
4476 fprintf (stderr, " to block does not match in bb %d",
4477 bb->index);
4478 err = 1;
4479 }
4480
4481 if (decl_function_context (label) != current_function_decl)
4482 {
4483 error ("label ");
4484 print_generic_expr (stderr, label, 0);
4485 fprintf (stderr, " has incorrect context in bb %d",
4486 bb->index);
4487 err = 1;
4488 }
4489 }
4490
4491 /* Verify that body of basic block BB is free of control flow. */
4492 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4493 {
4494 gimple stmt = gsi_stmt (gsi);
4495
4496 if (found_ctrl_stmt)
4497 {
4498 error ("control flow in the middle of basic block %d",
4499 bb->index);
4500 err = 1;
4501 }
4502
4503 if (stmt_ends_bb_p (stmt))
4504 found_ctrl_stmt = true;
4505
4506 if (gimple_code (stmt) == GIMPLE_LABEL)
4507 {
4508 error ("label ");
4509 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4510 fprintf (stderr, " in the middle of basic block %d", bb->index);
4511 err = 1;
4512 }
4513 }
4514
4515 gsi = gsi_last_bb (bb);
4516 if (gsi_end_p (gsi))
4517 continue;
4518
4519 stmt = gsi_stmt (gsi);
4520
4521 err |= verify_eh_edges (stmt);
4522
4523 if (is_ctrl_stmt (stmt))
4524 {
4525 FOR_EACH_EDGE (e, ei, bb->succs)
4526 if (e->flags & EDGE_FALLTHRU)
4527 {
4528 error ("fallthru edge after a control statement in bb %d",
4529 bb->index);
4530 err = 1;
4531 }
4532 }
4533
4534 if (gimple_code (stmt) != GIMPLE_COND)
4535 {
4536 /* Verify that there are no edges with EDGE_TRUE_VALUE or
4537 EDGE_FALSE_VALUE set after anything other than a GIMPLE_COND. */
4538 FOR_EACH_EDGE (e, ei, bb->succs)
4539 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4540 {
4541 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4542 bb->index);
4543 err = 1;
4544 }
4545 }
4546
4547 switch (gimple_code (stmt))
4548 {
4549 case GIMPLE_COND:
4550 {
4551 edge true_edge;
4552 edge false_edge;
4553
4554 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4555
4556 if (!true_edge
4557 || !false_edge
4558 || !(true_edge->flags & EDGE_TRUE_VALUE)
4559 || !(false_edge->flags & EDGE_FALSE_VALUE)
4560 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4561 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4562 || EDGE_COUNT (bb->succs) >= 3)
4563 {
4564 error ("wrong outgoing edge flags at end of bb %d",
4565 bb->index);
4566 err = 1;
4567 }
4568 }
4569 break;
4570
4571 case GIMPLE_GOTO:
4572 if (simple_goto_p (stmt))
4573 {
4574 error ("explicit goto at end of bb %d", bb->index);
4575 err = 1;
4576 }
4577 else
4578 {
4579 /* FIXME. We should double check that the labels in the
4580 destination blocks have their address taken. */
4581 FOR_EACH_EDGE (e, ei, bb->succs)
4582 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4583 | EDGE_FALSE_VALUE))
4584 || !(e->flags & EDGE_ABNORMAL))
4585 {
4586 error ("wrong outgoing edge flags at end of bb %d",
4587 bb->index);
4588 err = 1;
4589 }
4590 }
4591 break;
4592
4593 case GIMPLE_RETURN:
4594 if (!single_succ_p (bb)
4595 || (single_succ_edge (bb)->flags
4596 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4597 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4598 {
4599 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4600 err = 1;
4601 }
4602 if (single_succ (bb) != EXIT_BLOCK_PTR)
4603 {
4604 error ("return edge does not point to exit in bb %d",
4605 bb->index);
4606 err = 1;
4607 }
4608 break;
4609
4610 case GIMPLE_SWITCH:
4611 {
4612 tree prev;
4613 edge e;
4614 size_t i, n;
4615
4616 n = gimple_switch_num_labels (stmt);
4617
4618 /* Mark all the destination basic blocks. */
4619 for (i = 0; i < n; ++i)
4620 {
4621 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4622 basic_block label_bb = label_to_block (lab);
4623 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4624 label_bb->aux = (void *)1;
4625 }
4626
4627 /* Verify that the case labels are sorted. */
4628 prev = gimple_switch_label (stmt, 0);
4629 for (i = 1; i < n; ++i)
4630 {
4631 tree c = gimple_switch_label (stmt, i);
4632 if (!CASE_LOW (c))
4633 {
4634 error ("found default case not at the start of "
4635 "case vector");
4636 err = 1;
4637 continue;
4638 }
4639 if (CASE_LOW (prev)
4640 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4641 {
4642 error ("case labels not sorted: ");
4643 print_generic_expr (stderr, prev, 0);
4644 fprintf (stderr, " is greater than ");
4645 print_generic_expr (stderr, c, 0);
4646 fprintf (stderr, " but comes before it.\n");
4647 err = 1;
4648 }
4649 prev = c;
4650 }
4651 /* VRP will remove the default case if it can prove it will
4652 never be executed. So do not verify there always exists
4653 a default case here. */
4654
4655 FOR_EACH_EDGE (e, ei, bb->succs)
4656 {
4657 if (!e->dest->aux)
4658 {
4659 error ("extra outgoing edge %d->%d",
4660 bb->index, e->dest->index);
4661 err = 1;
4662 }
4663
4664 e->dest->aux = (void *)2;
4665 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4666 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4667 {
4668 error ("wrong outgoing edge flags at end of bb %d",
4669 bb->index);
4670 err = 1;
4671 }
4672 }
4673
4674 /* Check that we have all of them. */
4675 for (i = 0; i < n; ++i)
4676 {
4677 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4678 basic_block label_bb = label_to_block (lab);
4679
4680 if (label_bb->aux != (void *)2)
4681 {
4682 error ("missing edge %i->%i", bb->index, label_bb->index);
4683 err = 1;
4684 }
4685 }
4686
4687 FOR_EACH_EDGE (e, ei, bb->succs)
4688 e->dest->aux = (void *)0;
4689 }
4690
4691 default: ;
4692 }
4693 }
4694
4695 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4696 verify_dominators (CDI_DOMINATORS);
4697
4698 return err;
4699 }
4700
4701
4702 /* Updates phi nodes after creating a forwarder block joined
4703 by edge FALLTHRU. */
4704
4705 static void
4706 gimple_make_forwarder_block (edge fallthru)
4707 {
4708 edge e;
4709 edge_iterator ei;
4710 basic_block dummy, bb;
4711 tree var;
4712 gimple_stmt_iterator gsi;
4713
4714 dummy = fallthru->src;
4715 bb = fallthru->dest;
4716
4717 if (single_pred_p (bb))
4718 return;
4719
4720 /* If we redirected a branch we must create new PHI nodes at the
4721 start of BB. */
4722 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4723 {
4724 gimple phi, new_phi;
4725
4726 phi = gsi_stmt (gsi);
4727 var = gimple_phi_result (phi);
4728 new_phi = create_phi_node (var, bb);
4729 SSA_NAME_DEF_STMT (var) = new_phi;
4730 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4731 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru);
4732 }
4733
4734 /* Add the arguments we have stored on edges. */
4735 FOR_EACH_EDGE (e, ei, bb->preds)
4736 {
4737 if (e == fallthru)
4738 continue;
4739
4740 flush_pending_stmts (e);
4741 }
4742 }
4743
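/* For illustration (hypothetical GIMPLE): if the forwarder DUMMY holds

     # x_3 = PHI <x_1(2), x_2(3)>

   the loop above creates "# x_3 = PHI <x_4(F)>" in BB, with F being the
   fallthru edge, and renames the result in DUMMY to x_4; arguments for
   the remaining predecessors of BB are then flushed from the pending
   statement lists.  */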
4744
4745 /* Return a non-special label at the head of basic block BB.
4746 Create one if it doesn't exist. */
4747
4748 tree
4749 gimple_block_label (basic_block bb)
4750 {
4751 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4752 bool first = true;
4753 tree label;
4754 gimple stmt;
4755
4756 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4757 {
4758 stmt = gsi_stmt (i);
4759 if (gimple_code (stmt) != GIMPLE_LABEL)
4760 break;
4761 label = gimple_label_label (stmt);
4762 if (!DECL_NONLOCAL (label))
4763 {
4764 if (!first)
4765 gsi_move_before (&i, &s);
4766 return label;
4767 }
4768 }
4769
4770 label = create_artificial_label ();
4771 stmt = gimple_build_label (label);
4772 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4773 return label;
4774 }
4775
4776
4777 /* Attempt to perform edge redirection by replacing a possibly complex
4778 jump instruction by a goto or by removing the jump completely.
4779 This can apply only if all edges now point to the same block. The
4780 parameters and return values are equivalent to
4781 redirect_edge_and_branch. */
4782
4783 static edge
4784 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4785 {
4786 basic_block src = e->src;
4787 gimple_stmt_iterator i;
4788 gimple stmt;
4789
4790 /* We can replace or remove a complex jump only when we have exactly
4791 two edges. */
4792 if (EDGE_COUNT (src->succs) != 2
4793 /* Verify that all targets will be TARGET. Specifically, the
4794 edge that is not E must also go to TARGET. */
4795 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4796 return NULL;
4797
4798 i = gsi_last_bb (src);
4799 if (gsi_end_p (i))
4800 return NULL;
4801
4802 stmt = gsi_stmt (i);
4803
4804 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4805 {
4806 gsi_remove (&i, true);
4807 e = ssa_redirect_edge (e, target);
4808 e->flags = EDGE_FALLTHRU;
4809 return e;
4810 }
4811
4812 return NULL;
4813 }
4814
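/* A minimal usage sketch (hypothetical CFG): if SRC ends in
   "if (a_1 > 0) goto L1; else goto L2;" and the edge that is not E
   already leads to TARGET, the GIMPLE_COND is removed and E is
   redirected to TARGET, leaving a plain fallthru edge.  */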
4815
4816 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4817 edge representing the redirected branch. */
4818
4819 static edge
4820 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4821 {
4822 basic_block bb = e->src;
4823 gimple_stmt_iterator gsi;
4824 edge ret;
4825 gimple stmt;
4826
4827 if (e->flags & EDGE_ABNORMAL)
4828 return NULL;
4829
4830 if (e->src != ENTRY_BLOCK_PTR
4831 && (ret = gimple_try_redirect_by_replacing_jump (e, dest)))
4832 return ret;
4833
4834 if (e->dest == dest)
4835 return NULL;
4836
4837 if (e->flags & EDGE_EH)
4838 return redirect_eh_edge (e, dest);
4839
4840 gsi = gsi_last_bb (bb);
4841 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4842
4843 switch (stmt ? gimple_code (stmt) : ERROR_MARK)
4844 {
4845 case GIMPLE_COND:
4846 /* For COND_EXPR, we only need to redirect the edge. */
4847 break;
4848
4849 case GIMPLE_GOTO:
4850 /* No non-abnormal edges should lead from a non-simple goto, and
4851 simple ones should be represented implicitly. */
4852 gcc_unreachable ();
4853
4854 case GIMPLE_SWITCH:
4855 {
4856 tree label = gimple_block_label (dest);
4857 tree cases = get_cases_for_edge (e, stmt);
4858
4859 /* If we have a list of cases associated with E, then use it
4860 as it's a lot faster than walking the entire case vector. */
4861 if (cases)
4862 {
4863 edge e2 = find_edge (e->src, dest);
4864 tree last, first;
4865
4866 first = cases;
4867 while (cases)
4868 {
4869 last = cases;
4870 CASE_LABEL (cases) = label;
4871 cases = TREE_CHAIN (cases);
4872 }
4873
4874 /* If there was already an edge in the CFG, then we need
4875 to move all the cases associated with E to E2. */
4876 if (e2)
4877 {
4878 tree cases2 = get_cases_for_edge (e2, stmt);
4879
4880 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4881 TREE_CHAIN (cases2) = first;
4882 }
4883 }
4884 else
4885 {
4886 size_t i, n = gimple_switch_num_labels (stmt);
4887
4888 for (i = 0; i < n; i++)
4889 {
4890 tree elt = gimple_switch_label (stmt, i);
4891 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4892 CASE_LABEL (elt) = label;
4893 }
4894 }
4895
4896 break;
4897 }
4898
4899 case GIMPLE_RETURN:
4900 gsi_remove (&gsi, true);
4901 e->flags |= EDGE_FALLTHRU;
4902 break;
4903
4904 case GIMPLE_OMP_RETURN:
4905 case GIMPLE_OMP_CONTINUE:
4906 case GIMPLE_OMP_SECTIONS_SWITCH:
4907 case GIMPLE_OMP_FOR:
4908 /* The edges from OMP constructs can be simply redirected. */
4909 break;
4910
4911 default:
4912 /* Otherwise it must be a fallthru edge, and we don't need to
4913 do anything besides redirecting it. */
4914 gcc_assert (e->flags & EDGE_FALLTHRU);
4915 break;
4916 }
4917
4918
4919 /* Now update the edges in the CFG; ssa_redirect_edge also updates
4920 the PHI nodes as necessary. */
4921 e = ssa_redirect_edge (e, dest);
4922
4923 return e;
4924 }
4925
4926 /* Returns true if it is possible to remove edge E by redirecting
4927 it to the destination of the other edge from E->src. */
4928
4929 static bool
4930 gimple_can_remove_branch_p (const_edge e)
4931 {
4932 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
4933 return false;
4934
4935 return true;
4936 }
4937
4938 /* Simple wrapper, as we can always redirect fallthru edges. */
4939
4940 static basic_block
4941 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4942 {
4943 e = gimple_redirect_edge_and_branch (e, dest);
4944 gcc_assert (e);
4945
4946 return NULL;
4947 }
4948
4949
4950 /* Splits basic block BB after statement STMT (but at least after the
4951 labels). If STMT is NULL, BB is split just after the labels. */
4952
4953 static basic_block
4954 gimple_split_block (basic_block bb, void *stmt)
4955 {
4956 gimple_stmt_iterator gsi;
4957 gimple_stmt_iterator gsi_tgt;
4958 gimple act;
4959 gimple_seq list;
4960 basic_block new_bb;
4961 edge e;
4962 edge_iterator ei;
4963
4964 new_bb = create_empty_bb (bb);
4965
4966 /* Redirect the outgoing edges. */
4967 new_bb->succs = bb->succs;
4968 bb->succs = NULL;
4969 FOR_EACH_EDGE (e, ei, new_bb->succs)
4970 e->src = new_bb;
4971
4972 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
4973 stmt = NULL;
4974
4975 /* Move everything from GSI to the new basic block. */
4976 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4977 {
4978 act = gsi_stmt (gsi);
4979 if (gimple_code (act) == GIMPLE_LABEL)
4980 continue;
4981
4982 if (!stmt)
4983 break;
4984
4985 if (stmt == act)
4986 {
4987 gsi_next (&gsi);
4988 break;
4989 }
4990 }
4991
4992 if (gsi_end_p (gsi))
4993 return new_bb;
4994
4995 /* Split the statement list - avoid re-creating new containers as this
4996 brings ugly quadratic memory consumption in the inliner.
4997 (We are still quadratic since we need to update stmt BB pointers,
4998 sadly.) */
4999 list = gsi_split_seq_before (&gsi);
5000 set_bb_seq (new_bb, list);
5001 for (gsi_tgt = gsi_start (list);
5002 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5003 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5004
5005 return new_bb;
5006 }
5007
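/* A minimal usage sketch (hypothetical statement names):

     basic_block new_bb = gimple_split_block (bb, stmt);

   keeps the labels and everything up to and including STMT in BB and
   moves the remaining statements, along with BB's outgoing edges, to
   NEW_BB.  */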
5008
5009 /* Moves basic block BB after block AFTER. */
5010
5011 static bool
5012 gimple_move_block_after (basic_block bb, basic_block after)
5013 {
5014 if (bb->prev_bb == after)
5015 return true;
5016
5017 unlink_block (bb);
5018 link_block (bb, after);
5019
5020 return true;
5021 }
5022
5023
5024 /* Return true if basic_block can be duplicated. */
5025
5026 static bool
5027 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5028 {
5029 return true;
5030 }
5031
5032 /* Create a duplicate of the basic block BB. NOTE: This does not
5033 preserve SSA form. */
5034
5035 static basic_block
5036 gimple_duplicate_bb (basic_block bb)
5037 {
5038 basic_block new_bb;
5039 gimple_stmt_iterator gsi, gsi_tgt;
5040 gimple_seq phis = phi_nodes (bb);
5041 gimple phi, stmt, copy;
5042
5043 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5044
5045 /* Copy the PHI nodes. We ignore PHI node arguments here because
5046 the incoming edges have not been set up yet. */
5047 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5048 {
5049 phi = gsi_stmt (gsi);
5050 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5051 create_new_def_for (gimple_phi_result (copy), copy,
5052 gimple_phi_result_ptr (copy));
5053 }
5054
5055 gsi_tgt = gsi_start_bb (new_bb);
5056 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5057 {
5058 def_operand_p def_p;
5059 ssa_op_iter op_iter;
5060 int region;
5061
5062 stmt = gsi_stmt (gsi);
5063 if (gimple_code (stmt) == GIMPLE_LABEL)
5064 continue;
5065
5066 /* Create a new copy of STMT and duplicate STMT's virtual
5067 operands. */
5068 copy = gimple_copy (stmt);
5069 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5070 region = lookup_stmt_eh_region (stmt);
5071 if (region >= 0)
5072 add_stmt_to_eh_region (copy, region);
5073 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5074
5075 /* Create new names for all the definitions created by COPY and
5076 add replacement mappings for each new name. */
5077 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5078 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5079 }
5080
5081 return new_bb;
5082 }
5083
5084 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5085
5086 static void
5087 add_phi_args_after_copy_edge (edge e_copy)
5088 {
5089 basic_block bb, bb_copy = e_copy->src, dest;
5090 edge e;
5091 edge_iterator ei;
5092 gimple phi, phi_copy;
5093 tree def;
5094 gimple_stmt_iterator psi, psi_copy;
5095
5096 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5097 return;
5098
5099 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5100
5101 if (e_copy->dest->flags & BB_DUPLICATED)
5102 dest = get_bb_original (e_copy->dest);
5103 else
5104 dest = e_copy->dest;
5105
5106 e = find_edge (bb, dest);
5107 if (!e)
5108 {
5109 /* During loop unrolling the target of the latch edge is copied.
5110 In this case we are not looking for the edge to DEST, but for
5111 the edge to the duplicated block whose original was DEST. */
5112 FOR_EACH_EDGE (e, ei, bb->succs)
5113 {
5114 if ((e->dest->flags & BB_DUPLICATED)
5115 && get_bb_original (e->dest) == dest)
5116 break;
5117 }
5118
5119 gcc_assert (e != NULL);
5120 }
5121
5122 for (psi = gsi_start_phis (e->dest),
5123 psi_copy = gsi_start_phis (e_copy->dest);
5124 !gsi_end_p (psi);
5125 gsi_next (&psi), gsi_next (&psi_copy))
5126 {
5127 phi = gsi_stmt (psi);
5128 phi_copy = gsi_stmt (psi_copy);
5129 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5130 add_phi_arg (phi_copy, def, e_copy);
5131 }
5132 }
5133
5134
5135 /* Basic block BB_COPY was created by code duplication. Add phi node
5136 arguments for edges going out of BB_COPY. The blocks that were
5137 duplicated have BB_DUPLICATED set. */
5138
5139 void
5140 add_phi_args_after_copy_bb (basic_block bb_copy)
5141 {
5142 edge e_copy;
5143 edge_iterator ei;
5144
5145 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5146 {
5147 add_phi_args_after_copy_edge (e_copy);
5148 }
5149 }
5150
5151 /* Blocks in REGION_COPY array of length N_REGION were created by
5152 duplication of basic blocks. Add phi node arguments for edges
5153 going from these blocks. If E_COPY is not NULL, also add
5154 phi node arguments for its destination. */
5155
5156 void
5157 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5158 edge e_copy)
5159 {
5160 unsigned i;
5161
5162 for (i = 0; i < n_region; i++)
5163 region_copy[i]->flags |= BB_DUPLICATED;
5164
5165 for (i = 0; i < n_region; i++)
5166 add_phi_args_after_copy_bb (region_copy[i]);
5167 if (e_copy)
5168 add_phi_args_after_copy_edge (e_copy);
5169
5170 for (i = 0; i < n_region; i++)
5171 region_copy[i]->flags &= ~BB_DUPLICATED;
5172 }
5173
5174 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5175 important exit edge EXIT. By important we mean that no SSA name defined
5176 inside region is live over the other exit edges of the region. All entry
5177 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5178 to the duplicate of the region. SSA form, dominance and loop information
5179 are updated. The new basic blocks are stored in REGION_COPY in the same
5180 order as they had in REGION, provided that REGION_COPY is not NULL.
5181 The function returns false if it is unable to copy the region,
5182 true otherwise. */
5183
5184 bool
5185 gimple_duplicate_sese_region (edge entry, edge exit,
5186 basic_block *region, unsigned n_region,
5187 basic_block *region_copy)
5188 {
5189 unsigned i;
5190 bool free_region_copy = false, copying_header = false;
5191 struct loop *loop = entry->dest->loop_father;
5192 edge exit_copy;
5193 VEC (basic_block, heap) *doms;
5194 edge redirected;
5195 int total_freq = 0, entry_freq = 0;
5196 gcov_type total_count = 0, entry_count = 0;
5197
5198 if (!can_copy_bbs_p (region, n_region))
5199 return false;
5200
5201 /* Some sanity checking. Note that we do not check for all possible
5202 misuses of the function. That is, if you ask to copy something weird,
5203 it will work, but the state of the data structures probably will not
5204 be correct. */
5205 for (i = 0; i < n_region; i++)
5206 {
5207 /* We do not handle subloops, i.e. all the blocks must belong to the
5208 same loop. */
5209 if (region[i]->loop_father != loop)
5210 return false;
5211
5212 if (region[i] != entry->dest
5213 && region[i] == loop->header)
5214 return false;
5215 }
5216
5217 set_loop_copy (loop, loop);
5218
5219 /* In case the function is used for loop header copying (which is the primary
5220 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5221 if (loop->header == entry->dest)
5222 {
5223 copying_header = true;
5224 set_loop_copy (loop, loop_outer (loop));
5225
5226 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5227 return false;
5228
5229 for (i = 0; i < n_region; i++)
5230 if (region[i] != exit->src
5231 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5232 return false;
5233 }
5234
5235 if (!region_copy)
5236 {
5237 region_copy = XNEWVEC (basic_block, n_region);
5238 free_region_copy = true;
5239 }
5240
5241 gcc_assert (!need_ssa_update_p (cfun));
5242
5243 /* Record blocks outside the region that are dominated by something
5244 inside. */
5245 doms = NULL;
5246 initialize_original_copy_tables ();
5247
5248 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5249
5250 if (entry->dest->count)
5251 {
5252 total_count = entry->dest->count;
5253 entry_count = entry->count;
5254 /* Fix up corner cases, to avoid division by zero or creation of negative
5255 frequencies. */
5256 if (entry_count > total_count)
5257 entry_count = total_count;
5258 }
5259 else
5260 {
5261 total_freq = entry->dest->frequency;
5262 entry_freq = EDGE_FREQUENCY (entry);
5263 /* Fix up corner cases, to avoid division by zero or creation of negative
5264 frequencies. */
5265 if (total_freq == 0)
5266 total_freq = 1;
5267 else if (entry_freq > total_freq)
5268 entry_freq = total_freq;
5269 }
5270
5271 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5272 split_edge_bb_loc (entry));
5273 if (total_count)
5274 {
5275 scale_bbs_frequencies_gcov_type (region, n_region,
5276 total_count - entry_count,
5277 total_count);
5278 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5279 total_count);
5280 }
5281 else
5282 {
5283 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5284 total_freq);
5285 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5286 }
5287
5288 if (copying_header)
5289 {
5290 loop->header = exit->dest;
5291 loop->latch = exit->src;
5292 }
5293
5294 /* Redirect the entry and add the phi node arguments. */
5295 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5296 gcc_assert (redirected != NULL);
5297 flush_pending_stmts (entry);
5298
5299 /* Concerning updating of dominators: We must recount dominators
5300 for entry block and its copy. Anything that is outside of the
5301 region, but was dominated by something inside needs recounting as
5302 well. */
5303 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5304 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5305 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5306 VEC_free (basic_block, heap, doms);
5307
5308 /* Add the other PHI node arguments. */
5309 add_phi_args_after_copy (region_copy, n_region, NULL);
5310
5311 /* Update the SSA web. */
5312 update_ssa (TODO_update_ssa);
5313
5314 if (free_region_copy)
5315 free (region_copy);
5316
5317 free_original_copy_tables ();
5318 return true;
5319 }
5320
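/* A minimal usage sketch (hypothetical loop): for "while (n_1 > 0) body;"
   copying the header region across the preheader edge, e.g.

     gimple_duplicate_sese_region (entry, exit, region, n_region, NULL);

   produces the guarded do-while form, with EXIT and its copy becoming
   the new latch and entry edges as noted in the comments above.  */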
5321 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5322 are stored to REGION_COPY in the same order in that they appear
5323 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5324 the region, EXIT an exit from it. The condition guarding EXIT
5325 is moved to ENTRY. Returns true if duplication succeeds, false
5326 otherwise.
5327
5328 For example,
5329
5330 some_code;
5331 if (cond)
5332 A;
5333 else
5334 B;
5335
5336 is transformed to
5337
5338 if (cond)
5339 {
5340 some_code;
5341 A;
5342 }
5343 else
5344 {
5345 some_code;
5346 B;
5347 }
5348 */
5349
5350 bool
5351 gimple_duplicate_sese_tail (edge entry, edge exit,
5352 basic_block *region, unsigned n_region,
5353 basic_block *region_copy)
5354 {
5355 unsigned i;
5356 bool free_region_copy = false;
5357 struct loop *loop = exit->dest->loop_father;
5358 struct loop *orig_loop = entry->dest->loop_father;
5359 basic_block switch_bb, entry_bb, nentry_bb;
5360 VEC (basic_block, heap) *doms;
5361 int total_freq = 0, exit_freq = 0;
5362 gcov_type total_count = 0, exit_count = 0;
5363 edge exits[2], nexits[2], e;
5364 gimple_stmt_iterator gsi;
5365 gimple cond_stmt;
5366 edge sorig, snew;
5367
5368 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5369 exits[0] = exit;
5370 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5371
5372 if (!can_copy_bbs_p (region, n_region))
5373 return false;
5374
5375 /* Some sanity checking. Note that we do not check for all possible
5376 misuses of the function. That is, if you ask to copy something weird
5377 (e.g., in the example, if there is a jump from inside to the middle
5378 of some_code, or some_code defines some of the values used in cond),
5379 it will work, but the resulting code will not be correct. */
5380 for (i = 0; i < n_region; i++)
5381 {
5382 /* We do not handle subloops, i.e. all the blocks must belong to the
5383 same loop. */
5384 if (region[i]->loop_father != orig_loop)
5385 return false;
5386
5387 if (region[i] == orig_loop->latch)
5388 return false;
5389 }
5390
5391 initialize_original_copy_tables ();
5392 set_loop_copy (orig_loop, loop);
5393
5394 if (!region_copy)
5395 {
5396 region_copy = XNEWVEC (basic_block, n_region);
5397 free_region_copy = true;
5398 }
5399
5400 gcc_assert (!need_ssa_update_p (cfun));
5401
5402 /* Record blocks outside the region that are dominated by something
5403 inside. */
5404 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5405
5406 if (exit->src->count)
5407 {
5408 total_count = exit->src->count;
5409 exit_count = exit->count;
5410 /* Fix up corner cases, to avoid division by zero or creation of negative
5411 frequencies. */
5412 if (exit_count > total_count)
5413 exit_count = total_count;
5414 }
5415 else
5416 {
5417 total_freq = exit->src->frequency;
5418 exit_freq = EDGE_FREQUENCY (exit);
5419 /* Fix up corner cases, to avoid division by zero or creation of negative
5420 frequencies. */
5421 if (total_freq == 0)
5422 total_freq = 1;
5423 if (exit_freq > total_freq)
5424 exit_freq = total_freq;
5425 }
5426
5427 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5428 split_edge_bb_loc (exit));
5429 if (total_count)
5430 {
5431 scale_bbs_frequencies_gcov_type (region, n_region,
5432 total_count - exit_count,
5433 total_count);
5434 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5435 total_count);
5436 }
5437 else
5438 {
5439 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5440 total_freq);
5441 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5442 }
5443
5444 /* Create the switch block, and put the exit condition to it. */
5445 entry_bb = entry->dest;
5446 nentry_bb = get_bb_copy (entry_bb);
5447 if (!last_stmt (entry->src)
5448 || !stmt_ends_bb_p (last_stmt (entry->src)))
5449 switch_bb = entry->src;
5450 else
5451 switch_bb = split_edge (entry);
5452 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5453
5454 gsi = gsi_last_bb (switch_bb);
5455 cond_stmt = last_stmt (exit->src);
5456 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5457 cond_stmt = gimple_copy (cond_stmt);
5458 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5459 gimple_cond_set_rhs (cond_stmt, unshare_expr (gimple_cond_rhs (cond_stmt)));
5460 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5461
5462 sorig = single_succ_edge (switch_bb);
5463 sorig->flags = exits[1]->flags;
5464 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5465
5466 /* Register the new edge from SWITCH_BB in loop exit lists. */
5467 rescan_loop_exit (snew, true, false);
5468
5469 /* Add the PHI node arguments. */
5470 add_phi_args_after_copy (region_copy, n_region, snew);
5471
5472 /* Get rid of now superfluous conditions and associated edges (and phi node
5473 arguments). */
5474 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5475 PENDING_STMT (e) = NULL;
5476 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5477 PENDING_STMT (e) = NULL;
5478
5479 /* Anything that is outside of the region, but was dominated by something
5480 inside needs to update dominance info. */
5481 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5482 VEC_free (basic_block, heap, doms);
5483
5484 /* Update the SSA web. */
5485 update_ssa (TODO_update_ssa);
5486
5487 if (free_region_copy)
5488 free (region_copy);
5489
5490 free_original_copy_tables ();
5491 return true;
5492 }
5493
5494 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5495 adding blocks when the dominator traversal reaches EXIT. This
5496 function silently assumes that ENTRY strictly dominates EXIT. */
5497
5498 void
5499 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5500 VEC(basic_block,heap) **bbs_p)
5501 {
5502 basic_block son;
5503
5504 for (son = first_dom_son (CDI_DOMINATORS, entry);
5505 son;
5506 son = next_dom_son (CDI_DOMINATORS, son))
5507 {
5508 VEC_safe_push (basic_block, heap, *bbs_p, son);
5509 if (son != exit)
5510 gather_blocks_in_sese_region (son, exit, bbs_p);
5511 }
5512 }
5513
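/* A minimal usage sketch, mirroring move_sese_region_to_fn below:

     VEC(basic_block,heap) *bbs = NULL;
     VEC_safe_push (basic_block, heap, bbs, entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

   ENTRY_BB must be pushed by hand because the dominator walk only
   visits its children.  */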
5514 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5515 The duplicates are recorded in VARS_MAP. */
5516
5517 static void
5518 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5519 tree to_context)
5520 {
5521 tree t = *tp, new_t;
5522 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5523 void **loc;
5524
5525 if (DECL_CONTEXT (t) == to_context)
5526 return;
5527
5528 loc = pointer_map_contains (vars_map, t);
5529
5530 if (!loc)
5531 {
5532 loc = pointer_map_insert (vars_map, t);
5533
5534 if (SSA_VAR_P (t))
5535 {
5536 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5537 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
5538 }
5539 else
5540 {
5541 gcc_assert (TREE_CODE (t) == CONST_DECL);
5542 new_t = copy_node (t);
5543 }
5544 DECL_CONTEXT (new_t) = to_context;
5545
5546 *loc = new_t;
5547 }
5548 else
5549 new_t = (tree) *loc;
5550
5551 *tp = new_t;
5552 }
5553
5554
5555 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5556 VARS_MAP maps old ssa names and var_decls to the new ones. */
5557
5558 static tree
5559 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5560 tree to_context)
5561 {
5562 void **loc;
5563 tree new_name, decl = SSA_NAME_VAR (name);
5564
5565 gcc_assert (is_gimple_reg (name));
5566
5567 loc = pointer_map_contains (vars_map, name);
5568
5569 if (!loc)
5570 {
5571 replace_by_duplicate_decl (&decl, vars_map, to_context);
5572
5573 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5574 if (gimple_in_ssa_p (cfun))
5575 add_referenced_var (decl);
5576
5577 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5578 if (SSA_NAME_IS_DEFAULT_DEF (name))
5579 set_default_def (decl, new_name);
5580 pop_cfun ();
5581
5582 loc = pointer_map_insert (vars_map, name);
5583 *loc = new_name;
5584 }
5585 else
5586 new_name = (tree) *loc;
5587
5588 return new_name;
5589 }
5590
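/* For illustration (hypothetical names): moving x_5, whose
   SSA_NAME_VAR is the local "x", first duplicates the VAR_DECL via
   replace_by_duplicate_decl, then creates a fresh SSA name for the
   copy inside TO_CONTEXT; later lookups of x_5 hit the VARS_MAP cache
   and return the same replacement.  */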
5591 struct move_stmt_d
5592 {
5593 tree orig_block;
5594 tree new_block;
5595 tree from_context;
5596 tree to_context;
5597 struct pointer_map_t *vars_map;
5598 htab_t new_label_map;
5599 bool remap_decls_p;
5600 };
5601
5602 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5603 contained in *TP whose block was previously ORIG_BLOCK, and change
5604 the DECL_CONTEXT of every local variable referenced in *TP. */
5605
5606 static tree
5607 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5608 {
5609 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5610 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5611 tree t = *tp;
5612
5613 if (EXPR_P (t))
5614 /* We should never have TREE_BLOCK set on non-statements. */
5615 gcc_assert (!TREE_BLOCK (t));
5616
5617 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5618 {
5619 if (TREE_CODE (t) == SSA_NAME)
5620 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5621 else if (TREE_CODE (t) == LABEL_DECL)
5622 {
5623 if (p->new_label_map)
5624 {
5625 struct tree_map in, *out;
5626 in.base.from = t;
5627 out = (struct tree_map *)
5628 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5629 if (out)
5630 *tp = t = out->to;
5631 }
5632
5633 DECL_CONTEXT (t) = p->to_context;
5634 }
5635 else if (p->remap_decls_p)
5636 {
5637 /* Replace T with its duplicate. T should no longer appear in the
5638 parent function, so this looks wasteful; however, it may appear
5639 in referenced_vars, and more importantly, as virtual operands of
5640 statements, and in alias lists of other variables. It would be
5641 quite difficult to expunge it from all those places. ??? It might
5642 suffice to do this for addressable variables. */
5643 if ((TREE_CODE (t) == VAR_DECL
5644 && !is_global_var (t))
5645 || TREE_CODE (t) == CONST_DECL)
5646 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5647
5648 if (SSA_VAR_P (t)
5649 && gimple_in_ssa_p (cfun))
5650 {
5651 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5652 add_referenced_var (*tp);
5653 pop_cfun ();
5654 }
5655 }
5656 *walk_subtrees = 0;
5657 }
5658 else if (TYPE_P (t))
5659 *walk_subtrees = 0;
5660
5661 return NULL_TREE;
5662 }
5663
5664 /* Like move_stmt_op, but for gimple statements.
5665
5666 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5667 contained in the current statement in *GSI_P and change the
5668 DECL_CONTEXT of every local variable referenced in the current
5669 statement. */
5670
5671 static tree
5672 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5673 struct walk_stmt_info *wi)
5674 {
5675 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5676 gimple stmt = gsi_stmt (*gsi_p);
5677 tree block = gimple_block (stmt);
5678
5679 if (p->orig_block == NULL_TREE
5680 || block == p->orig_block
5681 || block == NULL_TREE)
5682 gimple_set_block (stmt, p->new_block);
5683 #ifdef ENABLE_CHECKING
5684 else if (block != p->new_block)
5685 {
5686 while (block && block != p->orig_block)
5687 block = BLOCK_SUPERCONTEXT (block);
5688 gcc_assert (block);
5689 }
5690 #endif
5691
5692 if (is_gimple_omp (stmt)
5693 && gimple_code (stmt) != GIMPLE_OMP_RETURN
5694 && gimple_code (stmt) != GIMPLE_OMP_CONTINUE)
5695 {
5696 /* Do not remap variables inside OMP directives. Variables
5697 referenced in clauses and directive header belong to the
5698 parent function and should not be moved into the child
5699 function. */
5700 bool save_remap_decls_p = p->remap_decls_p;
5701 p->remap_decls_p = false;
5702 *handled_ops_p = true;
5703
5704 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r, move_stmt_op, wi);
5705
5706 p->remap_decls_p = save_remap_decls_p;
5707 }
5708
5709 return NULL_TREE;
5710 }
5711
5712 /* Mark virtual operands of all statements in basic block BB for
5713 renaming. */
5714
5715 void
5716 mark_virtual_ops_in_bb (basic_block bb)
5717 {
5718 gimple_stmt_iterator gsi;
5719
5720 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5721 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5722
5723 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5724 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5725 }
5726
5727 /* Move basic block BB from function CFUN to function DEST_FN. The
5728 block is moved out of the original linked list and placed after
5729 block AFTER in the new list. Also, the block is removed from the
5730 original array of blocks and placed in DEST_FN's array of blocks.
5731 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
5732 updated to reflect the moved edges.
5733
5734 The local variables are remapped to new instances, VARS_MAP is used
5735 to record the mapping. */
5736
5737 static void
5738 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5739 basic_block after, bool update_edge_count_p,
5740 struct move_stmt_d *d, int eh_offset)
5741 {
5742 struct control_flow_graph *cfg;
5743 edge_iterator ei;
5744 edge e;
5745 gimple_stmt_iterator si;
5746 unsigned old_len, new_len;
5747
5748 /* Remove BB from dominance structures. */
5749 delete_from_dominance_info (CDI_DOMINATORS, bb);
5750 if (current_loops)
5751 remove_bb_from_loops (bb);
5752
5753 /* Link BB to the new linked list. */
5754 move_block_after (bb, after);
5755
5756 /* Update the edge count in the corresponding flowgraphs. */
5757 if (update_edge_count_p)
5758 FOR_EACH_EDGE (e, ei, bb->succs)
5759 {
5760 cfun->cfg->x_n_edges--;
5761 dest_cfun->cfg->x_n_edges++;
5762 }
5763
5764 /* Remove BB from the original basic block array. */
5765 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5766 cfun->cfg->x_n_basic_blocks--;
5767
5768 /* Grow DEST_CFUN's basic block array if needed. */
5769 cfg = dest_cfun->cfg;
5770 cfg->x_n_basic_blocks++;
5771 if (bb->index >= cfg->x_last_basic_block)
5772 cfg->x_last_basic_block = bb->index + 1;
5773
5774 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5775 if ((unsigned) cfg->x_last_basic_block >= old_len)
5776 {
5777 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5778 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5779 new_len);
5780 }
5781
5782 VEC_replace (basic_block, cfg->x_basic_block_info,
5783 bb->index, bb);
5784
5785 /* Remap the variables in phi nodes. */
5786 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5787 {
5788 gimple phi = gsi_stmt (si);
5789 use_operand_p use;
5790 tree op = PHI_RESULT (phi);
5791 ssa_op_iter oi;
5792
5793 if (!is_gimple_reg (op))
5794 {
5795 /* Remove the phi nodes for virtual operands (alias analysis will be
5796 run for the new function, anyway). */
5797 remove_phi_node (&si, true);
5798 continue;
5799 }
5800
5801 SET_PHI_RESULT (phi,
5802 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5803 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5804 {
5805 op = USE_FROM_PTR (use);
5806 if (TREE_CODE (op) == SSA_NAME)
5807 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5808 }
5809
5810 gsi_next (&si);
5811 }
5812
5813 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5814 {
5815 gimple stmt = gsi_stmt (si);
5816 int region;
5817 struct walk_stmt_info wi;
5818
5819 memset (&wi, 0, sizeof (wi));
5820 wi.info = d;
5821 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5822
5823 if (gimple_code (stmt) == GIMPLE_LABEL)
5824 {
5825 tree label = gimple_label_label (stmt);
5826 int uid = LABEL_DECL_UID (label);
5827
5828 gcc_assert (uid > -1);
5829
5830 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5831 if (old_len <= (unsigned) uid)
5832 {
5833 new_len = 3 * uid / 2 + 1;
5834 VEC_safe_grow_cleared (basic_block, gc,
5835 cfg->x_label_to_block_map, new_len);
5836 }
5837
5838 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5839 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5840
5841 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5842
5843 if (uid >= dest_cfun->cfg->last_label_uid)
5844 dest_cfun->cfg->last_label_uid = uid + 1;
5845 }
5846 else if (gimple_code (stmt) == GIMPLE_RESX && eh_offset != 0)
5847 gimple_resx_set_region (stmt, gimple_resx_region (stmt) + eh_offset);
5848
5849 region = lookup_stmt_eh_region (stmt);
5850 if (region >= 0)
5851 {
5852 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5853 remove_stmt_from_eh_region (stmt);
5854 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5855 gimple_remove_stmt_histograms (cfun, stmt);
5856 }
5857
5858 /* We cannot leave any operands allocated from the operand caches of
5859 the current function. */
5860 free_stmt_operands (stmt);
5861 push_cfun (dest_cfun);
5862 update_stmt (stmt);
5863 pop_cfun ();
5864 }
5865
5866 FOR_EACH_EDGE (e, ei, bb->succs)
5867 if (e->goto_locus)
5868 {
5869 tree block = e->goto_block;
5870 if (d->orig_block == NULL_TREE
5871 || block == d->orig_block)
5872 e->goto_block = d->new_block;
5873 #ifdef ENABLE_CHECKING
5874 else if (block != d->new_block)
5875 {
5876 while (block && block != d->orig_block)
5877 block = BLOCK_SUPERCONTEXT (block);
5878 gcc_assert (block);
5879 }
5880 #endif
5881 }
5882 }
5883
5884 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5885 the outermost EH region. Use REGION as the incoming base EH region. */
5886
5887 static int
5888 find_outermost_region_in_block (struct function *src_cfun,
5889 basic_block bb, int region)
5890 {
5891 gimple_stmt_iterator si;
5892
5893 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5894 {
5895 gimple stmt = gsi_stmt (si);
5896 int stmt_region;
5897
5898 if (gimple_code (stmt) == GIMPLE_RESX)
5899 stmt_region = gimple_resx_region (stmt);
5900 else
5901 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
5902 if (stmt_region > 0)
5903 {
5904 if (region < 0)
5905 region = stmt_region;
5906 else if (stmt_region != region)
5907 {
5908 region = eh_region_outermost (src_cfun, stmt_region, region);
5909 gcc_assert (region != -1);
5910 }
5911 }
5912 }
5913
5914 return region;
5915 }
5916
5917 static tree
5918 new_label_mapper (tree decl, void *data)
5919 {
5920 htab_t hash = (htab_t) data;
5921 struct tree_map *m;
5922 void **slot;
5923
5924 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5925
5926 m = XNEW (struct tree_map);
5927 m->hash = DECL_UID (decl);
5928 m->base.from = decl;
5929 m->to = create_artificial_label ();
5930 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
5931 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
5932 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
5933
5934 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5935 gcc_assert (*slot == NULL);
5936
5937 *slot = m;
5938
5939 return m->to;
5940 }
5941
5942 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
5943 subblocks. */
5944
5945 static void
5946 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
5947 tree to_context)
5948 {
5949 tree *tp, t;
5950
5951 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
5952 {
5953 t = *tp;
5954 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
5955 continue;
5956 replace_by_duplicate_decl (&t, vars_map, to_context);
5957 if (t != *tp)
5958 {
5959 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
5960 {
5961 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
5962 DECL_HAS_VALUE_EXPR_P (t) = 1;
5963 }
5964 TREE_CHAIN (t) = TREE_CHAIN (*tp);
5965 *tp = t;
5966 }
5967 }
5968
5969 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
5970 replace_block_vars_by_duplicates (block, vars_map, to_context);
5971 }
5972
5973 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
5974 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
5975 single basic block in the original CFG and the new basic block is
5976 returned. DEST_CFUN must not have a CFG yet.
5977
5978 Note that the region need not be a pure SESE region. Blocks inside
5979 the region may contain calls to abort/exit. The only restriction
5980 is that ENTRY_BB should be the only entry point and it must
5981 dominate EXIT_BB.
5982
5983 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
5984 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
5985 to the new function.
5986
5987 All local variables referenced in the region are assumed to be in
5988 the corresponding BLOCK_VARS and unexpanded variable lists
5989 associated with DEST_CFUN. */
5990
5991 basic_block
5992 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
5993 basic_block exit_bb, tree orig_block)
5994 {
5995 VEC(basic_block,heap) *bbs, *dom_bbs;
5996 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
5997 basic_block after, bb, *entry_pred, *exit_succ, abb;
5998 struct function *saved_cfun = cfun;
5999 int *entry_flag, *exit_flag, eh_offset;
6000 unsigned *entry_prob, *exit_prob;
6001 unsigned i, num_entry_edges, num_exit_edges;
6002 edge e;
6003 edge_iterator ei;
6004 htab_t new_label_map;
6005 struct pointer_map_t *vars_map;
6006 struct loop *loop = entry_bb->loop_father;
6007 struct move_stmt_d d;
6008
6009 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6010 region. */
6011 gcc_assert (entry_bb != exit_bb
6012 && (!exit_bb
6013 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6014
6015 /* Collect all the blocks in the region. Manually add ENTRY_BB
6016 because it won't be added by dfs_enumerate_from. */
6017 bbs = NULL;
6018 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6019 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6020
6021 /* The blocks that used to be dominated by something in BBS will now be
6022 dominated by the new block. */
6023 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6024 VEC_address (basic_block, bbs),
6025 VEC_length (basic_block, bbs));
6026
6027 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6028 the predecessor edges to ENTRY_BB and the successor edges to
6029 EXIT_BB so that we can re-attach them to the new basic block that
6030 will replace the region. */
6031 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6032 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6033 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6034 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6035 i = 0;
6036 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6037 {
6038 entry_prob[i] = e->probability;
6039 entry_flag[i] = e->flags;
6040 entry_pred[i++] = e->src;
6041 remove_edge (e);
6042 }
6043
6044 if (exit_bb)
6045 {
6046 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6047 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6048 sizeof (basic_block));
6049 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6050 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6051 i = 0;
6052 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6053 {
6054 exit_prob[i] = e->probability;
6055 exit_flag[i] = e->flags;
6056 exit_succ[i++] = e->dest;
6057 remove_edge (e);
6058 }
6059 }
6060 else
6061 {
6062 num_exit_edges = 0;
6063 exit_succ = NULL;
6064 exit_flag = NULL;
6065 exit_prob = NULL;
6066 }
6067
6068 /* Switch context to the child function to initialize DEST_FN's CFG. */
6069 gcc_assert (dest_cfun->cfg == NULL);
6070 push_cfun (dest_cfun);
6071
6072 init_empty_tree_cfg ();
6073
6074 /* Initialize EH information for the new function. */
6075 eh_offset = 0;
6076 new_label_map = NULL;
6077 if (saved_cfun->eh)
6078 {
6079 int region = -1;
6080
6081 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6082 region = find_outermost_region_in_block (saved_cfun, bb, region);
6083
6084 init_eh_for_function ();
6085 if (region != -1)
6086 {
6087 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6088 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
6089 new_label_map, region, 0);
6090 }
6091 }
6092
6093 pop_cfun ();
6094
6095 /* Move blocks from BBS into DEST_CFUN. */
6096 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6097 after = dest_cfun->cfg->x_entry_block_ptr;
6098 vars_map = pointer_map_create ();
6099
6100 memset (&d, 0, sizeof (d));
6101 d.vars_map = vars_map;
6102 d.from_context = cfun->decl;
6103 d.to_context = dest_cfun->decl;
6104 d.new_label_map = new_label_map;
6105 d.remap_decls_p = true;
6106 d.orig_block = orig_block;
6107 d.new_block = DECL_INITIAL (dest_cfun->decl);
6108
6109 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6110 {
6111 /* No need to update edge counts on the last block. It has
6112 already been updated earlier when we detached the region from
6113 the original CFG. */
6114 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d, eh_offset);
6115 after = bb;
6116 }
6117
6118 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6119 if (orig_block)
6120 {
6121 tree block;
6122 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6123 == NULL_TREE);
6124 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6125 = BLOCK_SUBBLOCKS (orig_block);
6126 for (block = BLOCK_SUBBLOCKS (orig_block);
6127 block; block = BLOCK_CHAIN (block))
6128 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6129 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6130 }
6131
6132 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6133 vars_map, dest_cfun->decl);
6134
6135 if (new_label_map)
6136 htab_delete (new_label_map);
6137 pointer_map_destroy (vars_map);
6138
6139 /* Rewire the entry and exit blocks. ENTRY_BB becomes the single
6140 successor of DEST_FN's ENTRY_BLOCK_PTR in the child function.
6141 Similarly, EXIT_BB becomes a predecessor of DEST_FN's
6142 EXIT_BLOCK_PTR. We need to switch CFUN between DEST_CFUN and
6143 SAVED_CFUN so that the various CFG manipulation functions get
6144 to the right CFG.
6145
6146 FIXME, this is silly. The CFG ought to become a parameter to
6147 these helpers. */
6148 push_cfun (dest_cfun);
6149 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6150 if (exit_bb)
6151 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6152 pop_cfun ();
6153
6154 /* Back in the original function, the SESE region has disappeared,
6155 so create a new basic block in its place. */
6156 bb = create_empty_bb (entry_pred[0]);
6157 if (current_loops)
6158 add_bb_to_loop (bb, loop);
6159 for (i = 0; i < num_entry_edges; i++)
6160 {
6161 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6162 e->probability = entry_prob[i];
6163 }
6164
6165 for (i = 0; i < num_exit_edges; i++)
6166 {
6167 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6168 e->probability = exit_prob[i];
6169 }
6170
6171 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6172 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6173 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6174 VEC_free (basic_block, heap, dom_bbs);
6175
6176 if (exit_bb)
6177 {
6178 free (exit_prob);
6179 free (exit_flag);
6180 free (exit_succ);
6181 }
6182 free (entry_prob);
6183 free (entry_flag);
6184 free (entry_pred);
6185 VEC_free (basic_block, heap, bbs);
6186
6187 return bb;
6188 }
6189
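/* Illustrative usage sketch: how an outlining pass might call
   move_sese_region_to_fn.  The names OUTLINE_REGION_EXAMPLE, CHILD_FN,
   REGION_ENTRY and REGION_EXIT are hypothetical, introduced only for
   this example.  */
#if 0
static basic_block
outline_region_example (tree child_fn, basic_block region_entry,
                        basic_block region_exit, tree orig_block)
{
  /* The child function must not have a CFG yet; REGION_ENTRY must be
     the region's only entry point and must dominate REGION_EXIT.  */
  struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  return move_sese_region_to_fn (child_cfun, region_entry, region_exit,
                                 orig_block);
}
#endif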
6190
6191 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6192 tree-pass.h). */
6193
6194 void
6195 dump_function_to_file (tree fn, FILE *file, int flags)
6196 {
6197 tree arg, vars, var;
6198 struct function *dsf;
6199 bool ignore_topmost_bind = false, any_var = false;
6200 basic_block bb;
6201 tree chain;
6202
6203 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6204
6205 arg = DECL_ARGUMENTS (fn);
6206 while (arg)
6207 {
6208 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6209 fprintf (file, " ");
6210 print_generic_expr (file, arg, dump_flags);
6211 if (flags & TDF_VERBOSE)
6212 print_node (file, "", arg, 4);
6213 if (TREE_CHAIN (arg))
6214 fprintf (file, ", ");
6215 arg = TREE_CHAIN (arg);
6216 }
6217 fprintf (file, ")\n");
6218
6219 if (flags & TDF_VERBOSE)
6220 print_node (file, "", fn, 2);
6221
6222 dsf = DECL_STRUCT_FUNCTION (fn);
6223 if (dsf && (flags & TDF_DETAILS))
6224 dump_eh_tree (file, dsf);
6225
6226 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6227 {
6228 dump_node (fn, TDF_SLIM | flags, file);
6229 return;
6230 }
6231
6232 /* Switch CFUN to point to FN. */
6233 push_cfun (DECL_STRUCT_FUNCTION (fn));
6234
6235 /* When GIMPLE is lowered, the variables are no longer available in
6236 BIND_EXPRs, so display them separately. */
6237 if (cfun && cfun->decl == fn && cfun->local_decls)
6238 {
6239 ignore_topmost_bind = true;
6240
6241 fprintf (file, "{\n");
6242 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6243 {
6244 var = TREE_VALUE (vars);
6245
6246 print_generic_decl (file, var, flags);
6247 if (flags & TDF_VERBOSE)
6248 print_node (file, "", var, 4);
6249 fprintf (file, "\n");
6250
6251 any_var = true;
6252 }
6253 }
6254
6255 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6256 {
6257 /* If the CFG has been built, emit a CFG-based dump. */
6258 check_bb_profile (ENTRY_BLOCK_PTR, file);
6259 if (!ignore_topmost_bind)
6260 fprintf (file, "{\n");
6261
6262 if (any_var && n_basic_blocks)
6263 fprintf (file, "\n");
6264
6265 FOR_EACH_BB (bb)
6266 gimple_dump_bb (bb, file, 2, flags);
6267
6268 fprintf (file, "}\n");
6269 check_bb_profile (EXIT_BLOCK_PTR, file);
6270 }
6271 else if (DECL_SAVED_TREE (fn) == NULL)
6272 {
6273 /* The function is now in GIMPLE form but the CFG has not been
6274 built yet. Emit the single sequence of GIMPLE statements
6275 that make up its body. */
6276 gimple_seq body = gimple_body (fn);
6277
6278 if (gimple_seq_first_stmt (body)
6279 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6280 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6281 print_gimple_seq (file, body, 0, flags);
6282 else
6283 {
6284 if (!ignore_topmost_bind)
6285 fprintf (file, "{\n");
6286
6287 if (any_var)
6288 fprintf (file, "\n");
6289
6290 print_gimple_seq (file, body, 2, flags);
6291 fprintf (file, "}\n");
6292 }
6293 }
6294 else
6295 {
6296 int indent;
6297
6298 /* Make a tree based dump. */
6299 chain = DECL_SAVED_TREE (fn);
6300
6301 if (chain && TREE_CODE (chain) == BIND_EXPR)
6302 {
6303 if (ignore_topmost_bind)
6304 {
6305 chain = BIND_EXPR_BODY (chain);
6306 indent = 2;
6307 }
6308 else
6309 indent = 0;
6310 }
6311 else
6312 {
6313 if (!ignore_topmost_bind)
6314 fprintf (file, "{\n");
6315 indent = 2;
6316 }
6317
6318 if (any_var)
6319 fprintf (file, "\n");
6320
6321 print_generic_stmt_indented (file, chain, flags, indent);
6322 if (ignore_topmost_bind)
6323 fprintf (file, "}\n");
6324 }
6325
6326 fprintf (file, "\n\n");
6327
6328 /* Restore CFUN. */
6329 pop_cfun ();
6330 }
6331
6332
6333 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree-pass.h). */
6334
6335 void
6336 debug_function (tree fn, int flags)
6337 {
6338 dump_function_to_file (fn, stderr, flags);
6339 }
6340
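/* Usage sketch: debug_function is meant for interactive use, e.g. from
   gdb while debugging the compiler (assuming the breakpoint is inside a
   pass, where current_function_decl is set):

     (gdb) call debug_function (current_function_decl, TDF_DETAILS)

   This dumps the current function to stderr; with TDF_DETAILS the EH
   tree is printed as well.  */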
6341
6342 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6343
6344 static void
6345 print_pred_bbs (FILE *file, basic_block bb)
6346 {
6347 edge e;
6348 edge_iterator ei;
6349
6350 FOR_EACH_EDGE (e, ei, bb->preds)
6351 fprintf (file, "bb_%d ", e->src->index);
6352 }
6353
6354
6355 /* Print on FILE the indexes for the successors of basic_block BB. */
6356
6357 static void
6358 print_succ_bbs (FILE *file, basic_block bb)
6359 {
6360 edge e;
6361 edge_iterator ei;
6362
6363 FOR_EACH_EDGE (e, ei, bb->succs)
6364 fprintf (file, "bb_%d ", e->dest->index);
6365 }
6366
6367 /* Print basic block BB to FILE according to the VERBOSITY level. */
6368
6369 void
6370 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6371 {
6372 char *s_indent = (char *) alloca ((size_t) indent + 1);
6373 memset ((void *) s_indent, ' ', (size_t) indent);
6374 s_indent[indent] = '\0';
6375
6376 /* Print basic_block's header. */
6377 if (verbosity >= 2)
6378 {
6379 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6380 print_pred_bbs (file, bb);
6381 fprintf (file, "}, succs = {");
6382 print_succ_bbs (file, bb);
6383 fprintf (file, "})\n");
6384 }
6385
6386 /* Print basic_block's body. */
6387 if (verbosity >= 3)
6388 {
6389 fprintf (file, "%s {\n", s_indent);
6390 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6391 fprintf (file, "%s }\n", s_indent);
6392 }
6393 }
6394
6395 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6396
6397 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
6398 the VERBOSITY level, this outputs the contents of the loop or
6399 just its structure. */
6400
6401 static void
6402 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6403 {
6404 char *s_indent;
6405 basic_block bb;
6406
6407 if (loop == NULL)
6408 return;
6409
6410 s_indent = (char *) alloca ((size_t) indent + 1);
6411 memset ((void *) s_indent, ' ', (size_t) indent);
6412 s_indent[indent] = '\0';
6413
6414 /* Print loop's header. */
6415 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6416 loop->num, loop->header->index, loop->latch->index);
6417 fprintf (file, ", niter = ");
6418 print_generic_expr (file, loop->nb_iterations, 0);
6419
6420 if (loop->any_upper_bound)
6421 {
6422 fprintf (file, ", upper_bound = ");
6423 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6424 }
6425
6426 if (loop->any_estimate)
6427 {
6428 fprintf (file, ", estimate = ");
6429 dump_double_int (file, loop->nb_iterations_estimate, true);
6430 }
6431 fprintf (file, ")\n");
6432
6433 /* Print loop's body. */
6434 if (verbosity >= 1)
6435 {
6436 fprintf (file, "%s{\n", s_indent);
6437 FOR_EACH_BB (bb)
6438 if (bb->loop_father == loop)
6439 print_loops_bb (file, bb, indent, verbosity);
6440
6441 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6442 fprintf (file, "%s}\n", s_indent);
6443 }
6444 }
6445
6446 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6447 spaces. Depending on the VERBOSITY level, this outputs the
6448 contents of the loops or just their structure. */
6449
6450 static void
6451 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6452 {
6453 if (loop == NULL)
6454 return;
6455
6456 print_loop (file, loop, indent, verbosity);
6457 print_loop_and_siblings (file, loop->next, indent, verbosity);
6458 }
6459
6460 /* Pretty print on FILE the loop structure of the current function,
6461 starting from the loop father of the CFG entry block. */
6462
6463 void
6464 print_loops (FILE *file, int verbosity)
6465 {
6466 basic_block bb;
6467
6468 bb = ENTRY_BLOCK_PTR;
6469 if (bb && bb->loop_father)
6470 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6471 }
6472
6473
6474 /* Dump the loop structure of the current function to stderr, at some VERBOSITY level. */
6475
6476 void
6477 debug_loops (int verbosity)
6478 {
6479 print_loops (stderr, verbosity);
6480 }
6481
6482 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6483
6484 void
6485 debug_loop (struct loop *loop, int verbosity)
6486 {
6487 print_loop (stderr, loop, 0, verbosity);
6488 }
6489
6490 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6491 level. */
6492
6493 void
6494 debug_loop_num (unsigned num, int verbosity)
6495 {
6496 debug_loop (get_loop (num), verbosity);
6497 }
6498
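/* Usage sketch: the debug_* entry points above are intended for
   interactive use, e.g. from gdb:

     (gdb) call debug_loops (3)

   With VERBOSITY >= 3, print_loops_bb also dumps the body of every
   basic block, so this prints the full contents of each loop.  */
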
6499 /* Return true if BB ends with a call, possibly followed by some
6500 instructions that must stay with the call. Return false
6501 otherwise. */
6502
6503 static bool
6504 gimple_block_ends_with_call_p (basic_block bb)
6505 {
6506 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6507 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
6508 }
6509
6510
6511 /* Return true if BB ends with a conditional branch. Return false
6512 otherwise. */
6513
6514 static bool
6515 gimple_block_ends_with_condjump_p (const_basic_block bb)
6516 {
6517 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6518 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6519 }
6520
6521
6522 /* Return true if we need to add a fake edge to exit at statement T.
6523 Helper function for gimple_flow_call_edges_add. */
6524
6525 static bool
6526 need_fake_edge_p (gimple t)
6527 {
6528 tree fndecl = NULL_TREE;
6529 int call_flags = 0;
6530
6531 /* NORETURN and LONGJMP calls already have an edge to exit.
6532 CONST and PURE calls do not need one.
6533 We don't currently check for CONST and PURE here, although
6534 it would be a good idea, because those attributes are
6535 figured out from the RTL in mark_constant_function, and
6536 the counter incrementation code from -fprofile-arcs
6537 leads to different results from -fbranch-probabilities. */
6538 if (is_gimple_call (t))
6539 {
6540 fndecl = gimple_call_fndecl (t);
6541 call_flags = gimple_call_flags (t);
6542 }
6543
6544 if (is_gimple_call (t)
6545 && fndecl
6546 && DECL_BUILT_IN (fndecl)
6547 && (call_flags & ECF_NOTHROW)
6548 && !(call_flags & ECF_RETURNS_TWICE)
6549 /* fork() doesn't really return twice, but the effect of
6550 wrapping it in __gcov_fork() which calls __gcov_flush()
6551 and clears the counters before forking has the same
6552 effect as returning twice. Force a fake edge. */
6553 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6554 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6555 return false;
6556
6557 if (is_gimple_call (t)
6558 && !(call_flags & ECF_NORETURN))
6559 return true;
6560
6561 if (gimple_code (t) == GIMPLE_ASM
6562 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6563 return true;
6564
6565 return false;
6566 }
6567
6568
6569 /* Add fake edges to the function exit for any non-constant and
6570 non-noreturn calls, and for volatile inline assembly, in the bitmap
6571 of blocks specified by BLOCKS, or in the whole CFG if BLOCKS is
6572 zero. Return the number of blocks that were split.
6573
6574 The goal is to expose cases in which entering a basic block does
6575 not imply that all subsequent instructions must be executed. */
6576
6577 static int
6578 gimple_flow_call_edges_add (sbitmap blocks)
6579 {
6580 int i;
6581 int blocks_split = 0;
6582 int last_bb = last_basic_block;
6583 bool check_last_block = false;
6584
6585 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6586 return 0;
6587
6588 if (! blocks)
6589 check_last_block = true;
6590 else
6591 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6592
6593 /* In the last basic block, before epilogue generation, there will be
6594 a fallthru edge to EXIT. Special care is required if the last insn
6595 of the last basic block is a call because make_edge folds duplicate
6596 edges, which would result in the fallthru edge also being marked
6597 fake, which would result in the fallthru edge being removed by
6598 remove_fake_edges, which would result in an invalid CFG.
6599
6600 Moreover, we can't elide the outgoing fake edge, since the block
6601 profiler needs to take this into account in order to solve the minimal
6602 spanning tree in the case that the call doesn't return.
6603
6604 Handle this by adding a dummy instruction in a new last basic block. */
6605 if (check_last_block)
6606 {
6607 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6608 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6609 gimple t = NULL;
6610
6611 if (!gsi_end_p (gsi))
6612 t = gsi_stmt (gsi);
6613
6614 if (t && need_fake_edge_p (t))
6615 {
6616 edge e;
6617
6618 e = find_edge (bb, EXIT_BLOCK_PTR);
6619 if (e)
6620 {
6621 gsi_insert_on_edge (e, gimple_build_nop ());
6622 gsi_commit_edge_inserts ();
6623 }
6624 }
6625 }
6626
6627 /* Now add fake edges to the function exit for any non-constant
6628 calls since there is no way that we can determine if they will
6629 return or not... */
6630 for (i = 0; i < last_bb; i++)
6631 {
6632 basic_block bb = BASIC_BLOCK (i);
6633 gimple_stmt_iterator gsi;
6634 gimple stmt, last_stmt;
6635
6636 if (!bb)
6637 continue;
6638
6639 if (blocks && !TEST_BIT (blocks, i))
6640 continue;
6641
6642 gsi = gsi_last_bb (bb);
6643 if (!gsi_end_p (gsi))
6644 {
6645 last_stmt = gsi_stmt (gsi);
6646 do
6647 {
6648 stmt = gsi_stmt (gsi);
6649 if (need_fake_edge_p (stmt))
6650 {
6651 edge e;
6652
6653 /* The handling above of the final block before the
6654 epilogue should be enough to verify that there is
6655 no edge to the exit block in CFG already.
6656 Calling make_edge in such case would cause us to
6657 mark that edge as fake and remove it later. */
6658 #ifdef ENABLE_CHECKING
6659 if (stmt == last_stmt)
6660 {
6661 e = find_edge (bb, EXIT_BLOCK_PTR);
6662 gcc_assert (e == NULL);
6663 }
6664 #endif
6665
6666 /* Note that the following may create a new basic block
6667 and renumber the existing basic blocks. */
6668 if (stmt != last_stmt)
6669 {
6670 e = split_block (bb, stmt);
6671 if (e)
6672 blocks_split++;
6673 }
6674 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6675 }
6676 gsi_prev (&gsi);
6677 }
6678 while (!gsi_end_p (gsi));
6679 }
6680 }
6681
6682 if (blocks_split)
6683 verify_flow_info ();
6684
6685 return blocks_split;
6686 }
6687
6688 /* Purge dead abnormal call edges from basic block BB. */
6689
6690 bool
6691 gimple_purge_dead_abnormal_call_edges (basic_block bb)
6692 {
6693 bool changed = gimple_purge_dead_eh_edges (bb);
6694
6695 if (cfun->has_nonlocal_label)
6696 {
6697 gimple stmt = last_stmt (bb);
6698 edge_iterator ei;
6699 edge e;
6700
6701 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
6702 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6703 {
6704 if (e->flags & EDGE_ABNORMAL)
6705 {
6706 remove_edge (e);
6707 changed = true;
6708 }
6709 else
6710 ei_next (&ei);
6711 }
6712
6713 /* See gimple_purge_dead_eh_edges below. */
6714 if (changed)
6715 free_dominance_info (CDI_DOMINATORS);
6716 }
6717
6718 return changed;
6719 }
6720
6721 /* Removes edge E and all the blocks dominated by it, and updates dominance
6722 information. The IL in E->src needs to be updated separately.
6723 If dominance info is not available, only the edge E is removed. */
6724
6725 void
6726 remove_edge_and_dominated_blocks (edge e)
6727 {
6728 VEC (basic_block, heap) *bbs_to_remove = NULL;
6729 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6730 bitmap df, df_idom;
6731 edge f;
6732 edge_iterator ei;
6733 bool none_removed = false;
6734 unsigned i;
6735 basic_block bb, dbb;
6736 bitmap_iterator bi;
6737
6738 if (!dom_info_available_p (CDI_DOMINATORS))
6739 {
6740 remove_edge (e);
6741 return;
6742 }
6743
6744 /* No updating is needed for edges to exit. */
6745 if (e->dest == EXIT_BLOCK_PTR)
6746 {
6747 if (cfgcleanup_altered_bbs)
6748 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6749 remove_edge (e);
6750 return;
6751 }
6752
6753 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6754 that is not dominated by E->dest, then this set is empty. Otherwise,
6755 all the basic blocks dominated by E->dest are removed.
6756
6757 Also, to DF_IDOM we store the immediate dominators of the blocks in
6758 the dominance frontier of E (i.e., of the successors of the
6759 removed blocks, if there are any, and of E->dest otherwise). */
6760 FOR_EACH_EDGE (f, ei, e->dest->preds)
6761 {
6762 if (f == e)
6763 continue;
6764
6765 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6766 {
6767 none_removed = true;
6768 break;
6769 }
6770 }
6771
6772 df = BITMAP_ALLOC (NULL);
6773 df_idom = BITMAP_ALLOC (NULL);
6774
6775 if (none_removed)
6776 bitmap_set_bit (df_idom,
6777 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6778 else
6779 {
6780 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
6781 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6782 {
6783 FOR_EACH_EDGE (f, ei, bb->succs)
6784 {
6785 if (f->dest != EXIT_BLOCK_PTR)
6786 bitmap_set_bit (df, f->dest->index);
6787 }
6788 }
6789 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6790 bitmap_clear_bit (df, bb->index);
6791
6792 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6793 {
6794 bb = BASIC_BLOCK (i);
6795 bitmap_set_bit (df_idom,
6796 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6797 }
6798 }
6799
6800 if (cfgcleanup_altered_bbs)
6801 {
6802 /* Record the set of the altered basic blocks. */
6803 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6804 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6805 }
6806
6807 /* Remove E and the cancelled blocks. */
6808 if (none_removed)
6809 remove_edge (e);
6810 else
6811 {
6812 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6813 delete_basic_block (bb);
6814 }
6815
6816 /* Update the dominance information. The immediate dominator may change only
6817 for blocks whose immediate dominator belongs to DF_IDOM:
6818
6819 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6820 removal. Let Z be a block such that idom(Z) = Y and
6821 Z dominates X after the removal. Before removal, there exists a path P
6822 from Y to X that avoids Z. Let F be the last edge on P that is
6823 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6824 dominates W, and because of P, Z does not dominate W), and W belongs to
6825 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6826 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6827 {
6828 bb = BASIC_BLOCK (i);
6829 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6830 dbb;
6831 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6832 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6833 }
6834
6835 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6836
6837 BITMAP_FREE (df);
6838 BITMAP_FREE (df_idom);
6839 VEC_free (basic_block, heap, bbs_to_remove);
6840 VEC_free (basic_block, heap, bbs_to_fix_dom);
6841 }
6842
6843 /* Purge dead EH edges from basic block BB. */
6844
6845 bool
6846 gimple_purge_dead_eh_edges (basic_block bb)
6847 {
6848 bool changed = false;
6849 edge e;
6850 edge_iterator ei;
6851 gimple stmt = last_stmt (bb);
6852
6853 if (stmt && stmt_can_throw_internal (stmt))
6854 return false;
6855
6856 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6857 {
6858 if (e->flags & EDGE_EH)
6859 {
6860 remove_edge_and_dominated_blocks (e);
6861 changed = true;
6862 }
6863 else
6864 ei_next (&ei);
6865 }
6866
6867 return changed;
6868 }
6869 
/* Purge dead EH edges from every basic block set in the bitmap BLOCKS.  */
6870 bool
6871 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
6872 {
6873 bool changed = false;
6874 unsigned i;
6875 bitmap_iterator bi;
6876
6877 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6878 {
6879 basic_block bb = BASIC_BLOCK (i);
6880
6881 /* Earlier gimple_purge_dead_eh_edges could have removed
6882 this basic block already. */
6883 gcc_assert (bb || changed);
6884 if (bb != NULL)
6885 changed |= gimple_purge_dead_eh_edges (bb);
6886 }
6887
6888 return changed;
6889 }
6890
6891 /* This function is called whenever a new edge is created or
6892 redirected. */
6893
6894 static void
6895 gimple_execute_on_growing_pred (edge e)
6896 {
6897 basic_block bb = e->dest;
6898
6899 if (phi_nodes (bb))
6900 reserve_phi_args_for_new_edge (bb);
6901 }
6902
6903 /* This function is called immediately before edge E is removed from
6904 the edge vector E->dest->preds. */
6905
6906 static void
6907 gimple_execute_on_shrinking_pred (edge e)
6908 {
6909 if (phi_nodes (e->dest))
6910 remove_phi_args (e);
6911 }
6912
6913 /*---------------------------------------------------------------------------
6914 Helper functions for Loop versioning
6915 ---------------------------------------------------------------------------*/
6916
6917 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a
6918 copy of 'first' and both are dominated by basic block 'new_head'.
6919 When 'new_head' was created by splitting 'second's incoming edge,
6920 it received phi arguments on that edge from split_edge(). Later, an
6921 additional edge 'e' was created to connect 'new_head' and 'first'.
6922 This routine now adds, on this new edge 'e', the same phi args that
6923 the 'new_head' to 'second' edge received as part of the splitting. */
6924
6925 static void
6926 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6927 basic_block new_head, edge e)
6928 {
6929 gimple phi1, phi2;
6930 gimple_stmt_iterator psi1, psi2;
6931 tree def;
6932 edge e2 = find_edge (new_head, second);
6933
6934 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6935 edge, we should always have an edge from NEW_HEAD to SECOND. */
6936 gcc_assert (e2 != NULL);
6937
6938 /* Browse all 'second' basic block phi nodes and add phi args to
6939 edge 'e' for 'first' head. PHI args are always in correct order. */
6940
6941 for (psi2 = gsi_start_phis (second),
6942 psi1 = gsi_start_phis (first);
6943 !gsi_end_p (psi2) && !gsi_end_p (psi1);
6944 gsi_next (&psi2), gsi_next (&psi1))
6945 {
6946 phi1 = gsi_stmt (psi1);
6947 phi2 = gsi_stmt (psi2);
6948 def = PHI_ARG_DEF (phi2, e2->dest_idx);
6949 add_phi_arg (phi1, def, e);
6950 }
6951 }
6952
6953
6954 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
6955 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
6956 the destination of the ELSE part. */
6957
6958 static void
6959 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6960 basic_block second_head ATTRIBUTE_UNUSED,
6961 basic_block cond_bb, void *cond_e)
6962 {
6963 gimple_stmt_iterator gsi;
6964 gimple new_cond_expr;
6965 tree cond_expr = (tree) cond_e;
6966 edge e0;
6967
6968 /* Build the new conditional expression. */
6969 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
6970 NULL_TREE, NULL_TREE);
6971
6972 /* Add new cond in cond_bb. */
6973 gsi = gsi_last_bb (cond_bb);
6974 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
6975
6976 /* Adjust edges appropriately to connect new head with first head
6977 as well as second head. */
6978 e0 = single_succ_edge (cond_bb);
6979 e0->flags &= ~EDGE_FALLTHRU;
6980 e0->flags |= EDGE_FALSE_VALUE;
6981 }
6982
6983 struct cfg_hooks gimple_cfg_hooks = {
6984 "gimple",
6985 gimple_verify_flow_info,
6986 gimple_dump_bb, /* dump_bb */
6987 create_bb, /* create_basic_block */
6988 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
6989 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
6990 gimple_can_remove_branch_p, /* can_remove_branch_p */
6991 remove_bb, /* delete_basic_block */
6992 gimple_split_block, /* split_block */
6993 gimple_move_block_after, /* move_block_after */
6994 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
6995 gimple_merge_blocks, /* merge_blocks */
6996 gimple_predict_edge, /* predict_edge */
6997 gimple_predicted_by_p, /* predicted_by_p */
6998 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
6999 gimple_duplicate_bb, /* duplicate_block */
7000 gimple_split_edge, /* split_edge */
7001 gimple_make_forwarder_block, /* make_forwarder_block */
7002 NULL, /* tidy_fallthru_edge */
7003 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7004 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7005 gimple_flow_call_edges_add, /* flow_call_edges_add */
7006 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7007 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7008 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7009 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7010 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
7011 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7012 flush_pending_stmts /* flush_pending_stmts */
7013 };
7014
7015
7016 /* Split all critical edges. */
7017
7018 static unsigned int
7019 split_critical_edges (void)
7020 {
7021 basic_block bb;
7022 edge e;
7023 edge_iterator ei;
7024
7025 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7026 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7027 mappings around the calls to split_edge. */
7028 start_recording_case_labels ();
7029 FOR_ALL_BB (bb)
7030 {
7031 FOR_EACH_EDGE (e, ei, bb->succs)
7032 {
7033 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7034 split_edge (e);
7035 /* PRE inserts statements on edges and expects that,
7036 since split_critical_edges was run beforehand, committing edge
7037 insertions will not split more edges. In addition to critical
7038 edges we must split edges whose source block has multiple
7039 successors and ends in a control flow statement, such as RESX.
7040 Go ahead and split them too. This matches the logic in
7041 gimple_find_edge_insert_loc. */
7042 else if ((!single_pred_p (e->dest)
7043 || phi_nodes (e->dest)
7044 || e->dest == EXIT_BLOCK_PTR)
7045 && e->src != ENTRY_BLOCK_PTR
7046 && !(e->flags & EDGE_ABNORMAL))
7047 {
7048 gimple_stmt_iterator gsi;
7049
7050 gsi = gsi_last_bb (e->src);
7051 if (!gsi_end_p (gsi)
7052 && stmt_ends_bb_p (gsi_stmt (gsi))
7053 && gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN)
7054 split_edge (e);
7055 }
7056 }
7057 }
7058 end_recording_case_labels ();
7059 return 0;
7060 }
7061
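/* For reference, a sketch with hypothetical blocks A, B, C and D: an
   edge is critical when its source has multiple successors and its
   destination has multiple predecessors, e.g.

       A   B
        \ / \
         C   D

   Here B->C is critical: code inserted on that edge can live neither in
   B (it would also run on the B->D path) nor in C (it would also run on
   A->C), so a new block must be placed between B and C.  */
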
7062 struct gimple_opt_pass pass_split_crit_edges =
7063 {
7064 {
7065 GIMPLE_PASS,
7066 "crited", /* name */
7067 NULL, /* gate */
7068 split_critical_edges, /* execute */
7069 NULL, /* sub */
7070 NULL, /* next */
7071 0, /* static_pass_number */
7072 TV_TREE_SPLIT_EDGES, /* tv_id */
7073 PROP_cfg, /* properties_required */
7074 PROP_no_crit_edges, /* properties_provided */
7075 0, /* properties_destroyed */
7076 0, /* todo_flags_start */
7077 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
7078 }
7079 };
7080
7081
7082 /* Build a ternary operation and gimplify it. Emit code before GSI.
7083 Return the gimple_val holding the result. */
7084
7085 tree
7086 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7087 tree type, tree a, tree b, tree c)
7088 {
7089 tree ret;
7090
7091 ret = fold_build3 (code, type, a, b, c);
7092 STRIP_NOPS (ret);
7093
7094 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7095 GSI_SAME_STMT);
7096 }
7097
7098 /* Build a binary operation and gimplify it. Emit code before GSI.
7099 Return the gimple_val holding the result. */
7100
7101 tree
7102 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7103 tree type, tree a, tree b)
7104 {
7105 tree ret;
7106
7107 ret = fold_build2 (code, type, a, b);
7108 STRIP_NOPS (ret);
7109
7110 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7111 GSI_SAME_STMT);
7112 }
7113
7114 /* Build a unary operation and gimplify it. Emit code before GSI.
7115 Return the gimple_val holding the result. */
7116
7117 tree
7118 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7119 tree a)
7120 {
7121 tree ret;
7122
7123 ret = fold_build1 (code, type, a);
7124 STRIP_NOPS (ret);
7125
7126 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7127 GSI_SAME_STMT);
7128 }
7129
7130
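/* A minimal usage sketch for the gimplify_build* helpers above; TYPE,
   A, B, C and GSI are assumed to already be in scope at the insertion
   point.  */
#if 0
  /* Emit code computing (A + B) * C before the statement at GSI and
     get back gimple values holding the intermediate results.  */
  tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
  tree prod = gimplify_build2 (&gsi, MULT_EXPR, type, sum, c);
#endif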
7131 \f
7132 /* Emit return warnings. */
7133
7134 static unsigned int
7135 execute_warn_function_return (void)
7136 {
7137 source_location location;
7138 gimple last;
7139 edge e;
7140 edge_iterator ei;
7141
7142 /* If we have a path to EXIT, then we do return. */
7143 if (TREE_THIS_VOLATILE (cfun->decl)
7144 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7145 {
7146 location = UNKNOWN_LOCATION;
7147 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7148 {
7149 last = last_stmt (e->src);
7150 if (gimple_code (last) == GIMPLE_RETURN
7151 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7152 break;
7153 }
7154 if (location == UNKNOWN_LOCATION)
7155 location = cfun->function_end_locus;
7156 warning (0, "%H%<noreturn%> function does return", &location);
7157 }
7158
7159 /* If we see "return;" in some basic block, then we do reach the end
7160 without returning a value. */
7161 else if (warn_return_type
7162 && !TREE_NO_WARNING (cfun->decl)
7163 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7164 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7165 {
7166 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7167 {
7168 gimple last = last_stmt (e->src);
7169 if (gimple_code (last) == GIMPLE_RETURN
7170 && gimple_return_retval (last) == NULL
7171 && !gimple_no_warning_p (last))
7172 {
7173 location = gimple_location (last);
7174 if (location == UNKNOWN_LOCATION)
7175 location = cfun->function_end_locus;
7176 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7177 TREE_NO_WARNING (cfun->decl) = 1;
7178 break;
7179 }
7180 }
7181 }
7182 return 0;
7183 }
7184
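/* Illustrative user-level examples (hypothetical functions f and g) of
   code that triggers the two warnings above:

     __attribute__ ((noreturn)) void f (void) { }
       -> warning: 'noreturn' function does return

     int g (void) { }
       -> warning (-Wreturn-type): control reaches end of non-void
          function  */
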
7185
7186 /* Given a basic block B which ends with a conditional and has
7187 precisely two successors, determine which of the edges is taken if
7188 the conditional is true and which is taken if the conditional is
7189 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7190
7191 void
7192 extract_true_false_edges_from_block (basic_block b,
7193 edge *true_edge,
7194 edge *false_edge)
7195 {
7196 edge e = EDGE_SUCC (b, 0);
7197
7198 if (e->flags & EDGE_TRUE_VALUE)
7199 {
7200 *true_edge = e;
7201 *false_edge = EDGE_SUCC (b, 1);
7202 }
7203 else
7204 {
7205 *false_edge = e;
7206 *true_edge = EDGE_SUCC (b, 1);
7207 }
7208 }
7209
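/* Usage sketch, assuming BB ends in a GIMPLE_COND and therefore has
   exactly two successor edges, as the comment above requires:  */
#if 0
  edge true_edge, false_edge;
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  /* TRUE_EDGE is taken when the condition holds, FALSE_EDGE otherwise.  */
#endif
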
7210 struct gimple_opt_pass pass_warn_function_return =
7211 {
7212 {
7213 GIMPLE_PASS,
7214 NULL, /* name */
7215 NULL, /* gate */
7216 execute_warn_function_return, /* execute */
7217 NULL, /* sub */
7218 NULL, /* next */
7219 0, /* static_pass_number */
7220 TV_NONE, /* tv_id */
7221 PROP_cfg, /* properties_required */
7222 0, /* properties_provided */
7223 0, /* properties_destroyed */
7224 0, /* todo_flags_start */
7225 0 /* todo_flags_finish */
7226 }
7227 };
7228
7229 /* Emit noreturn warnings. */
7230
7231 static unsigned int
7232 execute_warn_function_noreturn (void)
7233 {
7234 if (warn_missing_noreturn
7235 && !TREE_THIS_VOLATILE (cfun->decl)
7236 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
7237 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
7238 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
7239 "for attribute %<noreturn%>",
7240 cfun->decl);
7241 return 0;
7242 }
7243
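/* Illustrative example (hypothetical user function die): a function
   such as

     void die (const char *msg) { fprintf (stderr, "%s\n", msg); exit (1); }

   has no path to the exit block, because exit is itself noreturn, so
   with -Wmissing-noreturn the pass above suggests marking it with
   attribute noreturn.  */
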
7244 struct gimple_opt_pass pass_warn_function_noreturn =
7245 {
7246 {
7247 GIMPLE_PASS,
7248 NULL, /* name */
7249 NULL, /* gate */
7250 execute_warn_function_noreturn, /* execute */
7251 NULL, /* sub */
7252 NULL, /* next */
7253 0, /* static_pass_number */
7254 TV_NONE, /* tv_id */
7255 PROP_cfg, /* properties_required */
7256 0, /* properties_provided */
7257 0, /* properties_destroyed */
7258 0, /* todo_flags_start */
7259 0 /* todo_flags_finish */
7260 }
7261 };