/* Top-level control of tree optimizations.
   Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "diagnostic.h"
#include "flags.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "function.h"
#include "langhooks.h"
#include "toplev.h"
#include "cgraph.h"
#include "tree-inline.h"
#include "tree-mudflap.h"
#include "tree-pass.h"
#include "ggc.h"
#include "graph.h"
#include "cfgloop.h"
#include "except.h"

/* Global variables used to communicate with passes.  */
int dump_flags;
bool in_gimple_form;

/* The root of the compilation pass tree, once constructed.  */
static struct tree_opt_pass *all_passes, *all_ipa_passes, *all_lowering_passes;

/* Gate: execute, or not, all of the non-trivial optimizations.  */

static bool
gate_all_optimizations (void)
{
  return (optimize >= 1
	  /* Don't bother doing anything if the program has errors.  */
	  && !(errorcount || sorrycount));
}

static struct tree_opt_pass pass_all_optimizations =
{
  NULL,					/* name */
  gate_all_optimizations,		/* gate */
  NULL,					/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};

/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static void
execute_cleanup_cfg_post_optimizing (void)
{
  cleanup_tree_cfg ();
  cleanup_dead_labels ();
  group_case_labels ();
}

static struct tree_opt_pass pass_cleanup_cfg_post_optimizing =
{
  "final_cleanup",			/* name */
  NULL,					/* gate */
  execute_cleanup_cfg_post_optimizing,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};

/* Pass: do the actions required to finish with tree-ssa optimization
   passes.  */

static void
execute_free_datastructures (void)
{
  tree *chain;

  /* ??? This isn't the right place for this.  Worse, it got computed
     more or less at random in various passes.  */
  free_dominance_info (CDI_DOMINATORS);

  /* Emit gotos for implicit jumps.  */
  disband_implicit_edges ();

  /* Remove the ssa structures.  Do it here since this includes statement
     annotations that need to be intact during disband_implicit_edges.  */
  delete_tree_ssa ();

  /* Re-chain the statements from the blocks.  */
  chain = &DECL_SAVED_TREE (current_function_decl);
  *chain = alloc_stmt_list ();

  /* And get rid of annotations we no longer need.  */
  delete_tree_cfg_annotations ();
}

static struct tree_opt_pass pass_free_datastructures =
{
  NULL,					/* name */
  NULL,					/* gate */
  execute_free_datastructures,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};

/* Pass: fixup_cfg.  IPA passes or the compilation of earlier functions
   might have changed some properties, such as marking functions nothrow.
   Remove the now-redundant edges and basic blocks.  */

static void
execute_fixup_cfg (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  if (cfun->eh)
    FOR_EACH_BB (bb)
      {
	for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	  {
	    tree stmt = bsi_stmt (bsi);
	    tree call = get_call_expr_in (stmt);

	    if (call && call_expr_flags (call) & (ECF_CONST | ECF_PURE))
	      TREE_SIDE_EFFECTS (call) = 0;
	    if (!tree_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
	      remove_stmt_from_eh_region (stmt);
	  }
	tree_purge_dead_eh_edges (bb);
      }

  cleanup_tree_cfg ();
}

static struct tree_opt_pass pass_fixup_cfg =
{
  NULL,					/* name */
  NULL,					/* gate */
  execute_fixup_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};

/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  */

static void
execute_init_datastructures (void)
{
  /* Allocate hash tables, arrays and other structures.  */
  init_tree_ssa ();
}

static struct tree_opt_pass pass_init_datastructures =
{
  NULL,					/* name */
  NULL,					/* gate */
  execute_init_datastructures,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};

/* Iterate over the pass tree allocating dump file numbers.  We want
   to do this depth first, independently of whether the pass is
   enabled or not.  */

static void
register_one_dump_file (struct tree_opt_pass *pass, bool ipa, int n)
{
  char *dot_name, *flag_name, *glob_name;
  char num[10];

  /* See below in next_pass_1.  */
  num[0] = '\0';
  if (pass->static_pass_number != -1)
    sprintf (num, "%d", ((int) pass->static_pass_number < 0
			 ? 1 : pass->static_pass_number));

  dot_name = concat (".", pass->name, num, NULL);
  if (ipa)
    {
      flag_name = concat ("ipa-", pass->name, num, NULL);
      glob_name = concat ("ipa-", pass->name, NULL);
      /* First IPA dump is cgraph that is dumped via separate channels.  */
      pass->static_pass_number = dump_register (dot_name, flag_name, glob_name,
						TDF_IPA, n + 1, 0);
    }
  else if (pass->properties_provided & PROP_trees)
    {
      flag_name = concat ("tree-", pass->name, num, NULL);
      glob_name = concat ("tree-", pass->name, NULL);
      pass->static_pass_number = dump_register (dot_name, flag_name, glob_name,
						TDF_TREE, n + TDI_tree_all, 0);
    }
  else
    {
      flag_name = concat ("rtl-", pass->name, num, NULL);
      glob_name = concat ("rtl-", pass->name, NULL);
      pass->static_pass_number = dump_register (dot_name, flag_name, glob_name,
						TDF_RTL, n, pass->letter);
    }
}

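/* Recursively register dump files for the pass chain rooted at PASS,
   propagating the set of IL properties available to each pass.
   PROPERTIES is the set available on entry; IPA is true when the chain
   holds interprocedural passes.  Returns the set of properties
   available after the last pass in the chain.  */
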
static int
register_dump_files (struct tree_opt_pass *pass, bool ipa, int properties)
{
  static int n = 0;
  do
    {
      int new_properties;
      int pass_number;

      pass->properties_required = properties;
      new_properties =
	(properties | pass->properties_provided) & ~pass->properties_destroyed;

      /* Reset the counter when we reach RTL-based passes.  */
      if ((pass->properties_provided ^ pass->properties_required) & PROP_rtl)
	n = 0;

      pass_number = n;
      if (pass->name)
	n++;

      if (pass->sub)
	new_properties = register_dump_files (pass->sub, ipa, new_properties);

      /* If we have a gate, combine the properties that we could have with
	 and without the pass being examined.  */
      if (pass->gate)
	properties &= new_properties;
      else
	properties = new_properties;

      pass->properties_provided = properties;
      if (pass->name)
	register_one_dump_file (pass, ipa, pass_number);

      pass = pass->next;
    }
  while (pass);

  return properties;
}

/* Add a pass to the pass list.  Duplicate the pass if it's already
   in the list.  */

static struct tree_opt_pass **
next_pass_1 (struct tree_opt_pass **list, struct tree_opt_pass *pass)
{

  /* A nonzero static_pass_number indicates that the
     pass is already in the list.  */
  if (pass->static_pass_number)
    {
      struct tree_opt_pass *new;

      new = xmalloc (sizeof (*new));
      memcpy (new, pass, sizeof (*new));

      /* Indicate to register_dump_files that this pass has duplicates,
	 and so it should rename the dump file.  The first instance keeps
	 a negative static_pass_number; the number of duplicates is
	 -static_pass_number - 1.  Subsequent instances get a positive
	 static_pass_number that is simply their instance number.  */
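      /* For example, a pass added three times ends up with
	 static_pass_number -3 on the original instance and 2 and 3 on the
	 copies; register_one_dump_file then produces dump suffixes 1, 2
	 and 3.  A pass added only once keeps -1 and gets no suffix.  */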
      if (pass->name)
	{
	  pass->static_pass_number -= 1;
	  new->static_pass_number = -pass->static_pass_number;
	}

      *list = new;
    }
  else
    {
      pass->static_pass_number = -1;
      *list = pass;
    }

  return &(*list)->next;

}

/* Construct the pass tree.  */

void
init_tree_optimization_passes (void)
{
  struct tree_opt_pass **p;

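  /* NEXT_PASS chains PASS onto the list currently being built at *p;
     next_pass_1 duplicates the pass if it already appears elsewhere in
     the tree.  */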
#define NEXT_PASS(PASS) (p = next_pass_1 (p, &PASS))
  /* Interprocedural optimization passes.  */
  p = &all_ipa_passes;
  NEXT_PASS (pass_ipa_inline);
  *p = NULL;

  /* All passes needed to lower the function into a shape the optimizers
     can operate on.  Unlike the rest of the local passes (all_passes),
     these are run before the interprocedural passes.  */
  p = &all_lowering_passes;
  NEXT_PASS (pass_remove_useless_stmts);
  NEXT_PASS (pass_mudflap_1);
  NEXT_PASS (pass_lower_cf);
  NEXT_PASS (pass_lower_eh);
  NEXT_PASS (pass_build_cfg);
  NEXT_PASS (pass_lower_complex_O0);
  NEXT_PASS (pass_lower_vector);
  NEXT_PASS (pass_warn_function_return);
  NEXT_PASS (pass_tree_profile);
  *p = NULL;

  p = &all_passes;
  NEXT_PASS (pass_fixup_cfg);
  NEXT_PASS (pass_init_datastructures);
  NEXT_PASS (pass_all_optimizations);
  NEXT_PASS (pass_warn_function_noreturn);
  NEXT_PASS (pass_mudflap_2);
  NEXT_PASS (pass_free_datastructures);
  NEXT_PASS (pass_expand);
  NEXT_PASS (pass_rest_of_compilation);
  *p = NULL;

  p = &pass_all_optimizations.sub;
  NEXT_PASS (pass_referenced_vars);
  NEXT_PASS (pass_create_structure_vars);
  NEXT_PASS (pass_build_ssa);
  NEXT_PASS (pass_build_pta);
  NEXT_PASS (pass_may_alias);
  NEXT_PASS (pass_return_slot);
  NEXT_PASS (pass_del_pta);
  NEXT_PASS (pass_rename_ssa_copies);
  NEXT_PASS (pass_early_warn_uninitialized);

  /* Initial scalar cleanups.  */
  NEXT_PASS (pass_ccp);
  NEXT_PASS (pass_fre);
  NEXT_PASS (pass_dce);
  NEXT_PASS (pass_forwprop);
  NEXT_PASS (pass_vrp);
  NEXT_PASS (pass_copy_prop);
  NEXT_PASS (pass_dce);
  NEXT_PASS (pass_merge_phi);
  NEXT_PASS (pass_dominator);

  NEXT_PASS (pass_phiopt);
  NEXT_PASS (pass_build_pta);
  NEXT_PASS (pass_may_alias);
  NEXT_PASS (pass_del_pta);
  NEXT_PASS (pass_tail_recursion);
  NEXT_PASS (pass_profile);
  NEXT_PASS (pass_ch);
  NEXT_PASS (pass_stdarg);
  NEXT_PASS (pass_lower_complex);
  NEXT_PASS (pass_sra);
  /* FIXME: SRA may generate arbitrary gimple code, exposing new
     aliased and call-clobbered variables.  As mentioned below,
     pass_may_alias should be a TODO item.  */
  NEXT_PASS (pass_may_alias);
  NEXT_PASS (pass_rename_ssa_copies);
  NEXT_PASS (pass_dominator);
  NEXT_PASS (pass_copy_prop);
  NEXT_PASS (pass_dce);
  NEXT_PASS (pass_dse);
  NEXT_PASS (pass_may_alias);
  NEXT_PASS (pass_forwprop);
  NEXT_PASS (pass_phiopt);
  NEXT_PASS (pass_store_ccp);
  NEXT_PASS (pass_store_copy_prop);
  NEXT_PASS (pass_fold_builtins);
  /* FIXME: May alias should be a TODO, but for 4.0.0 we add may_alias
     right after fold_builtins, which can create arbitrary GIMPLE.  */
  NEXT_PASS (pass_may_alias);
  NEXT_PASS (pass_cse_reciprocals);
  NEXT_PASS (pass_split_crit_edges);
  NEXT_PASS (pass_reassoc);
  NEXT_PASS (pass_pre);
  NEXT_PASS (pass_sink_code);
  NEXT_PASS (pass_loop);
  NEXT_PASS (pass_dominator);
  NEXT_PASS (pass_copy_prop);
  NEXT_PASS (pass_cd_dce);
  /* FIXME: If DCE is not run before checking for uninitialized uses,
     we may get false warnings (e.g., testsuite/gcc.dg/uninit-5.c).
     However, this also causes us to misdiagnose cases that should be
     real warnings (e.g., testsuite/gcc.dg/pr18501.c).

     To fix the false positives in uninit-5.c, we would have to
     account for the predicates protecting the set and the use of each
     variable.  Using a representation like Gated Single Assignment
     may help.  */
  NEXT_PASS (pass_late_warn_uninitialized);
  NEXT_PASS (pass_dse);
  NEXT_PASS (pass_forwprop);
  NEXT_PASS (pass_phiopt);
  NEXT_PASS (pass_tail_calls);
  NEXT_PASS (pass_rename_ssa_copies);
  NEXT_PASS (pass_uncprop);
  NEXT_PASS (pass_del_ssa);
  NEXT_PASS (pass_nrv);
  NEXT_PASS (pass_remove_useless_vars);
  NEXT_PASS (pass_mark_used_blocks);
  NEXT_PASS (pass_cleanup_cfg_post_optimizing);
  *p = NULL;

  p = &pass_loop.sub;
  NEXT_PASS (pass_loop_init);
  NEXT_PASS (pass_copy_prop);
  NEXT_PASS (pass_lim);
  NEXT_PASS (pass_unswitch);
  NEXT_PASS (pass_scev_cprop);
  NEXT_PASS (pass_record_bounds);
  NEXT_PASS (pass_linear_transform);
  NEXT_PASS (pass_iv_canon);
  NEXT_PASS (pass_if_conversion);
  NEXT_PASS (pass_vectorize);
  /* NEXT_PASS (pass_may_alias) cannot be done again because the
     vectorizer creates alias relations that are not supported by
     pass_may_alias.  */
  NEXT_PASS (pass_lower_vector_ssa);
  NEXT_PASS (pass_complete_unroll);
  NEXT_PASS (pass_iv_optimize);
  NEXT_PASS (pass_loop_done);
  *p = NULL;

#undef NEXT_PASS

  register_dump_files (all_lowering_passes, false, PROP_gimple_any);
  register_dump_files (all_passes, false, PROP_gimple_any
					  | PROP_gimple_lcf
					  | PROP_gimple_leh
					  | PROP_cfg);
  register_dump_files (all_ipa_passes, true, PROP_gimple_any
					     | PROP_gimple_lcf
					     | PROP_gimple_leh
					     | PROP_cfg);
}

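/* Verification TODO flags already satisfied at the end of the previous
   pass; execute_one_pass uses this to avoid re-running the same
   verifiers at the start of the next pass.  */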
static unsigned int last_verified;

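/* Perform the TODO actions in FLAGS for PASS: incremental SSA updates,
   CFG cleanup, function/cgraph dumping, garbage collection and, when
   checking is enabled, IL verification.  USE_REQUIRED selects whether
   the pass's required or provided properties describe the current IL
   when dumping.  */
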
static void
execute_todo (struct tree_opt_pass *pass, unsigned int flags, bool use_required)
{
  int properties
    = use_required ? pass->properties_required : pass->properties_provided;

#if defined ENABLE_CHECKING
  if (need_ssa_update_p ())
    gcc_assert (flags & TODO_update_ssa_any);
#endif

  if (flags & TODO_update_ssa_any)
    {
      unsigned update_flags = flags & TODO_update_ssa_any;
      update_ssa (update_flags);
    }

  if (flags & TODO_cleanup_cfg)
    {
      if (current_loops)
	cleanup_tree_cfg_loop ();
      else
	cleanup_tree_cfg ();
    }

  if ((flags & TODO_dump_func)
      && dump_file && current_function_decl)
    {
      if (properties & PROP_trees)
	dump_function_to_file (current_function_decl,
			       dump_file, dump_flags);
      else if (properties & PROP_cfg)
	print_rtl_with_bb (dump_file, get_insns ());
      else
	print_rtl (dump_file, get_insns ());

      /* Flush the file.  If verification fails, we won't be able to
	 close the file before dying.  */
      fflush (dump_file);
    }
  if ((flags & TODO_dump_cgraph)
      && dump_file && !current_function_decl)
    {
      dump_cgraph (dump_file);
      /* Flush the file.  If verification fails, we won't be able to
	 close the file before aborting.  */
      fflush (dump_file);
    }

  if (flags & TODO_ggc_collect)
    {
      ggc_collect ();
    }

#if defined ENABLE_CHECKING
  if ((pass->properties_required & PROP_ssa)
      && !(pass->properties_destroyed & PROP_ssa))
    verify_ssa (true);
  if (flags & TODO_verify_flow)
    verify_flow_info ();
  if (flags & TODO_verify_stmts)
    verify_stmts ();
  if (flags & TODO_verify_loops)
    verify_loop_closed_ssa ();
#endif
}

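/* Execute a single pass: check its gate, run the pre-pass TODO actions,
   open the dump file if one was registered, time and invoke the execute
   callback, then run the finishing TODO actions.  Return true if the
   pass was actually executed (its gate allowed it).  */
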
static bool
execute_one_pass (struct tree_opt_pass *pass)
{
  unsigned int todo;

  /* See if we're supposed to run this pass.  */
  if (pass->gate && !pass->gate ())
    return false;

  /* Note that the folders should only create gimple expressions.
     This is a hack until the new folder is ready.  */
  in_gimple_form = (pass->properties_provided & PROP_trees) != 0;

  /* Run pre-pass verification.  */
  todo = pass->todo_flags_start & ~last_verified;
  if (todo)
    execute_todo (pass, todo, true);

  /* If a dump file name is present, open it if enabled.  */
  if (pass->static_pass_number != -1)
    {
      bool initializing_dump = !dump_initialized_p (pass->static_pass_number);
      dump_file_name = get_dump_file_name (pass->static_pass_number);
      dump_file = dump_begin (pass->static_pass_number, &dump_flags);
      if (dump_file && current_function_decl)
	{
	  const char *dname, *aname;
	  dname = lang_hooks.decl_printable_name (current_function_decl, 2);
	  aname = (IDENTIFIER_POINTER
		   (DECL_ASSEMBLER_NAME (current_function_decl)));
	  fprintf (dump_file, "\n;; Function %s (%s)%s\n\n", dname, aname,
	     cfun->function_frequency == FUNCTION_FREQUENCY_HOT
	     ? " (hot)"
	     : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
	     ? " (unlikely executed)"
	     : "");
	}

      if (initializing_dump
	  && graph_dump_format != no_graph
	  && (pass->properties_provided & (PROP_cfg | PROP_rtl))
	      == (PROP_cfg | PROP_rtl))
	clean_graph_dump_file (dump_file_name);
    }

  /* If a timevar is present, start it.  */
  if (pass->tv_id)
    timevar_push (pass->tv_id);

  /* Do it!  */
  if (pass->execute)
    pass->execute ();

  /* Stop timevar.  */
  if (pass->tv_id)
    timevar_pop (pass->tv_id);

  if (dump_file
      && (pass->properties_provided & (PROP_cfg | PROP_rtl))
	  == (PROP_cfg | PROP_rtl))
    print_rtl_with_bb (dump_file, get_insns ());

  /* Run post-pass cleanup and verification.  */
  todo = pass->todo_flags_finish;
  last_verified = todo & TODO_verify_all;
  if (todo)
    execute_todo (pass, todo, false);

  /* Flush and close dump file.  */
  if (dump_file_name)
    {
      free ((char *) dump_file_name);
      dump_file_name = NULL;
    }
  if (dump_file)
    {
      dump_end (pass->static_pass_number, dump_file);
      dump_file = NULL;
    }

  return true;
}

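/* Execute each pass in the list starting at PASS; when a pass is
   executed and has sub-passes, run the sub-pass list as well.  */
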
static void
execute_pass_list (struct tree_opt_pass *pass)
{
  do
    {
      if (execute_one_pass (pass) && pass->sub)
	execute_pass_list (pass->sub);
      pass = pass->next;
    }
  while (pass);
}

/* Same as execute_pass_list but assume that subpasses of IPA passes
   are local passes.  */
static void
execute_ipa_pass_list (struct tree_opt_pass *pass)
{
  do
    {
      if (execute_one_pass (pass) && pass->sub)
	{
	  struct cgraph_node *node;
	  for (node = cgraph_nodes; node; node = node->next)
	    if (node->analyzed)
	      {
		push_cfun (DECL_STRUCT_FUNCTION (node->decl));
		current_function_decl = node->decl;
		execute_pass_list (pass->sub);
		free_dominance_info (CDI_DOMINATORS);
		free_dominance_info (CDI_POST_DOMINATORS);
		current_function_decl = NULL;
		pop_cfun ();
		ggc_collect ();
	      }
	}
      pass = pass->next;
    }
  while (pass);
}

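/* Run the lowering pass list (all_lowering_passes) over function FN,
   switching to FN's context for the duration.  */
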
void
tree_lowering_passes (tree fn)
{
  tree saved_current_function_decl = current_function_decl;

  current_function_decl = fn;
  push_cfun (DECL_STRUCT_FUNCTION (fn));
  tree_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_pass_list (all_lowering_passes);
  free_dominance_info (CDI_POST_DOMINATORS);
  compact_blocks ();
  current_function_decl = saved_current_function_decl;
  bitmap_obstack_release (NULL);
  pop_cfun ();
}

/* Execute all IPA passes.  */
void
ipa_passes (void)
{
  cfun = NULL;
  tree_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_ipa_passes);
  bitmap_obstack_release (NULL);
}

/* Update recursively all inlined_to pointers of functions
   inlined into NODE to INLINED_TO.  */
static void
update_inlined_to_pointers (struct cgraph_node *node,
			    struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    {
      if (e->callee->global.inlined_to)
	{
	  e->callee->global.inlined_to = inlined_to;
	  update_inlined_to_pointers (e->callee, inlined_to);
	}
    }
}

/* For functions-as-trees languages, this performs all optimization and
   compilation for FNDECL.  */

void
tree_rest_of_compilation (tree fndecl)
{
  location_t saved_loc;
  struct cgraph_node *saved_node = NULL, *node;

  timevar_push (TV_EXPAND);

  gcc_assert (!flag_unit_at_a_time || cgraph_global_info_ready);

  /* Initialize the RTL code for the function.  */
  current_function_decl = fndecl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (fndecl);
  init_function_start (fndecl);

  /* Even though we're inside a function body, we still don't want to
     call expand_expr to calculate the size of a variable-sized array.
     We haven't necessarily assigned RTL to all variables yet, so it's
     not safe to try to expand expressions involving them.  */
  cfun->x_dont_save_pending_sizes_p = 1;
  cfun->after_inlining = true;

  node = cgraph_node (fndecl);

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  This means not lowering some constructs
     such as exception handling.  */
  if (cgraph_preserve_function_body_p (fndecl))
    {
      if (!flag_unit_at_a_time)
	{
	  struct cgraph_edge *e;

	  saved_node = cgraph_clone_node (node, node->count, 1);
	  for (e = saved_node->callees; e; e = e->next_callee)
	    if (!e->inline_failed)
	      cgraph_clone_inlined_nodes (e, true);
	}
      cfun->saved_static_chain_decl = cfun->static_chain_decl;
      save_body (fndecl, &cfun->saved_args, &cfun->saved_static_chain_decl);
    }

  if (flag_inline_trees)
    {
      struct cgraph_edge *e;
      for (e = node->callees; e; e = e->next_callee)
	if (!e->inline_failed || warn_inline)
	  break;
      if (e)
	{
	  timevar_push (TV_INTEGRATION);
	  optimize_inline_calls (fndecl);
	  timevar_pop (TV_INTEGRATION);
	}
    }
  /* We are not going to maintain the cgraph edges up to date.
     Kill them so they won't confuse us.  */
  while (node->callees)
    {
      /* In non-unit-at-a-time mode we must mark all referenced functions
	 as needed.  */
      if (node->callees->callee->analyzed && !flag_unit_at_a_time)
	cgraph_mark_needed_node (node->callees->callee);
      cgraph_remove_edge (node->callees);
    }

  /* We are not going to maintain the cgraph edges up to date.
     Kill them so they won't confuse us.  */
  cgraph_node_remove_callees (node);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);
  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation.  */

  tree_register_cfg_hooks ();
  /* Perform all tree transforms and optimizations.  */
  execute_pass_list (all_passes);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* Restore original body if still needed.  */
  if (cfun->saved_cfg)
    {
      DECL_ARGUMENTS (fndecl) = cfun->saved_args;
      cfun->cfg = cfun->saved_cfg;
      cfun->eh = cfun->saved_eh;
      cfun->saved_cfg = NULL;
      cfun->saved_eh = NULL;
      cfun->saved_args = NULL_TREE;
      cfun->static_chain_decl = cfun->saved_static_chain_decl;
      cfun->saved_static_chain_decl = NULL;
      /* When not in unit-at-a-time mode, we must preserve the out-of-line
	 copy representing the node before inlining.  Restore its original
	 outgoing edges using the clone we created earlier.  */
      if (!flag_unit_at_a_time)
	{
	  struct cgraph_edge *e;

	  node = cgraph_node (current_function_decl);
	  cgraph_node_remove_callees (node);
	  node->callees = saved_node->callees;
	  saved_node->callees = NULL;
	  update_inlined_to_pointers (node, node);
	  for (e = node->callees; e; e = e->next_callee)
	    e->caller = node;
	  cgraph_remove_node (saved_node);
	}
    }
  else
    DECL_SAVED_TREE (fndecl) = NULL;
  cfun = 0;

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (fndecl) && TREE_TYPE (fndecl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (fndecl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (0, "%Jsize of return value of %qD is %u bytes",
		     fndecl, fndecl, size_as_int);
	  else
	    warning (0, "%Jsize of return value of %qD is larger than %wd bytes",
		     fndecl, fndecl, larger_than_size);
	}
    }

  if (!flag_inline_trees)
    {
      DECL_SAVED_TREE (fndecl) = NULL;
      if (DECL_STRUCT_FUNCTION (fndecl) == 0
	  && !cgraph_node (fndecl)->origin)
	{
	  /* Stop pointing to the local nodes about to be freed.
	     But DECL_INITIAL must remain nonzero so we know this
	     was an actual function definition.
	     For a nested function, this is done in c_pop_function_context.
	     If rest_of_compilation set this to 0, leave it 0.  */
	  if (DECL_INITIAL (fndecl) != 0)
	    DECL_INITIAL (fndecl) = error_mark_node;
	}
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_EXPAND);
}