tree-optimize (execute_fixup_cfg): Set after_inlining flag.
[gcc.git] / gcc / tree-optimize.c
1 /* Top-level control of tree optimizations.
2 Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "expr.h"
33 #include "diagnostic.h"
34 #include "basic-block.h"
35 #include "flags.h"
36 #include "tree-flow.h"
37 #include "tree-dump.h"
38 #include "timevar.h"
39 #include "function.h"
40 #include "langhooks.h"
41 #include "toplev.h"
42 #include "flags.h"
43 #include "cgraph.h"
44 #include "tree-inline.h"
45 #include "tree-mudflap.h"
46 #include "tree-pass.h"
47 #include "ggc.h"
48 #include "cgraph.h"
49 #include "graph.h"
50 #include "cfgloop.h"
51 #include "except.h"
52
53
54 /* Gate: execute, or not, all of the non-trivial optimizations. */
55
56 static bool
57 gate_all_optimizations (void)
58 {
59 return (optimize >= 1
60 /* Don't bother doing anything if the program has errors. */
61 && !(errorcount || sorrycount));
62 }
63
/* Pass descriptor: container for the non-trivial tree optimizations.
   It has no name or execute function of its own; its only job is to
   gate the passes hung below it via gate_all_optimizations.
   NOTE(review): the sub field is NULL here — presumably populated by
   the pass manager elsewhere; confirm against the pass registration
   code.  */

struct tree_opt_pass pass_all_optimizations =
{
  NULL,					/* name */
  gate_all_optimizations,		/* gate */
  NULL,					/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
80
/* Pass descriptor: container for the early local passes.  Like
   pass_all_optimizations it has no execute function of its own and is
   gated by gate_all_optimizations (so it runs only when optimizing and
   the program has no errors).  */

struct tree_opt_pass pass_early_local_passes =
{
  NULL,					/* name */
  gate_all_optimizations,		/* gate */
  NULL,					/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
97
/* Pass: cleanup the CFG before the IPA passes run.  Only a plain round
   of CFG cleanup is done here; the label cleanups and case node
   grouping described for the post-optimizing variant happen in
   execute_cleanup_cfg_post_optimizing below.  */
102
static unsigned int
execute_cleanup_cfg_pre_ipa (void)
{
  /* A single round of CFG cleanup; no TODO flags to report back.  */
  cleanup_tree_cfg ();

  return 0;
}
109
/* Pass descriptor: pre-IPA CFG cleanup ("cleanup_cfg").  Runs
   unconditionally (no gate), requires a CFG, and dumps the function
   afterwards when dumping is enabled.  */

struct tree_opt_pass pass_cleanup_cfg =
{
  "cleanup_cfg",			/* name */
  NULL,					/* gate */
  execute_cleanup_cfg_pre_ipa,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
126
127
/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  /* Fold COND_EXPR conditions first so the cleanup below sees the
     simplified form, then tidy up the flowgraph.  */
  fold_cond_expr_cond ();
  cleanup_tree_cfg ();
  /* Drop labels that are no longer referenced and merge adjacent
     case labels in switch statements.  */
  cleanup_dead_labels ();
  group_case_labels ();
  return 0;
}
142
/* Pass descriptor: final CFG cleanup ("final_cleanup") run after the
   tree optimizers, just before expansion to RTL.  Requires a CFG and
   dumps the function afterwards when dumping is enabled.  */

struct tree_opt_pass pass_cleanup_cfg_post_optimizing =
{
  "final_cleanup",			/* name */
  NULL,					/* gate */
  execute_cleanup_cfg_post_optimizing,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
159
/* Pass: do the actions required to finish with tree-ssa optimization
   passes.  Frees dominance information and tears down the SSA form;
   the statement order below is significant.  */

static unsigned int
execute_free_datastructures (void)
{
  /* ??? This isn't the right place for this.  Worse, it got computed
     more or less at random in various passes.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Remove the ssa structures.  Do it here since this includes statement
     annotations that need to be intact during disband_implicit_edges.  */
  delete_tree_ssa ();
  return 0;
}
176
/* Pass descriptor: anonymous pass that runs execute_free_datastructures
   to release dominance info and SSA structures.  Requires a CFG; runs
   unconditionally.  */

struct tree_opt_pass pass_free_datastructures =
{
  NULL,					/* name */
  NULL,					/* gate */
  execute_free_datastructures,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
193 /* Pass: free cfg annotations. */
194
195 static unsigned int
196 execute_free_cfg_annotations (void)
197 {
198 basic_block bb;
199 block_stmt_iterator bsi;
200
201 /* Emit gotos for implicit jumps. */
202 disband_implicit_edges ();
203
204 /* Remove annotations from every tree in the function. */
205 FOR_EACH_BB (bb)
206 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
207 {
208 tree stmt = bsi_stmt (bsi);
209 ggc_free (stmt->base.ann);
210 stmt->base.ann = NULL;
211 }
212
213 /* And get rid of annotations we no longer need. */
214 delete_tree_cfg_annotations ();
215
216 #ifdef ENABLE_CHECKING
217 /* Once the statement annotations have been removed, we can verify
218 the integrity of statements in the EH throw table. */
219 verify_eh_throw_table_statements ();
220 #endif
221 return 0;
222 }
223
/* Pass descriptor: anonymous pass that runs execute_free_cfg_annotations
   to disband implicit edges and release statement/CFG annotations.
   Requires a CFG; runs unconditionally.  */

struct tree_opt_pass pass_free_cfg_annotations =
{
  NULL,					/* name */
  NULL,					/* gate */
  execute_free_cfg_annotations,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
240
241 /* Return true if BB has at least one abnormal outgoing edge. */
242
243 static inline bool
244 has_abnormal_outgoing_edge_p (basic_block bb)
245 {
246 edge e;
247 edge_iterator ei;
248
249 FOR_EACH_EDGE (e, ei, bb->succs)
250 if (e->flags & EDGE_ABNORMAL)
251 return true;
252
253 return false;
254 }
255
/* Pass: fixup_cfg.  IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow or
   added calls that can potentially go to non-local labels.  Remove redundant
   edges and basic blocks, and create new ones if necessary.  */

static unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  /* From here on the function body reflects the results of inlining.  */
  cfun->after_inlining = true;

  /* First walk: propagate properties the IPA passes may have discovered
     (const/pure, nothrow) onto the call expressions, and prune EH
     regions/edges that are consequently dead.  */
  if (cfun->eh)
    FOR_EACH_BB (bb)
      {
	for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	  {
	    tree stmt = bsi_stmt (bsi);
	    tree call = get_call_expr_in (stmt);
	    tree decl = call ? get_callee_fndecl (call) : NULL;

	    /* A call now known const or pure has no side effects.
	       NOTE(review): update_stmt is called before the bit is
	       cleared — verify that this is the intended order, since
	       the re-scan would normally be wanted after the change.  */
	    if (decl && call_expr_flags (call) & (ECF_CONST | ECF_PURE)
		&& TREE_SIDE_EFFECTS (call))
	      {
		update_stmt (stmt);
		TREE_SIDE_EFFECTS (call) = 0;
	      }
	    if (decl && TREE_NOTHROW (decl))
	      TREE_NOTHROW (call) = 1;
	    /* Statements that can no longer throw must leave the EH
	       tables.  */
	    if (!tree_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
	      remove_stmt_from_eh_region (stmt);
	  }
	tree_purge_dead_eh_edges (bb);
      }

  /* Second walk: inlining may have introduced calls that can transfer
     control to a non-local label; make sure each such call ends its
     block and has abnormal edges to the possible receivers.  */
  if (current_function_has_nonlocal_label)
    {
      FOR_EACH_BB (bb)
	{
	  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	    {
	      tree stmt = bsi_stmt (bsi);
	      if (tree_can_make_abnormal_goto (stmt))
		{
		  if (stmt == bsi_stmt (bsi_last (bb)))
		    {
		      /* Already last in its block; just add the edges
			 if they are missing.  */
		      if (!has_abnormal_outgoing_edge_p (bb))
			make_abnormal_goto_edges (bb, true);
		    }
		  else
		    {
		      /* Split after the statement so it becomes the
			 last one of BB, then add the abnormal edges.
			 Note BB is rebound to the (old) source block
			 of the new fallthru edge.  */
		      edge e = split_block (bb, stmt);
		      bb = e->src;
		      make_abnormal_goto_edges (bb, true);
		    }
		  /* The rest of the (possibly split) block is handled
		     by the iteration over the new blocks.  */
		  break;
		}

	      /* Update PHIs on nonlocal goto receivers we (possibly)
		 just created new edges into.  */
	      if (TREE_CODE (stmt) == LABEL_EXPR
		  && gimple_in_ssa_p (cfun))
		{
		  tree target = LABEL_EXPR_LABEL (stmt);
		  if (DECL_NONLOCAL (target))
		    {
		      tree phi;
		      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
			{
			  gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
				      (PHI_RESULT (phi)));
			  mark_sym_for_renaming
			    (SSA_NAME_VAR (PHI_RESULT (phi)));
			}
		    }
		}
	    }
	}
    }

  /* In SSA form, remove now-unreachable blocks and repair the SSA web
     before the final CFG cleanup.  */
  if (gimple_in_ssa_p (cfun))
    {
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
    }
  cleanup_tree_cfg ();

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);

  return 0;
}
350
/* Pass descriptor: "fixupcfg", running execute_fixup_cfg to repair the
   CFG after IPA passes and inlining.  Requires a CFG; runs
   unconditionally.  */

struct tree_opt_pass pass_fixup_cfg =
{
  "fixupcfg",				/* name */
  NULL,					/* gate */
  execute_fixup_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
367
/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  */

static unsigned int
execute_init_datastructures (void)
{
  /* Set up the hash tables, arrays and other structures the tree-ssa
     passes rely on.  */
  init_tree_ssa ();

  return 0;
}
378
/* Pass descriptor: anonymous pass that runs execute_init_datastructures
   to allocate the tree-ssa data structures.  Requires a CFG; runs
   unconditionally.  */

struct tree_opt_pass pass_init_datastructures =
{
  NULL,					/* name */
  NULL,					/* gate */
  execute_init_datastructures,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
395
396 void
397 tree_lowering_passes (tree fn)
398 {
399 tree saved_current_function_decl = current_function_decl;
400
401 current_function_decl = fn;
402 push_cfun (DECL_STRUCT_FUNCTION (fn));
403 tree_register_cfg_hooks ();
404 bitmap_obstack_initialize (NULL);
405 execute_pass_list (all_lowering_passes);
406 free_dominance_info (CDI_POST_DOMINATORS);
407 compact_blocks ();
408 current_function_decl = saved_current_function_decl;
409 bitmap_obstack_release (NULL);
410 pop_cfun ();
411 }
412
413 /* Update recursively all inlined_to pointers of functions
414 inlined into NODE to INLINED_TO. */
415 static void
416 update_inlined_to_pointers (struct cgraph_node *node,
417 struct cgraph_node *inlined_to)
418 {
419 struct cgraph_edge *e;
420 for (e = node->callees; e; e = e->next_callee)
421 {
422 if (e->callee->global.inlined_to)
423 {
424 e->callee->global.inlined_to = inlined_to;
425 update_inlined_to_pointers (e->callee, inlined_to);
426 }
427 }
428 }
429
430 \f
/* For functions-as-trees languages, this performs all optimization and
   compilation for FNDECL: inlining, the tree pass list, and final
   bookkeeping.  Pushes TV_EXPAND for the duration and restores
   input_location before returning.  */

void
tree_rest_of_compilation (tree fndecl)
{
  location_t saved_loc;
  struct cgraph_node *node;

  timevar_push (TV_EXPAND);

  /* In unit-at-a-time mode the whole callgraph must have been analyzed
     before individual functions are compiled.  */
  gcc_assert (!flag_unit_at_a_time || cgraph_global_info_ready);

  node = cgraph_node (fndecl);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (cgraph_preserve_function_body_p (fndecl))
    save_inline_function_body (node);

  /* Initialize the RTL code for the function.  */
  current_function_decl = fndecl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (fndecl);
  init_function_start (fndecl);

  /* Even though we're inside a function body, we still don't want to
     call expand_expr to calculate the size of a variable-sized array.
     We haven't necessarily assigned RTL to all variables yet, so it's
     not safe to try to expand expressions involving them.  */
  cfun->x_dont_save_pending_sizes_p = 1;

  tree_register_cfg_hooks ();

  /* Perform tree inlining, but only if at least one callee was inlined
     or -Winline diagnostics are wanted.  */
  if (flag_inline_trees)
    {
      struct cgraph_edge *e;
      for (e = node->callees; e; e = e->next_callee)
	if (!e->inline_failed || warn_inline)
	  break;
      if (e)
	{
	  timevar_push (TV_INTEGRATION);
	  optimize_inline_calls (fndecl);
	  timevar_pop (TV_INTEGRATION);
	}
    }
  /* In non-unit-at-a-time we must mark all referenced functions as
     needed.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_edge *e;
      for (e = node->callees; e; e = e->next_callee)
	if (e->callee->analyzed)
	  cgraph_mark_needed_node (e->callee);
    }

  /* We are not going to maintain the cgraph edges up to date.
     Kill it so it won't confuse us.  */
  cgraph_node_remove_callees (node);

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
  /* Perform all tree transforms and optimizations.  */
  execute_pass_list (all_passes);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* The tree body is no longer needed once compilation is done.
     NOTE(review): DECL_SAVED_TREE is cleared again below in the
     !flag_inline_trees branch — the second clear looks redundant;
     confirm against history before removing either.  */
  DECL_SAVED_TREE (fndecl) = NULL;
  cfun = 0;

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (fndecl) && TREE_TYPE (fndecl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (fndecl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  /* The exact size fits in a HWI but may not fit in an
	     unsigned int; fall back to the "larger than" wording when
	     it does not round-trip.  */
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (0, "size of return value of %q+D is %u bytes",
		     fndecl, size_as_int);
	  else
	    warning (0, "size of return value of %q+D is larger than %wd bytes",
		     fndecl, larger_than_size);
	}
    }

  if (!flag_inline_trees)
    {
      DECL_SAVED_TREE (fndecl) = NULL;
      if (DECL_STRUCT_FUNCTION (fndecl) == 0
	  && !cgraph_node (fndecl)->origin)
	{
	  /* Stop pointing to the local nodes about to be freed.
	     But DECL_INITIAL must remain nonzero so we know this
	     was an actual function definition.
	     For a nested function, this is done in c_pop_function_context.
	     If rest_of_compilation set this to 0, leave it 0.  */
	  if (DECL_INITIAL (fndecl) != 0)
	    DECL_INITIAL (fndecl) = error_mark_node;
	}
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_EXPAND);
}