/* Top-level control of tree optimizations.
   Copyright 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "diagnostic.h"
#include "flags.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "function.h"
#include "langhooks.h"
#include "toplev.h"
#include "cgraph.h"
#include "tree-inline.h"
#include "tree-mudflap.h"
#include "tree-pass.h"
#include "ggc.h"
#include "graph.h"
#include "cfgloop.h"
#include "except.h"


/* Gate: execute, or not, all of the non-trivial optimizations.  */

static bool
gate_all_optimizations (void)
{
  return (optimize >= 1
          /* Don't bother doing anything if the program has errors.  We
             still have to go down the pass queue, however, if we have
             already gone into SSA form.  */
          && (!(errorcount || sorrycount) || gimple_in_ssa_p (cfun)));
}

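/* The main GIMPLE optimization passes are run as sub-passes of this
   container (they are registered in init_optimization_passes in passes.c);
   the pass itself does no work beyond the gate above.  */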
struct gimple_opt_pass pass_all_optimizations =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  gate_all_optimizations,               /* gate */
  NULL,                                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Gate: execute, or not, the early local passes.  */

static bool
gate_all_early_local_passes (void)
{
  /* Don't bother doing anything if the program has errors.  */
  return (!errorcount && !sorrycount);
}

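/* Container for the early local passes, which are run once per function
   before the IPA passes proper; the sub-passes are registered in
   init_optimization_passes in passes.c.  Functions found to be unreachable
   afterwards are removed (TODO_remove_functions).  */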
struct simple_ipa_opt_pass pass_early_local_passes =
{
 {
  SIMPLE_IPA_PASS,
  "early_local_cleanups",               /* name */
  gate_all_early_local_passes,          /* gate */
  NULL,                                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_remove_functions                 /* todo_flags_finish */
 }
};

static unsigned int
execute_early_local_optimizations (void)
{
  /* The first time we run the early optimizations we need to advance the
     cgraph state so that functions inserted later are early optimized too.
     We also run the early local optimizations for functions inserted late;
     in that case do not move the cgraph state back to IPA_SSA.  */
  if (flag_unit_at_a_time && cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;
  return 0;
}

/* Gate: execute, or not, the early optimization passes.  */

static bool
gate_all_early_optimizations (void)
{
  return (optimize >= 1
          /* Don't bother doing anything if the program has errors.  */
          && !(errorcount || sorrycount));
}

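/* The early intraprocedural optimizations hang off this container as
   sub-passes; the execute callback above merely advances the cgraph
   state before the first of them runs.  */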
struct gimple_opt_pass pass_all_early_optimizations =
{
 {
  GIMPLE_PASS,
  "early_optimizations",                /* name */
  gate_all_early_optimizations,         /* gate */
  execute_early_local_optimizations,    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Pass: cleanup the CFG just before the IPA passes run.  This is just a
   round of simple CFG cleanups, since gimplification and lowering may
   have left unreachable blocks or redundant edges behind.  */

static unsigned int
execute_cleanup_cfg_pre_ipa (void)
{
  cleanup_tree_cfg ();
  return 0;
}

struct gimple_opt_pass pass_cleanup_cfg =
{
 {
  GIMPLE_PASS,
  "cleanup_cfg",                        /* name */
  NULL,                                 /* gate */
  execute_cleanup_cfg_pre_ipa,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};


/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  fold_cond_expr_cond ();
  cleanup_tree_cfg ();
  cleanup_dead_labels ();
  group_case_labels ();
  return 0;
}

struct gimple_opt_pass pass_cleanup_cfg_post_optimizing =
{
 {
  GIMPLE_PASS,
  "final_cleanup",                      /* name */
  NULL,                                 /* gate */
  execute_cleanup_cfg_post_optimizing,  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

/* Pass: do the actions required to finish with tree-ssa optimization
   passes.  */

static unsigned int
execute_free_datastructures (void)
{
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* Remove the ssa structures.  */
  if (cfun->gimple_df)
    delete_tree_ssa ();
  return 0;
}

struct gimple_opt_pass pass_free_datastructures =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_free_datastructures,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Pass: free cfg annotations.  */

static unsigned int
execute_free_cfg_annotations (void)
{
  /* And get rid of annotations we no longer need.  */
  delete_tree_cfg_annotations ();

  return 0;
}

struct gimple_opt_pass pass_free_cfg_annotations =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_free_cfg_annotations,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Pass: fixup_cfg.  IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions nothrow.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because in between inlining and this fixup verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;

  cfun->after_inlining = true;

  if (cfun->eh)
    FOR_EACH_BB (bb)
      {
        for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
          {
            tree stmt = bsi_stmt (bsi);
            tree call = get_call_expr_in (stmt);
            tree decl = call ? get_callee_fndecl (call) : NULL;

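            /* IPA analysis or compilation of earlier functions may have
               discovered that the callee is const or pure after this call
               was built; clear the stale side-effect flag so that later
               optimizers can take advantage of it.  */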
            if (decl && call_expr_flags (call) & (ECF_CONST | ECF_PURE
                                                  | ECF_LOOPING_CONST_OR_PURE)
                && TREE_SIDE_EFFECTS (call))
              {
                if (gimple_in_ssa_p (cfun))
                  {
                    todo |= TODO_update_ssa | TODO_cleanup_cfg;
                    update_stmt (stmt);
                  }
                TREE_SIDE_EFFECTS (call) = 0;
              }
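            /* Similarly, propagate nothrow from the callee to the call, and
               take statements that can no longer throw out of their EH
               region, so that the now-dead EH edges can be purged below.  */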
            if (decl && TREE_NOTHROW (decl))
              TREE_NOTHROW (call) = 1;
            if (!tree_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
              remove_stmt_from_eh_region (stmt);
          }
        if (tree_purge_dead_eh_edges (bb))
          todo |= TODO_cleanup_cfg;
      }

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);

  return todo;
}

/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  */

static unsigned int
execute_init_datastructures (void)
{
  /* Allocate hash tables, arrays and other structures.  */
  init_tree_ssa (cfun);
  return 0;
}

struct gimple_opt_pass pass_init_datastructures =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_init_datastructures,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

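/* Run the lowering passes over the body of FN and, when optimizing and the
   callgraph information is already complete (as for functions added late,
   e.g. by cgraph_add_new_function), the early local passes as well.  */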
void
tree_lowering_passes (tree fn)
{
  tree saved_current_function_decl = current_function_decl;

  current_function_decl = fn;
  push_cfun (DECL_STRUCT_FUNCTION (fn));
  tree_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_pass_list (all_lowering_passes);
  if (optimize && cgraph_global_info_ready)
    execute_pass_list (pass_early_local_passes.pass.sub);
  free_dominance_info (CDI_POST_DOMINATORS);
  free_dominance_info (CDI_DOMINATORS);
  compact_blocks ();
  current_function_decl = saved_current_function_decl;
  bitmap_obstack_release (NULL);
  pop_cfun ();
}

/* For functions-as-trees languages, this performs all optimization and
   compilation for FNDECL.  */

void
tree_rest_of_compilation (tree fndecl)
{
  location_t saved_loc;
  struct cgraph_node *node;

  timevar_push (TV_EXPAND);

  gcc_assert (!flag_unit_at_a_time || cgraph_global_info_ready);

  node = cgraph_node (fndecl);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = fndecl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (fndecl);
  init_function_start (fndecl);

  /* Even though we're inside a function body, we still don't want to
     call expand_expr to calculate the size of a variable-sized array.
     We haven't necessarily assigned RTL to all variables yet, so it's
     not safe to try to expand expressions involving them.  */
  cfun->dont_save_pending_sizes_p = 1;

  tree_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation.  */
  /* Perform all tree transforms and optimizations.  */
  execute_pass_list (all_passes);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  DECL_SAVED_TREE (fndecl) = NULL;
  set_cfun (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (fndecl) && TREE_TYPE (fndecl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (fndecl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
          && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
          && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
                                   larger_than_size))
        {
          unsigned int size_as_int
            = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

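          /* If the size survives truncation to unsigned int, report the
             exact number of bytes; otherwise only report that it exceeds
             the larger_than_size threshold.  */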
          if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
            warning (OPT_Wlarger_than_eq, "size of return value of %q+D is %u bytes",
                     fndecl, size_as_int);
          else
            warning (OPT_Wlarger_than_eq, "size of return value of %q+D is larger than %wd bytes",
                     fndecl, larger_than_size);
        }
    }

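  /* When function bodies are not kept around for tree inlining, release
     the remaining references to this one.  */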
  if (!flag_inline_trees)
    {
      DECL_SAVED_TREE (fndecl) = NULL;
      if (DECL_STRUCT_FUNCTION (fndecl) == 0
          && !cgraph_node (fndecl)->origin)
        {
          /* Stop pointing to the local nodes about to be freed.
             But DECL_INITIAL must remain nonzero so we know this
             was an actual function definition.
             For a nested function, this is done in c_pop_function_context.
             If rest_of_compilation set this to 0, leave it 0.  */
          if (DECL_INITIAL (fndecl) != 0)
            DECL_INITIAL (fndecl) = error_mark_node;
        }
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_EXPAND);
}