gcc/tree-optimize.c
/* Top-level control of tree optimizations.
   Copyright 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "function.h"
#include "langhooks.h"
#include "toplev.h"
#include "cgraph.h"
#include "tree-inline.h"
#include "tree-mudflap.h"
#include "tree-pass.h"
#include "ggc.h"
#include "graph.h"
#include "cfgloop.h"
#include "except.h"
#include "plugin.h"
#include "regset.h"     /* FIXME: For reg_obstack.  */

/* Gate: execute, or not, all of the non-trivial optimizations.  */

static bool
gate_all_optimizations (void)
{
  return (optimize >= 1
          /* Don't bother doing anything if the program has errors.
             We have to pass down the queue if we already went into SSA
             form, though.  */
          && (!seen_error () || gimple_in_ssa_p (cfun)));
}

struct gimple_opt_pass pass_all_optimizations =
{
 {
  GIMPLE_PASS,
  "*all_optimizations",                 /* name */
  gate_all_optimizations,               /* gate */
  NULL,                                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Gate: execute, or not, the early local passes.  */

static bool
gate_all_early_local_passes (void)
{
  /* Don't bother doing anything if the program has errors.  */
  return (!seen_error () && !in_lto_p);
}

struct simple_ipa_opt_pass pass_early_local_passes =
{
 {
  SIMPLE_IPA_PASS,
  "early_local_cleanups",               /* name */
  gate_all_early_local_passes,          /* gate */
  NULL,                                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_remove_functions                 /* todo_flags_finish */
 }
};

static unsigned int
execute_early_local_optimizations (void)
{
  /* The first time we start the early optimizations we need to advance
     the cgraph state so that newly inserted functions are also early
     optimized.  However, we also execute the early local optimizations
     for functions inserted later on; in that case don't reset the cgraph
     state back to IPA_SSA.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;
  return 0;
}

/* Gate: execute, or not, all of the early optimizations.  */

static bool
gate_all_early_optimizations (void)
{
  return (optimize >= 1
          /* Don't bother doing anything if the program has errors.  */
          && !seen_error ());
}

struct gimple_opt_pass pass_all_early_optimizations =
{
 {
  GIMPLE_PASS,
  "early_optimizations",                /* name */
  gate_all_early_optimizations,         /* gate */
  execute_early_local_optimizations,    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Pass: cleanup the CFG before the IPA passes run.  This is just a
   round of CFG cleanups that may have become necessary by this point.  */

static unsigned int
execute_cleanup_cfg_pre_ipa (void)
{
  cleanup_tree_cfg ();
  return 0;
}

struct gimple_opt_pass pass_cleanup_cfg =
{
 {
  GIMPLE_PASS,
  "cleanup_cfg",                        /* name */
  NULL,                                 /* gate */
  execute_cleanup_cfg_pre_ipa,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};


/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
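  /* Fold the conditions of conditional jumps where they simplify, clean up
     the CFG, then drop labels that are no longer referenced and merge case
     labels that share a destination.  */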
  fold_cond_expr_cond ();
  cleanup_tree_cfg ();
  cleanup_dead_labels ();
  group_case_labels ();
  return 0;
}

struct gimple_opt_pass pass_cleanup_cfg_post_optimizing =
{
 {
  GIMPLE_PASS,
  "optimized",                          /* name */
  NULL,                                 /* gate */
  execute_cleanup_cfg_post_optimizing,  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
    | TODO_remove_unused_locals
 }
};

/* Pass: do the actions required to finish with tree-ssa optimization
   passes.  */

unsigned int
execute_free_datastructures (void)
{
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  /* And get rid of annotations we no longer need.  */
  delete_tree_cfg_annotations ();

  return 0;
}

/* Pass: fixup_cfg.  IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because in between inlining and this fixup verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

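  /* The execution count recorded on the cgraph node may no longer agree
     with the count on the entry block (e.g. after the IPA passes or
     inlining adjusted it), so compute a scale factor, rounding to nearest,
     and bring the block and edge counts below back into line with it.  */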
  if (ENTRY_BLOCK_PTR->count)
    count_scale = (cgraph_node (current_function_decl)->count * REG_BR_PROB_BASE
                   + ENTRY_BLOCK_PTR->count / 2) / ENTRY_BLOCK_PTR->count;
  else
    count_scale = REG_BR_PROB_BASE;

  ENTRY_BLOCK_PTR->count = cgraph_node (current_function_decl)->count;
  EXIT_BLOCK_PTR->count = (EXIT_BLOCK_PTR->count * count_scale
                           + REG_BR_PROB_BASE / 2) / REG_BR_PROB_BASE;

  FOR_EACH_BB (bb)
    {
      bb->count = (bb->count * count_scale
                   + REG_BR_PROB_BASE / 2) / REG_BR_PROB_BASE;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree decl = is_gimple_call (stmt)
                      ? gimple_call_fndecl (stmt)
                      : NULL;
          if (decl)
            {
              int flags = gimple_call_flags (stmt);
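              /* Calls that have been discovered to be const or pure clobber
                 less (or no) memory than before, so their virtual operands
                 may need to change; rewrite the statement and schedule an
                 SSA update and a CFG cleanup.  */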
              if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
                {
                  if (gimple_in_ssa_p (cfun))
                    {
                      todo |= TODO_update_ssa | TODO_cleanup_cfg;
                      mark_symbols_for_renaming (stmt);
                      update_stmt (stmt);
                    }
                }

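              /* If the call is now known not to return, let
                 fixup_noreturn_call adjust the IL around it; if that
                 changed anything, request a CFG cleanup as well.  */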
              if (flags & ECF_NORETURN
                  && fixup_noreturn_call (stmt))
                todo |= TODO_cleanup_cfg;
            }

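          /* Statements that can no longer throw (e.g. calls now known to be
             nothrow) may carry stale EH information; drop it here, and purge
             the now-dead EH edges once the whole block has been scanned.  */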
          maybe_clean_eh_stmt (stmt);
        }

      if (gimple_purge_dead_eh_edges (bb))
        todo |= TODO_cleanup_cfg;
      FOR_EACH_EDGE (e, ei, bb->succs)
        e->count = (e->count * count_scale
                    + REG_BR_PROB_BASE / 2) / REG_BR_PROB_BASE;
    }
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    {
      VEC_free (gimple, gc, MODIFIED_NORETURN_CALLS (cfun));
      MODIFIED_NORETURN_CALLS (cfun) = NULL;
    }

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  return todo;
}

struct gimple_opt_pass pass_fixup_cfg =
{
 {
  GIMPLE_PASS,
  "*free_cfg_annotations",              /* name */
  NULL,                                 /* gate */
  execute_fixup_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Do the actions required to initialize internal data structures used
   in tree-ssa optimization passes.  */

static unsigned int
execute_init_datastructures (void)
{
  /* Allocate hash tables, arrays and other structures.  */
  init_tree_ssa (cfun);
  return 0;
}

struct gimple_opt_pass pass_init_datastructures =
{
 {
  GIMPLE_PASS,
  "*init_datastructures",               /* name */
  NULL,                                 /* gate */
  execute_init_datastructures,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

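/* Run the GIMPLE lowering passes for FN and, when we are optimizing and the
   cgraph is ready, the early local passes as well, all within FN's function
   context.  */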
void
tree_lowering_passes (tree fn)
{
  tree saved_current_function_decl = current_function_decl;

  current_function_decl = fn;
  push_cfun (DECL_STRUCT_FUNCTION (fn));
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_pass_list (all_lowering_passes);
  if (optimize && cgraph_global_info_ready)
    execute_pass_list (pass_early_local_passes.pass.sub);
  free_dominance_info (CDI_POST_DOMINATORS);
  free_dominance_info (CDI_DOMINATORS);
  compact_blocks ();
  current_function_decl = saved_current_function_decl;
  bitmap_obstack_release (NULL);
  pop_cfun ();
}
\f
/* For functions-as-trees languages, this performs all optimization and
   compilation for FNDECL.  */

void
tree_rest_of_compilation (tree fndecl)
{
  location_t saved_loc;

  timevar_push (TV_EXPAND);

  gcc_assert (cgraph_global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = fndecl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (fndecl);
  init_function_start (fndecl);

  /* Even though we're inside a function body, we still don't want to
     call expand_expr to calculate the size of a variable-sized array.
     We haven't necessarily assigned RTL to all variables yet, so it's
     not safe to try to expand expressions involving them.  */
  cfun->dont_save_pending_sizes_p = 1;

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation.  */

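  /* First apply whatever transformations the IPA passes have queued for
     this function (e.g. the inlining decisions), then run the regular
     intraprocedural pass list.  */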
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  set_cfun (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (fndecl) && TREE_TYPE (fndecl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (fndecl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
          && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
          && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
                                   larger_than_size))
        {
          unsigned int size_as_int
            = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

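          /* If the byte size fits in an unsigned int, report the exact
             value; otherwise just report that it exceeds the
             -Wlarger-than= threshold.  */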
          if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
            warning (OPT_Wlarger_than_eq, "size of return value of %q+D is %u bytes",
                     fndecl, size_as_int);
          else
            warning (OPT_Wlarger_than_eq, "size of return value of %q+D is larger than %wd bytes",
                     fndecl, larger_than_size);
        }
    }

  gimple_set_body (fndecl, NULL);
  if (DECL_STRUCT_FUNCTION (fndecl) == 0
      && !cgraph_node (fndecl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
         But DECL_INITIAL must remain nonzero so we know this
         was an actual function definition.
         For a nested function, this is done in c_pop_function_context.
         If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (fndecl) != 0)
        DECL_INITIAL (fndecl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_EXPAND);
}