gcc/passes.c
1 /* Top level of GCC compilers (cc1, cc1plus, etc.)
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This is the top level of cc1/c++.
23 It parses command args, opens files, invokes the various passes
24 in the proper order, and counts the time used by each.
25 Error messages and low-level interface to malloc also handled here. */
26
27 #include "config.h"
28 #undef FLOAT /* This is for hpux. They should change hpux. */
29 #undef FFS /* Some systems define this in param.h. */
30 #include "system.h"
31 #include "coretypes.h"
32 #include "tm.h"
33 #include <signal.h>
34
35 #ifdef HAVE_SYS_RESOURCE_H
36 # include <sys/resource.h>
37 #endif
38
39 #ifdef HAVE_SYS_TIMES_H
40 # include <sys/times.h>
41 #endif
42
43 #include "line-map.h"
44 #include "input.h"
45 #include "tree.h"
46 #include "rtl.h"
47 #include "tm_p.h"
48 #include "flags.h"
49 #include "insn-attr.h"
50 #include "insn-config.h"
51 #include "insn-flags.h"
52 #include "hard-reg-set.h"
53 #include "recog.h"
54 #include "output.h"
55 #include "except.h"
56 #include "function.h"
57 #include "toplev.h"
58 #include "expr.h"
59 #include "basic-block.h"
60 #include "intl.h"
61 #include "ggc.h"
62 #include "graph.h"
63 #include "regs.h"
64 #include "timevar.h"
65 #include "diagnostic.h"
66 #include "params.h"
67 #include "reload.h"
68 #include "dwarf2asm.h"
69 #include "integrate.h"
70 #include "real.h"
71 #include "debug.h"
72 #include "target.h"
73 #include "langhooks.h"
74 #include "cfglayout.h"
75 #include "cfgloop.h"
76 #include "hosthooks.h"
77 #include "cgraph.h"
78 #include "opts.h"
79 #include "coverage.h"
80 #include "value-prof.h"
81 #include "tree-inline.h"
82 #include "tree-flow.h"
83 #include "tree-pass.h"
84 #include "tree-dump.h"
85 #include "df.h"
86 #include "predict.h"
87
88 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
89 #include "dwarf2out.h"
90 #endif
91
92 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
93 #include "dbxout.h"
94 #endif
95
96 #ifdef SDB_DEBUGGING_INFO
97 #include "sdbout.h"
98 #endif
99
100 #ifdef XCOFF_DEBUGGING_INFO
101 #include "xcoffout.h" /* Needed for external data
102 declarations for e.g. AIX 4.x. */
103 #endif
104
105 /* This is used for debugging. It allows the current pass to be printed
106 from anywhere during compilation. */
107 struct opt_pass *current_pass;
108
109 /* Call from anywhere to find out what pass this is. Useful for
110 printing out debugging information deep inside a service
111 routine. */
112 void
113 print_current_pass (FILE *file)
114 {
115 if (current_pass)
116 fprintf (file, "current pass = %s (%d)\n",
117 current_pass->name, current_pass->static_pass_number);
118 else
119 fprintf (file, "no current pass.\n");
120 }
121
122
123 /* Call from the debugger to get the current pass name. */
124 void
125 debug_pass (void)
126 {
127 print_current_pass (stderr);
128 }
129
130
131
132 /* Global variables used to communicate with passes. */
133 int dump_flags;            /* Dump flags of the currently open dump file. */
134 bool in_gimple_form;       /* True when the IL is in GIMPLE form. */
135 bool first_pass_instance;  /* True when running the first instance of a pass. */
136
137
138 /* This is called from various places for FUNCTION_DECL, VAR_DECL,
139 and TYPE_DECL nodes.
140
141 This does nothing for local (non-static) variables, unless the
142 variable is a register variable with DECL_ASSEMBLER_NAME set. In
143 that case, or if the variable is not an automatic, it sets up the
144 RTL and outputs any assembler code (label definition, storage
145 allocation and initialization).
146
147 DECL is the declaration. TOP_LEVEL is nonzero if this declaration is not
148 within a function. AT_END is nonzero at the end of the compilation unit. */
149
150 void
151 rest_of_decl_compilation (tree decl,
152 int top_level,
153 int at_end)
154 {
155 /* We deferred calling assemble_alias so that we could collect
156 other attributes such as visibility. Emit the alias now. */
157 {
158 tree alias;
159 alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
160 if (alias)
161 {
162 alias = TREE_VALUE (TREE_VALUE (alias));
163 alias = get_identifier (TREE_STRING_POINTER (alias));
164 assemble_alias (decl, alias);
165 }
166 }
167
168 /* Can't defer this, because it needs to happen before any
169 later function definitions are processed. */
170 if (DECL_ASSEMBLER_NAME_SET_P (decl) && DECL_REGISTER (decl))
171 make_decl_rtl (decl);
172
173 /* Forward declarations for nested functions are not "external",
174 but we need to treat them as if they were. */
175 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
176 || TREE_CODE (decl) == FUNCTION_DECL)
177 {
178 timevar_push (TV_VARCONST);
179
180 /* Don't output anything when a tentative file-scope definition
181 is seen. But at end of compilation, do output code for them.
182
183 We do output all variables and rely on
184 callgraph code to defer them except for forward declarations
185 (see gcc.c-torture/compile/920624-1.c) */
186 if ((at_end
187 || !DECL_DEFER_OUTPUT (decl)
188 || DECL_INITIAL (decl))
189 && !DECL_EXTERNAL (decl))
190 {
191 if (TREE_CODE (decl) != FUNCTION_DECL)
192 varpool_finalize_decl (decl);
193 else
194 assemble_variable (decl, top_level, at_end, 0);
195 }
196
197 #ifdef ASM_FINISH_DECLARE_OBJECT
198 if (decl == last_assemble_variable_decl)
199 {
200 ASM_FINISH_DECLARE_OBJECT (asm_out_file, decl,
201 top_level, at_end);
202 }
203 #endif
204
205 timevar_pop (TV_VARCONST);
206 }
207 else if (TREE_CODE (decl) == TYPE_DECL
208 /* Like in rest_of_type_compilation, avoid confusing the debug
209 information machinery when there are errors. */
210 && !(sorrycount || errorcount))
211 {
212 timevar_push (TV_SYMOUT);
213 debug_hooks->type_decl (decl, !top_level);
214 timevar_pop (TV_SYMOUT);
215 }
216
217 /* Let cgraph know about the existence of variables. */
218 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
219 varpool_node (decl);
220 }
221
222 /* Called after finishing a record, union or enumeral type. */
223
224 void
225 rest_of_type_compilation (tree type, int toplev)
226 {
227 /* Avoid confusing the debug information machinery when there are
228 errors. */
229 if (errorcount != 0 || sorrycount != 0)
230 return;
231
232 timevar_push (TV_SYMOUT);
233 debug_hooks->type_decl (TYPE_STUB_DECL (type), !toplev);
234 timevar_pop (TV_SYMOUT);
235 }
236
237 \f
238
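/* Called at the end of compilation to emit whole-compilation summaries:
branch-profiling statistics, combine statistics, and any graph dump
files that still need to be finished. */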
239 void
240 finish_optimization_passes (void)
241 {
242 enum tree_dump_index i;
243 struct dump_file_info *dfi;
244 char *name;
245
246 timevar_push (TV_DUMP);
247 if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
248 {
249 dump_file = dump_begin (pass_profile.pass.static_pass_number, NULL);
250 end_branch_prob ();
251 if (dump_file)
252 dump_end (pass_profile.pass.static_pass_number, dump_file);
253 }
254
255 if (optimize > 0)
256 {
257 dump_file = dump_begin (pass_combine.pass.static_pass_number, NULL);
258 if (dump_file)
259 {
260 dump_combine_total_stats (dump_file);
261 dump_end (pass_combine.pass.static_pass_number, dump_file);
262 }
263 }
264
265 /* Do whatever is necessary to finish printing the graphs. */
266 if (graph_dump_format != no_graph)
267 for (i = TDI_end; (dfi = get_dump_file_info (i)) != NULL; ++i)
268 if (dump_initialized_p (i)
269 && (dfi->flags & TDF_GRAPH) != 0
270 && (name = get_dump_file_name (i)) != NULL)
271 {
272 finish_graph_dump_file (name);
273 free (name);
274 }
275
276 timevar_pop (TV_DUMP);
277 }
278
279 static bool
280 gate_rest_of_compilation (void)
281 {
282 /* Early return if there were errors. We can run afoul of our
283 consistency checks, and there's not really much point in fixing them. */
284 return !(rtl_dump_and_exit || flag_syntax_only || errorcount || sorrycount);
285 }
286
287 struct gimple_opt_pass pass_rest_of_compilation =
288 {
289 {
290 GIMPLE_PASS,
291 NULL, /* name */
292 gate_rest_of_compilation, /* gate */
293 NULL, /* execute */
294 NULL, /* sub */
295 NULL, /* next */
296 0, /* static_pass_number */
297 TV_REST_OF_COMPILATION, /* tv_id */
298 PROP_rtl, /* properties_required */
299 0, /* properties_provided */
300 0, /* properties_destroyed */
301 0, /* todo_flags_start */
302 TODO_ggc_collect /* todo_flags_finish */
303 }
304 };
305
306 static bool
307 gate_postreload (void)
308 {
309 return reload_completed;
310 }
311
312 struct rtl_opt_pass pass_postreload =
313 {
314 {
315 RTL_PASS,
316 NULL, /* name */
317 gate_postreload, /* gate */
318 NULL, /* execute */
319 NULL, /* sub */
320 NULL, /* next */
321 0, /* static_pass_number */
322 0, /* tv_id */
323 PROP_rtl, /* properties_required */
324 0, /* properties_provided */
325 0, /* properties_destroyed */
326 0, /* todo_flags_start */
327 TODO_ggc_collect | TODO_verify_rtl_sharing /* todo_flags_finish */
328 }
329 };
330
331
332
333 /* The root of the compilation pass tree, once constructed. */
334 struct opt_pass *all_passes, *all_ipa_passes, *all_lowering_passes;
335
336 /* A map from static pass id to optimization pass. */
337 struct opt_pass **passes_by_id;
338 int passes_by_id_size;
339
340 /* Set the static pass number of pass PASS to ID and record that
341 in the mapping from static pass number to pass. */
342
343 static void
344 set_pass_for_id (int id, struct opt_pass *pass)
345 {
346 pass->static_pass_number = id;
347 if (passes_by_id_size <= id)
348 {
349 passes_by_id = XRESIZEVEC (struct opt_pass *, passes_by_id, id + 1);
350 memset (passes_by_id + passes_by_id_size, 0,
351 (id + 1 - passes_by_id_size) * sizeof (void *));
352 passes_by_id_size = id + 1;
353 }
354 passes_by_id[id] = pass;
355 }
356
357 /* Return the pass with the static pass number ID. */
358
359 struct opt_pass *
360 get_pass_for_id (int id)
361 {
362 if (id >= passes_by_id_size)
363 return NULL;
364 return passes_by_id[id];
365 }
366
367 /* Iterate over the pass tree allocating dump file numbers. We want
368 to do this depth first, and independent of whether the pass is
369 enabled or not. */
370
371 static void
372 register_one_dump_file (struct opt_pass *pass)
373 {
374 char *dot_name, *flag_name, *glob_name;
375 const char *prefix;
376 char num[10];
377 int flags, id;
378
379 /* See below in next_pass_1. */
380 num[0] = '\0';
381 if (pass->static_pass_number != -1)
382 sprintf (num, "%d", ((int) pass->static_pass_number < 0
383 ? 1 : pass->static_pass_number));
384
385 dot_name = concat (".", pass->name, num, NULL);
386 if (pass->type == SIMPLE_IPA_PASS || pass->type == IPA_PASS)
387 prefix = "ipa-", flags = TDF_IPA;
388 else if (pass->type == GIMPLE_PASS)
389 prefix = "tree-", flags = TDF_TREE;
390 else
391 prefix = "rtl-", flags = TDF_RTL;
392
393 flag_name = concat (prefix, pass->name, num, NULL);
394 glob_name = concat (prefix, pass->name, NULL);
395 id = dump_register (dot_name, flag_name, glob_name, flags);
396 set_pass_for_id (id, pass);
397 }
398
399 /* Recursive worker function for register_dump_files. */
400
401 static int
402 register_dump_files_1 (struct opt_pass *pass, int properties)
403 {
404 do
405 {
406 int new_properties = (properties | pass->properties_provided)
407 & ~pass->properties_destroyed;
408
409 if (pass->name && pass->name[0] != '*')
410 register_one_dump_file (pass);
411
412 if (pass->sub)
413 new_properties = register_dump_files_1 (pass->sub, new_properties);
414
415 /* If we have a gate, combine the properties that we could have with
416 and without the pass being examined. */
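/* For instance, a property provided only by a gated pass cannot be
assumed by later passes, since the gate may turn the pass off. */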
417 if (pass->gate)
418 properties &= new_properties;
419 else
420 properties = new_properties;
421
422 pass = pass->next;
423 }
424 while (pass);
425
426 return properties;
427 }
428
429 /* Register the dump files for the pipeline starting at PASS.
430 PROPERTIES reflects the properties that are guaranteed to be available at
431 the beginning of the pipeline. */
432
433 static void
434 register_dump_files (struct opt_pass *pass,int properties)
435 {
436 pass->properties_required |= properties;
437 register_dump_files_1 (pass, properties);
438 }
439
440 /* Add a pass to the pass list. Duplicate the pass if it's already
441 in the list. */
442
443 static struct opt_pass **
444 next_pass_1 (struct opt_pass **list, struct opt_pass *pass)
445 {
446 /* A nonzero static_pass_number indicates that the
447 pass is already in the list. */
448 if (pass->static_pass_number)
449 {
450 struct opt_pass *new_pass;
451
452 new_pass = XNEW (struct opt_pass);
453 memcpy (new_pass, pass, sizeof (*new_pass));
454 new_pass->next = NULL;
455
456 new_pass->todo_flags_start &= ~TODO_mark_first_instance;
457
458 /* Indicate to register_dump_files that this pass has duplicates,
459 and so it should rename the dump file. The first instance keeps a
460 negative static_pass_number, and the number of duplicates is
461 -static_pass_number - 1. Subsequent instances are > 0: just the duplicate number. */
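/* For example, a pass registered three times ends up with static_pass_number
-3 on the original and 2 and 3 on the copies, so register_one_dump_file
gives their dump files the suffixes 1, 2 and 3; a pass with a single
instance keeps -1 and its dump file gets no suffix. */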
462 if (pass->name)
463 {
464 pass->static_pass_number -= 1;
465 new_pass->static_pass_number = -pass->static_pass_number;
466 }
467
468 *list = new_pass;
469 }
470 else
471 {
472 pass->todo_flags_start |= TODO_mark_first_instance;
473 pass->static_pass_number = -1;
474 *list = pass;
475 }
476
477 return &(*list)->next;
478
479 }
480
481
482 /* Construct the pass tree. The sequencing of passes is driven by
483 the cgraph routines:
484
485 cgraph_finalize_compilation_unit ()
486 for each node N in the cgraph
487 cgraph_analyze_function (N)
488 cgraph_lower_function (N) -> all_lowering_passes
489
490 If we are optimizing, cgraph_optimize is then invoked:
491
492 cgraph_optimize ()
493 ipa_passes () -> all_ipa_passes
494 cgraph_expand_all_functions ()
495 for each node N in the cgraph
496 cgraph_expand_function (N)
497 tree_rest_of_compilation (DECL (N)) -> all_passes
498 */
499
500 void
501 init_optimization_passes (void)
502 {
503 struct opt_pass **p;
504
505 #define NEXT_PASS(PASS) (p = next_pass_1 (p, &((PASS).pass)))
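/* NEXT_PASS appends PASS to the list that P points into and leaves P
pointing at the new pass's "next" field. The nested blocks below, which
redeclare P as &<pass>.pass.sub, use the same macro to build a pass's
list of sub-passes. */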
506
507 /* All passes needed to lower the function into a shape the optimizers can
508 operate on. These passes are always run first on the function, but the
509 backend might produce already-lowered functions that are not processed
510 by these passes. */
511 p = &all_lowering_passes;
512 NEXT_PASS (pass_remove_useless_stmts);
513 NEXT_PASS (pass_mudflap_1);
514 NEXT_PASS (pass_lower_omp);
515 NEXT_PASS (pass_lower_cf);
516 NEXT_PASS (pass_refactor_eh);
517 NEXT_PASS (pass_lower_eh);
518 NEXT_PASS (pass_build_cfg);
519 NEXT_PASS (pass_lower_complex_O0);
520 NEXT_PASS (pass_lower_vector);
521 NEXT_PASS (pass_warn_function_return);
522 NEXT_PASS (pass_build_cgraph_edges);
523 NEXT_PASS (pass_inline_parameters);
524 *p = NULL;
525
526 /* Interprocedural optimization passes. */
527 p = &all_ipa_passes;
528 NEXT_PASS (pass_ipa_function_and_variable_visibility);
529 NEXT_PASS (pass_ipa_early_inline);
530 {
531 struct opt_pass **p = &pass_ipa_early_inline.pass.sub;
532 NEXT_PASS (pass_early_inline);
533 NEXT_PASS (pass_inline_parameters);
534 NEXT_PASS (pass_rebuild_cgraph_edges);
535 }
536 NEXT_PASS (pass_early_local_passes);
537 {
538 struct opt_pass **p = &pass_early_local_passes.pass.sub;
539 NEXT_PASS (pass_fixup_cfg);
540 NEXT_PASS (pass_tree_profile);
541 NEXT_PASS (pass_cleanup_cfg);
542 NEXT_PASS (pass_init_datastructures);
543 NEXT_PASS (pass_expand_omp);
544
545 NEXT_PASS (pass_referenced_vars);
546 NEXT_PASS (pass_reset_cc_flags);
547 NEXT_PASS (pass_build_ssa);
548 NEXT_PASS (pass_early_warn_uninitialized);
549 NEXT_PASS (pass_all_early_optimizations);
550 {
551 struct opt_pass **p = &pass_all_early_optimizations.pass.sub;
552 NEXT_PASS (pass_rebuild_cgraph_edges);
553 NEXT_PASS (pass_early_inline);
554 NEXT_PASS (pass_rename_ssa_copies);
555 NEXT_PASS (pass_ccp);
556 NEXT_PASS (pass_forwprop);
557 NEXT_PASS (pass_update_address_taken);
558 NEXT_PASS (pass_sra_early);
559 NEXT_PASS (pass_copy_prop);
560 NEXT_PASS (pass_merge_phi);
561 NEXT_PASS (pass_cd_dce);
562 NEXT_PASS (pass_simple_dse);
563 NEXT_PASS (pass_tail_recursion);
564 NEXT_PASS (pass_convert_switch);
565 NEXT_PASS (pass_profile);
566 NEXT_PASS (pass_local_pure_const);
567 }
568 NEXT_PASS (pass_release_ssa_names);
569 NEXT_PASS (pass_rebuild_cgraph_edges);
570 NEXT_PASS (pass_inline_parameters);
571 }
572 NEXT_PASS (pass_ipa_increase_alignment);
573 NEXT_PASS (pass_ipa_matrix_reorg);
574 NEXT_PASS (pass_ipa_cp);
575 NEXT_PASS (pass_ipa_inline);
576 NEXT_PASS (pass_ipa_reference);
577 NEXT_PASS (pass_ipa_pure_const);
578 NEXT_PASS (pass_ipa_type_escape);
579 NEXT_PASS (pass_ipa_pta);
580 NEXT_PASS (pass_ipa_struct_reorg);
581 *p = NULL;
582
583 /* These passes are run after IPA passes on every function that is being
584 output to the assembler file. */
585 p = &all_passes;
586 NEXT_PASS (pass_all_optimizations);
587 {
588 struct opt_pass **p = &pass_all_optimizations.pass.sub;
589 /* Initial scalar cleanups before alias computation.
590 They ensure memory accesses are not indirect wherever possible. */
591 NEXT_PASS (pass_strip_predict_hints);
592 NEXT_PASS (pass_update_address_taken);
593 NEXT_PASS (pass_rename_ssa_copies);
594 NEXT_PASS (pass_complete_unrolli);
595 NEXT_PASS (pass_ccp);
596 NEXT_PASS (pass_forwprop);
597 /* Ideally the function-call conditional
598 dead code elimination phase could be delayed
599 until later, where potentially more opportunities
600 can be found. But for lack of good ways to
601 update the VDEFs associated with the shrink-wrapped
602 calls, it is better to do the transformation
603 here, before memory SSA is built. */
604 NEXT_PASS (pass_call_cdce);
605 /* pass_build_alias is a dummy pass that ensures that we
606 execute TODO_rebuild_alias at this point. Re-building
607 alias information also rewrites no longer addressed
608 locals into SSA form if possible. */
609 NEXT_PASS (pass_build_alias);
610 NEXT_PASS (pass_return_slot);
611 NEXT_PASS (pass_phiprop);
612 NEXT_PASS (pass_fre);
613 NEXT_PASS (pass_copy_prop);
614 NEXT_PASS (pass_merge_phi);
615 NEXT_PASS (pass_vrp);
616 NEXT_PASS (pass_dce);
617 NEXT_PASS (pass_cselim);
618 NEXT_PASS (pass_tree_ifcombine);
619 NEXT_PASS (pass_phiopt);
620 NEXT_PASS (pass_tail_recursion);
621 NEXT_PASS (pass_ch);
622 NEXT_PASS (pass_stdarg);
623 NEXT_PASS (pass_lower_complex);
624 NEXT_PASS (pass_sra);
625 NEXT_PASS (pass_rename_ssa_copies);
626 NEXT_PASS (pass_dominator);
627 /* The only const/copy propagation opportunities left after
628 DOM should be due to degenerate PHI nodes. So rather than
629 run the full propagators, run a specialized pass which
630 only examines PHIs to discover const/copy propagation
631 opportunities. */
632 NEXT_PASS (pass_phi_only_cprop);
633 NEXT_PASS (pass_dse);
634 NEXT_PASS (pass_reassoc);
635 NEXT_PASS (pass_dce);
636 NEXT_PASS (pass_forwprop);
637 NEXT_PASS (pass_phiopt);
638 NEXT_PASS (pass_object_sizes);
639 NEXT_PASS (pass_ccp);
640 NEXT_PASS (pass_copy_prop);
641 NEXT_PASS (pass_fold_builtins);
642 NEXT_PASS (pass_cse_sincos);
643 NEXT_PASS (pass_split_crit_edges);
644 NEXT_PASS (pass_pre);
645 NEXT_PASS (pass_sink_code);
646 NEXT_PASS (pass_tree_loop);
647 {
648 struct opt_pass **p = &pass_tree_loop.pass.sub;
649 NEXT_PASS (pass_tree_loop_init);
650 NEXT_PASS (pass_copy_prop);
651 NEXT_PASS (pass_dce_loop);
652 NEXT_PASS (pass_lim);
653 NEXT_PASS (pass_predcom);
654 NEXT_PASS (pass_tree_unswitch);
655 NEXT_PASS (pass_scev_cprop);
656 NEXT_PASS (pass_empty_loop);
657 NEXT_PASS (pass_record_bounds);
658 NEXT_PASS (pass_check_data_deps);
659 NEXT_PASS (pass_loop_distribution);
660 NEXT_PASS (pass_linear_transform);
661 NEXT_PASS (pass_graphite_transforms);
662 NEXT_PASS (pass_iv_canon);
663 NEXT_PASS (pass_if_conversion);
664 NEXT_PASS (pass_vectorize);
665 {
666 struct opt_pass **p = &pass_vectorize.pass.sub;
667 NEXT_PASS (pass_lower_vector_ssa);
668 NEXT_PASS (pass_dce_loop);
669 }
670 NEXT_PASS (pass_complete_unroll);
671 NEXT_PASS (pass_parallelize_loops);
672 NEXT_PASS (pass_loop_prefetch);
673 NEXT_PASS (pass_iv_optimize);
674 NEXT_PASS (pass_tree_loop_done);
675 }
676 NEXT_PASS (pass_cse_reciprocals);
677 NEXT_PASS (pass_convert_to_rsqrt);
678 NEXT_PASS (pass_reassoc);
679 NEXT_PASS (pass_vrp);
680 NEXT_PASS (pass_dominator);
681 /* The only const/copy propagation opportunities left after
682 DOM should be due to degenerate PHI nodes. So rather than
683 run the full propagators, run a specialized pass which
684 only examines PHIs to discover const/copy propagation
685 opportunities. */
686 NEXT_PASS (pass_phi_only_cprop);
687 NEXT_PASS (pass_cd_dce);
688 NEXT_PASS (pass_tracer);
689
690 /* FIXME: If DCE is not run before checking for uninitialized uses,
691 we may get false warnings (e.g., testsuite/gcc.dg/uninit-5.c).
692 However, this also causes us to misdiagnose cases that should be
693 real warnings (e.g., testsuite/gcc.dg/pr18501.c).
694
695 To fix the false positives in uninit-5.c, we would have to
696 account for the predicates protecting the set and the use of each
697 variable. Using a representation like Gated Single Assignment
698 may help. */
699 NEXT_PASS (pass_late_warn_uninitialized);
700 NEXT_PASS (pass_dse);
701 NEXT_PASS (pass_forwprop);
702 NEXT_PASS (pass_phiopt);
703 NEXT_PASS (pass_tail_calls);
704 NEXT_PASS (pass_rename_ssa_copies);
705 NEXT_PASS (pass_uncprop);
706 NEXT_PASS (pass_local_pure_const);
707 }
708 NEXT_PASS (pass_del_ssa);
709 NEXT_PASS (pass_nrv);
710 NEXT_PASS (pass_mark_used_blocks);
711 NEXT_PASS (pass_cleanup_cfg_post_optimizing);
712
713 NEXT_PASS (pass_warn_function_noreturn);
714 NEXT_PASS (pass_free_datastructures);
715 NEXT_PASS (pass_mudflap_2);
716
717 NEXT_PASS (pass_free_cfg_annotations);
718 NEXT_PASS (pass_expand);
719 NEXT_PASS (pass_rest_of_compilation);
720 {
721 struct opt_pass **p = &pass_rest_of_compilation.pass.sub;
722 NEXT_PASS (pass_init_function);
723 NEXT_PASS (pass_jump);
724 NEXT_PASS (pass_rtl_eh);
725 NEXT_PASS (pass_initial_value_sets);
726 NEXT_PASS (pass_unshare_all_rtl);
727 NEXT_PASS (pass_instantiate_virtual_regs);
728 NEXT_PASS (pass_into_cfg_layout_mode);
729 NEXT_PASS (pass_jump2);
730 NEXT_PASS (pass_lower_subreg);
731 NEXT_PASS (pass_df_initialize_opt);
732 NEXT_PASS (pass_cse);
733 NEXT_PASS (pass_rtl_fwprop);
734 NEXT_PASS (pass_gcse);
735 NEXT_PASS (pass_rtl_ifcvt);
736 /* Perform loop optimizations. It might be better to do them a bit
737 sooner, but we want the profile feedback to work more
738 efficiently. */
739 NEXT_PASS (pass_loop2);
740 {
741 struct opt_pass **p = &pass_loop2.pass.sub;
742 NEXT_PASS (pass_rtl_loop_init);
743 NEXT_PASS (pass_rtl_move_loop_invariants);
744 NEXT_PASS (pass_rtl_unswitch);
745 NEXT_PASS (pass_rtl_unroll_and_peel_loops);
746 NEXT_PASS (pass_rtl_doloop);
747 NEXT_PASS (pass_rtl_loop_done);
748 *p = NULL;
749 }
750 NEXT_PASS (pass_web);
751 NEXT_PASS (pass_jump_bypass);
752 NEXT_PASS (pass_cse2);
753 NEXT_PASS (pass_rtl_dse1);
754 NEXT_PASS (pass_rtl_fwprop_addr);
755 NEXT_PASS (pass_reginfo_init);
756 NEXT_PASS (pass_inc_dec);
757 NEXT_PASS (pass_initialize_regs);
758 NEXT_PASS (pass_outof_cfg_layout_mode);
759 NEXT_PASS (pass_ud_rtl_dce);
760 NEXT_PASS (pass_combine);
761 NEXT_PASS (pass_if_after_combine);
762 NEXT_PASS (pass_partition_blocks);
763 NEXT_PASS (pass_regmove);
764 NEXT_PASS (pass_split_all_insns);
765 NEXT_PASS (pass_lower_subreg2);
766 NEXT_PASS (pass_df_initialize_no_opt);
767 NEXT_PASS (pass_stack_ptr_mod);
768 NEXT_PASS (pass_mode_switching);
769 NEXT_PASS (pass_see);
770 NEXT_PASS (pass_match_asm_constraints);
771 NEXT_PASS (pass_sms);
772 NEXT_PASS (pass_sched);
773 NEXT_PASS (pass_subregs_of_mode_init);
774 NEXT_PASS (pass_ira);
775 NEXT_PASS (pass_subregs_of_mode_finish);
776 NEXT_PASS (pass_postreload);
777 {
778 struct opt_pass **p = &pass_postreload.pass.sub;
779 NEXT_PASS (pass_postreload_cse);
780 NEXT_PASS (pass_gcse2);
781 NEXT_PASS (pass_split_after_reload);
782 NEXT_PASS (pass_branch_target_load_optimize1);
783 NEXT_PASS (pass_thread_prologue_and_epilogue);
784 NEXT_PASS (pass_rtl_dse2);
785 NEXT_PASS (pass_rtl_seqabstr);
786 NEXT_PASS (pass_stack_adjustments);
787 NEXT_PASS (pass_peephole2);
788 NEXT_PASS (pass_if_after_reload);
789 NEXT_PASS (pass_regrename);
790 NEXT_PASS (pass_cprop_hardreg);
791 NEXT_PASS (pass_fast_rtl_dce);
792 NEXT_PASS (pass_reorder_blocks);
793 NEXT_PASS (pass_branch_target_load_optimize2);
794 NEXT_PASS (pass_leaf_regs);
795 NEXT_PASS (pass_split_before_sched2);
796 NEXT_PASS (pass_sched2);
797 NEXT_PASS (pass_stack_regs);
798 {
799 struct opt_pass **p = &pass_stack_regs.pass.sub;
800 NEXT_PASS (pass_split_before_regstack);
801 NEXT_PASS (pass_stack_regs_run);
802 }
803 NEXT_PASS (pass_compute_alignments);
804 NEXT_PASS (pass_duplicate_computed_gotos);
805 NEXT_PASS (pass_variable_tracking);
806 NEXT_PASS (pass_free_cfg);
807 NEXT_PASS (pass_machine_reorg);
808 NEXT_PASS (pass_cleanup_barriers);
809 NEXT_PASS (pass_delay_slots);
810 NEXT_PASS (pass_split_for_shorten_branches);
811 NEXT_PASS (pass_convert_to_eh_region_ranges);
812 NEXT_PASS (pass_shorten_branches);
813 NEXT_PASS (pass_set_nothrow_function_flags);
814 NEXT_PASS (pass_final);
815 }
816 NEXT_PASS (pass_df_finish);
817 }
818 NEXT_PASS (pass_clean_state);
819 *p = NULL;
820
821 #undef NEXT_PASS
822
823 /* Register the passes with the tree dump code. */
824 register_dump_files (all_lowering_passes, PROP_gimple_any);
825 all_lowering_passes->todo_flags_start |= TODO_set_props;
826 register_dump_files (all_ipa_passes,
827 PROP_gimple_any | PROP_gimple_lcf | PROP_gimple_leh
828 | PROP_cfg);
829 register_dump_files (all_passes,
830 PROP_gimple_any | PROP_gimple_lcf | PROP_gimple_leh
831 | PROP_cfg);
832 }
833
834 /* If we are in IPA mode (i.e., current_function_decl is NULL), call
835 function CALLBACK for every function in the call graph. Otherwise,
836 call CALLBACK on the current function. */
837
838 static void
839 do_per_function (void (*callback) (void *data), void *data)
840 {
841 if (current_function_decl)
842 callback (data);
843 else
844 {
845 struct cgraph_node *node;
846 for (node = cgraph_nodes; node; node = node->next)
847 if (node->analyzed)
848 {
849 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
850 current_function_decl = node->decl;
851 callback (data);
852 free_dominance_info (CDI_DOMINATORS);
853 free_dominance_info (CDI_POST_DOMINATORS);
854 current_function_decl = NULL;
855 pop_cfun ();
856 ggc_collect ();
857 }
858 }
859 }
860
861 /* Because inlining might remove no-longer-reachable nodes, we need to
862 keep the array visible to the garbage collector so that we do not read
863 nodes that have already been collected. */
864 static int nnodes;
865 static GTY ((length ("nnodes"))) struct cgraph_node **order;
866
867 /* If we are in IPA mode (i.e., current_function_decl is NULL), call
868 function CALLBACK for every function in the call graph, walking the
869 nodes in topological order. Otherwise, call CALLBACK on the current function. */
870
871 static void
872 do_per_function_toporder (void (*callback) (void *data), void *data)
873 {
874 int i;
875
876 if (current_function_decl)
877 callback (data);
878 else
879 {
880 gcc_assert (!order);
881 order = GGC_NEWVEC (struct cgraph_node *, cgraph_n_nodes);
882 nnodes = cgraph_postorder (order);
883 for (i = nnodes - 1; i >= 0; i--)
884 order[i]->process = 1;
885 for (i = nnodes - 1; i >= 0; i--)
886 {
887 struct cgraph_node *node = order[i];
888
889 /* Allow possibly removed nodes to be garbage collected. */
890 order[i] = NULL;
891 node->process = 0;
892 if (node->analyzed && (node->needed || node->reachable))
893 {
894 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
895 current_function_decl = node->decl;
896 callback (data);
897 free_dominance_info (CDI_DOMINATORS);
898 free_dominance_info (CDI_POST_DOMINATORS);
899 current_function_decl = NULL;
900 pop_cfun ();
901 ggc_collect ();
902 }
903 }
904 }
905 ggc_free (order);
906 order = NULL;
907 nnodes = 0;
908 }
909
910 /* Perform all TODO actions that ought to be done on each function. */
911
912 static void
913 execute_function_todo (void *data)
914 {
915 unsigned int flags = (size_t)data;
916 if (cfun->curr_properties & PROP_ssa)
917 flags |= TODO_verify_ssa;
918 flags &= ~cfun->last_verified;
919 if (!flags)
920 return;
921
922 statistics_fini_pass ();
923
924 /* Always clean up the CFG before trying to update SSA. */
925 if (flags & TODO_cleanup_cfg)
926 {
927 bool cleanup = cleanup_tree_cfg ();
928
929 if (cleanup && (cfun->curr_properties & PROP_ssa))
930 flags |= TODO_remove_unused_locals;
931
932 /* When cleanup_tree_cfg merges consecutive blocks, it may
933 perform some simplistic propagation when removing single
934 valued PHI nodes. This propagation may, in turn, cause the
935 SSA form to become out-of-date (see PR 22037). So, even
936 if the parent pass had not scheduled an SSA update, we may
937 still need to do one. */
938 if (!(flags & TODO_update_ssa_any) && need_ssa_update_p ())
939 flags |= TODO_update_ssa;
940 }
941
942 if (flags & TODO_update_ssa_any)
943 {
944 unsigned update_flags = flags & TODO_update_ssa_any;
945 update_ssa (update_flags);
946 cfun->last_verified &= ~TODO_verify_ssa;
947 }
948
949 if (flags & TODO_rebuild_alias)
950 {
951 compute_may_aliases ();
952 cfun->curr_properties |= PROP_alias;
953 }
954
955 if (flags & TODO_remove_unused_locals)
956 remove_unused_locals ();
957
958 if ((flags & TODO_dump_func) && dump_file && current_function_decl)
959 {
960 if (cfun->curr_properties & PROP_trees)
961 dump_function_to_file (current_function_decl, dump_file, dump_flags);
962 else
963 {
964 if (dump_flags & TDF_SLIM)
965 print_rtl_slim_with_bb (dump_file, get_insns (), dump_flags);
966 else if ((cfun->curr_properties & PROP_cfg)
967 && (dump_flags & TDF_BLOCKS))
968 print_rtl_with_bb (dump_file, get_insns ());
969 else
970 print_rtl (dump_file, get_insns ());
971
972 if ((cfun->curr_properties & PROP_cfg)
973 && graph_dump_format != no_graph
974 && (dump_flags & TDF_GRAPH))
975 print_rtl_graph_with_bb (dump_file_name, get_insns ());
976 }
977
978 /* Flush the file. If verification fails, we won't be able to
979 close the file before aborting. */
980 fflush (dump_file);
981 }
982
983 if (flags & TODO_rebuild_frequencies)
984 {
985 if (profile_status == PROFILE_GUESSED)
986 {
987 loop_optimizer_init (0);
988 add_noreturn_fake_exit_edges ();
989 mark_irreducible_loops ();
990 connect_infinite_loops_to_exit ();
991 estimate_bb_frequencies ();
992 remove_fake_exit_edges ();
993 loop_optimizer_finalize ();
994 }
995 else if (profile_status == PROFILE_READ)
996 counts_to_freqs ();
997 else
998 gcc_unreachable ();
999 }
1000
1001 #if defined ENABLE_CHECKING
1002 if (flags & TODO_verify_ssa)
1003 verify_ssa (true);
1004 if (flags & TODO_verify_flow)
1005 verify_flow_info ();
1006 if (flags & TODO_verify_stmts)
1007 verify_stmts ();
1008 if (flags & TODO_verify_loops)
1009 verify_loop_closed_ssa ();
1010 if (flags & TODO_verify_rtl_sharing)
1011 verify_rtl_sharing ();
1012 #endif
1013
1014 cfun->last_verified = flags & TODO_verify_all;
1015 }
1016
1017 /* Perform all TODO actions. */
1018 static void
1019 execute_todo (unsigned int flags)
1020 {
1021 #if defined ENABLE_CHECKING
1022 if (need_ssa_update_p ())
1023 gcc_assert (flags & TODO_update_ssa_any);
1024 #endif
1025
1026 /* Inform the pass whether it is the first time it is run. */
1027 first_pass_instance = (flags & TODO_mark_first_instance) != 0;
1028
1029 do_per_function (execute_function_todo, (void *)(size_t) flags);
1030
1031 /* Always remove functions just as before inlining: IPA passes might be
1032 interested in seeing the bodies of extern inline functions that are not
1033 inlined, in order to analyze side effects. The full removal is done only
1034 at the end of the IPA pass queue. */
1035 if (flags & TODO_remove_functions)
1036 {
1037 gcc_assert (!cfun);
1038 cgraph_remove_unreachable_nodes (true, dump_file);
1039 }
1040
1041 if ((flags & TODO_dump_cgraph) && dump_file && !current_function_decl)
1042 {
1043 gcc_assert (!cfun);
1044 dump_cgraph (dump_file);
1045 /* Flush the file. If verification fails, we won't be able to
1046 close the file before aborting. */
1047 fflush (dump_file);
1048 }
1049
1050 if (flags & TODO_ggc_collect)
1051 ggc_collect ();
1052
1053 /* Now that the dumping has been done, we can get rid of the optional
1054 df problems. */
1055 if (flags & TODO_df_finish)
1056 df_finish_pass ((flags & TODO_df_verify) != 0);
1057 }
1058
1059 /* Verify invariants that should hold between passes. This is a place
1060 to put simple sanity checks. */
1061
1062 static void
1063 verify_interpass_invariants (void)
1064 {
1065 #ifdef ENABLE_CHECKING
1066 gcc_assert (!fold_deferring_overflow_warnings_p ());
1067 #endif
1068 }
1069
1070 /* Clear the last verified flag. */
1071
1072 static void
1073 clear_last_verified (void *data ATTRIBUTE_UNUSED)
1074 {
1075 cfun->last_verified = 0;
1076 }
1077
1078 /* Helper function. Verify that the function's current properties include
1079 the properties expected by the pass. */
1080
1081 #ifdef ENABLE_CHECKING
1082 static void
1083 verify_curr_properties (void *data)
1084 {
1085 unsigned int props = (size_t)data;
1086 gcc_assert ((cfun->curr_properties & props) == props);
1087 }
1088 #endif
1089
1090 /* Initialize pass dump file. */
1091
1092 static bool
1093 pass_init_dump_file (struct opt_pass *pass)
1094 {
1095 /* If a dump file name is present, open it if enabled. */
1096 if (pass->static_pass_number != -1)
1097 {
1098 bool initializing_dump = !dump_initialized_p (pass->static_pass_number);
1099 dump_file_name = get_dump_file_name (pass->static_pass_number);
1100 dump_file = dump_begin (pass->static_pass_number, &dump_flags);
1101 if (dump_file && current_function_decl)
1102 {
1103 const char *dname, *aname;
1104 dname = lang_hooks.decl_printable_name (current_function_decl, 2);
1105 aname = (IDENTIFIER_POINTER
1106 (DECL_ASSEMBLER_NAME (current_function_decl)));
1107 fprintf (dump_file, "\n;; Function %s (%s)%s\n\n", dname, aname,
1108 cfun->function_frequency == FUNCTION_FREQUENCY_HOT
1109 ? " (hot)"
1110 : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
1111 ? " (unlikely executed)"
1112 : "");
1113 }
1114 return initializing_dump;
1115 }
1116 else
1117 return false;
1118 }
1119
1120 /* Flush PASS dump file. */
1121
1122 static void
1123 pass_fini_dump_file (struct opt_pass *pass)
1124 {
1125 /* Flush and close dump file. */
1126 if (dump_file_name)
1127 {
1128 free (CONST_CAST (char *, dump_file_name));
1129 dump_file_name = NULL;
1130 }
1131
1132 if (dump_file)
1133 {
1134 dump_end (pass->static_pass_number, dump_file);
1135 dump_file = NULL;
1136 }
1137 }
1138
1139 /* After executing the pass, apply expected changes to the function
1140 properties. */
1141
1142 static void
1143 update_properties_after_pass (void *data)
1144 {
1145 struct opt_pass *pass = (struct opt_pass *) data;
1146 cfun->curr_properties = (cfun->curr_properties | pass->properties_provided)
1147 & ~pass->properties_destroyed;
1148 }
1149
1150 /* Schedule IPA transform pass DATA for CFUN. */
1151
1152 static void
1153 add_ipa_transform_pass (void *data)
1154 {
1155 struct ipa_opt_pass *ipa_pass = (struct ipa_opt_pass *) data;
1156 VEC_safe_push (ipa_opt_pass, heap, cfun->ipa_transforms_to_apply, ipa_pass);
1157 }
1158
1159 /* Execute summary generation for all of the passes in IPA_PASS. */
1160
1161 static void
1162 execute_ipa_summary_passes (struct ipa_opt_pass *ipa_pass)
1163 {
1164 while (ipa_pass)
1165 {
1166 struct opt_pass *pass = &ipa_pass->pass;
1167
1168 /* Execute all of the IPA_PASSes in the list. */
1169 if (ipa_pass->pass.type == IPA_PASS
1170 && (!pass->gate || pass->gate ()))
1171 {
1172 pass_init_dump_file (pass);
1173 ipa_pass->generate_summary ();
1174 pass_fini_dump_file (pass);
1175 }
1176 ipa_pass = (struct ipa_opt_pass *)ipa_pass->pass.next;
1177 }
1178 }
1179
1180 /* Execute IPA_PASS function transform on NODE. */
1181
1182 static void
1183 execute_one_ipa_transform_pass (struct cgraph_node *node,
1184 struct ipa_opt_pass *ipa_pass)
1185 {
1186 struct opt_pass *pass = &ipa_pass->pass;
1187 unsigned int todo_after = 0;
1188
1189 current_pass = pass;
1190 if (!ipa_pass->function_transform)
1191 return;
1192
1193 /* Note that the folders should only create gimple expressions.
1194 This is a hack until the new folder is ready. */
1195 in_gimple_form = (cfun && (cfun->curr_properties & PROP_trees)) != 0;
1196
1197 pass_init_dump_file (pass);
1198
1199 /* Run pre-pass verification. */
1200 execute_todo (ipa_pass->function_transform_todo_flags_start);
1201
1202 /* If a timevar is present, start it. */
1203 if (pass->tv_id)
1204 timevar_push (pass->tv_id);
1205
1206 /* Do it! */
1207 todo_after = ipa_pass->function_transform (node);
1208
1209 /* Stop timevar. */
1210 if (pass->tv_id)
1211 timevar_pop (pass->tv_id);
1212
1213 /* Run post-pass cleanup and verification. */
1214 execute_todo (todo_after);
1215 verify_interpass_invariants ();
1216
1217 pass_fini_dump_file (pass);
1218
1219 current_pass = NULL;
1220 }
1221
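/* Run a single pass on the current function (or, for IPA passes, on the
whole program): apply any queued IPA transforms, check the pass's gate,
run the pre- and post-pass TODO actions and the pass itself, update the
function properties and maintain the dump file. Returns true if the
pass was actually executed. */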
1222 static bool
1223 execute_one_pass (struct opt_pass *pass)
1224 {
1225 bool initializing_dump;
1226 unsigned int todo_after = 0;
1227
1228 /* IPA passes are executed on the whole program, so cfun should be NULL.
1229 Other passes need the function context set. */
1230 if (pass->type == SIMPLE_IPA_PASS || pass->type == IPA_PASS)
1231 gcc_assert (!cfun && !current_function_decl);
1232 else
1233 gcc_assert (cfun && current_function_decl);
1234
1235 if (cfun && cfun->ipa_transforms_to_apply)
1236 {
1237 unsigned int i;
1238 struct cgraph_node *node = cgraph_node (current_function_decl);
1239
1240 for (i = 0; i < VEC_length (ipa_opt_pass, cfun->ipa_transforms_to_apply);
1241 i++)
1242 execute_one_ipa_transform_pass (node,
1243 VEC_index (ipa_opt_pass,
1244 cfun->ipa_transforms_to_apply,
1245 i));
1246 VEC_free (ipa_opt_pass, heap, cfun->ipa_transforms_to_apply);
1247 cfun->ipa_transforms_to_apply = NULL;
1248 }
1249
1250 current_pass = pass;
1251
1252 /* See if we're supposed to run this pass. */
1253 if (pass->gate && !pass->gate ())
1254 return false;
1255
1256 if (!quiet_flag && !cfun)
1257 fprintf (stderr, " <%s>", pass->name ? pass->name : "");
1258
1259 if (pass->todo_flags_start & TODO_set_props)
1260 cfun->curr_properties = pass->properties_required;
1261
1262 /* Note that the folders should only create gimple expressions.
1263 This is a hack until the new folder is ready. */
1264 in_gimple_form = (cfun && (cfun->curr_properties & PROP_trees)) != 0;
1265
1266 /* Run pre-pass verification. */
1267 execute_todo (pass->todo_flags_start);
1268
1269 #ifdef ENABLE_CHECKING
1270 do_per_function (verify_curr_properties,
1271 (void *)(size_t)pass->properties_required);
1272 #endif
1273
1274 initializing_dump = pass_init_dump_file (pass);
1275
1276 /* If a timevar is present, start it. */
1277 if (pass->tv_id)
1278 timevar_push (pass->tv_id);
1279
1280 /* Do it! */
1281 if (pass->execute)
1282 {
1283 todo_after = pass->execute ();
1284 do_per_function (clear_last_verified, NULL);
1285 }
1286
1287 /* Stop timevar. */
1288 if (pass->tv_id)
1289 timevar_pop (pass->tv_id);
1290
1291 do_per_function (update_properties_after_pass, pass);
1292
1293 if (initializing_dump
1294 && dump_file
1295 && graph_dump_format != no_graph
1296 && cfun
1297 && (cfun->curr_properties & (PROP_cfg | PROP_rtl))
1298 == (PROP_cfg | PROP_rtl))
1299 {
1300 get_dump_file_info (pass->static_pass_number)->flags |= TDF_GRAPH;
1301 dump_flags |= TDF_GRAPH;
1302 clean_graph_dump_file (dump_file_name);
1303 }
1304
1305 /* Run post-pass cleanup and verification. */
1306 execute_todo (todo_after | pass->todo_flags_finish);
1307 verify_interpass_invariants ();
1308 if (pass->type == IPA_PASS)
1309 do_per_function (add_ipa_transform_pass, pass);
1310
1311 if (!current_function_decl)
1312 cgraph_process_new_functions ();
1313
1314 pass_fini_dump_file (pass);
1315
1316 if (pass->type != SIMPLE_IPA_PASS && pass->type != IPA_PASS)
1317 gcc_assert (!(cfun->curr_properties & PROP_trees)
1318 || pass->type != RTL_PASS);
1319
1320 current_pass = NULL;
1321
1322 return true;
1323 }
1324
1325 void
1326 execute_pass_list (struct opt_pass *pass)
1327 {
1328 do
1329 {
1330 gcc_assert (pass->type == GIMPLE_PASS
1331 || pass->type == RTL_PASS);
1332 if (execute_one_pass (pass) && pass->sub)
1333 execute_pass_list (pass->sub);
1334 pass = pass->next;
1335 }
1336 while (pass);
1337 }
1338
1339 /* Same as execute_pass_list but assume that subpasses of IPA passes
1340 are local passes. */
1341 void
1342 execute_ipa_pass_list (struct opt_pass *pass)
1343 {
1344 bool summaries_generated = false;
1345 do
1346 {
1347 gcc_assert (!current_function_decl);
1348 gcc_assert (!cfun);
1349 gcc_assert (pass->type == SIMPLE_IPA_PASS || pass->type == IPA_PASS);
1350 if (pass->type == IPA_PASS && (!pass->gate || pass->gate ()))
1351 {
1352 if (!summaries_generated)
1353 {
1354 if (!quiet_flag && !cfun)
1355 fprintf (stderr, " <summary generate>");
1356 execute_ipa_summary_passes ((struct ipa_opt_pass *) pass);
1357 }
1358 summaries_generated = true;
1359 }
1360 if (execute_one_pass (pass) && pass->sub)
1361 {
1362 if (pass->sub->type == GIMPLE_PASS)
1363 do_per_function_toporder ((void (*)(void *))execute_pass_list,
1364 pass->sub);
1365 else if (pass->sub->type == SIMPLE_IPA_PASS
1366 || pass->sub->type == IPA_PASS)
1367 execute_ipa_pass_list (pass->sub);
1368 else
1369 gcc_unreachable ();
1370 }
1371 if (!current_function_decl)
1372 cgraph_process_new_functions ();
1373 pass = pass->next;
1374 }
1375 while (pass);
1376 }
1377
1378 /* Called by local passes to see if the current function is called by an
1379 already-processed node. Because we process nodes in topological order,
1380 this means the function is in a recursive cycle or we introduced new direct calls. */
1381 bool
1382 function_called_by_processed_nodes_p (void)
1383 {
1384 struct cgraph_edge *e;
1385 for (e = cgraph_node (current_function_decl)->callers; e; e = e->next_caller)
1386 {
1387 if (e->caller->decl == current_function_decl)
1388 continue;
1389 if (!e->caller->analyzed || (!e->caller->needed && !e->caller->reachable))
1390 continue;
1391 if (TREE_ASM_WRITTEN (e->caller->decl))
1392 continue;
1393 if (!e->caller->process && !e->caller->global.inlined_to)
1394 break;
1395 }
1396 if (dump_file && e)
1397 {
1398 fprintf (dump_file, "Already processed call to:\n");
1399 dump_cgraph_node (dump_file, e->caller);
1400 }
1401 return e != NULL;
1402 }
1403
1404 #include "gt-passes.h"