1 /* Driver of optimization process
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011, 2012 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements main driver of compilation process.
23
24 The main scope of this file is to act as an interface between
25 tree-based frontends and the backend.
26
27 The front-end is supposed to use the following functionality (an illustrative calling sketch follows this comment block):
28
29 - cgraph_finalize_function
30
31 This function is called once the front-end has parsed the whole body of the function
32 and it is certain that neither the function body nor the declaration will change.
33
34 (There is one exception needed for implementing GCC extern inline
35 functions.)
36
37 - varpool_finalize_decl
38
39 This function has the same behavior as the above but is used for static
40 variables.
41
42 - add_asm_node
43
44 Insert a new toplevel ASM statement.
45
46 - finalize_compilation_unit
47
48 This function is called once the (source level) compilation unit is finalized
49 and it will no longer change.
50
51 The symbol table is constructed starting from the trivially needed
52 symbols finalized by the frontend. Functions are lowered into
53 GIMPLE representation and callgraph/reference lists are constructed.
54 Those are used to discover other necessary functions and variables.
55
56 At the end the bodies of unreachable functions are removed.
57
58 The function can be called multiple times when multiple source level
59 compilation units are combined.
60
61 - compile
62
63 This passes control to the back-end. Optimizations are performed and
64 final assembler is generated. This is done in the following way. Note
65 that with link time optimization the process is split into three
66 stages (compile time, linktime analysis and parallel linktime as
67 indicated below).
68
69 Compile time:
70
71 1) Inter-procedural optimization.
72 (ipa_passes)
73
74 This part is further split into:
75
76 a) early optimizations. These are local passes executed in
77 the topological order on the callgraph.
78
79 The purpose of early optimizations is to optimize away simple
80 things that may otherwise confuse IP analysis. Very simple
81 propagation across the callgraph is done, e.g. to discover
82 functions without side effects, and simple inlining is performed.
83
84 b) early small interprocedural passes.
85
86 Those are interprocedural passes executed only at compilation
87 time. These include, for example, transactional memory lowering,
88 unreachable code removal and other simple transformations.
89
90 c) IP analysis stage. All interprocedural passes do their
91 analysis.
92
93 Interprocedural passes differ from small interprocedural
94 passes by their ability to operate across the whole program
95 at link time. Their analysis stage is performed early to
96 reduce both linking times and link-time memory usage by
97 not having to represent the whole program in memory.
98
99 d) LTO streaming. When doing LTO, everything important gets
100 streamed into the object file.
101
102 Compile time and/or link-time analysis stage (WPA):
103
104 At link time the units get streamed back and the symbol table is
105 merged. Function bodies are not streamed in and are not
106 available.
107 e) IP propagation stage. All IP passes execute their
108 IP propagation. This is done based on the earlier analysis
109 without having function bodies at hand.
110 f) Ltrans streaming. When doing WHOPR LTO, the program
111 is partitioned and streamed into multiple object files.
112
113 Compile time and/or parallel linktime stage (ltrans)
114
115 Each of the object files is streamed back and compiled
116 separately. Now the function bodies become available
117 again.
118
119 2) Virtual clone materialization
120 (cgraph_materialize_clone)
121
122 IP passes can produce copies of existing functions (such
123 as versioned clones or inline clones) without actually
124 manipulating their bodies by creating virtual clones in
125 the callgraph. At this time the virtual clones are
126 turned into real functions.
127 3) IP transformation
128
129 All IP passes transform function bodies based on earlier
130 decisions of the IP propagation.
131
132 4) late small IP passes
133
134 Simple IP passes working within a single program partition.
135
136 5) Expansion
137 (expand_all_functions)
138
139 At this stage functions that need to be output into
140 assembler are identified and compiled in topological order.
141 6) Output of variables and aliases
142 Now it is known which variable references were not optimized
143 out and thus all variables are output to the file.
144
145 Note that with -fno-toplevel-reorder passes 5 and 6
146 are combined together in cgraph_output_in_order.
147
148 Finally there are functions to manipulate the callgraph from
149 the backend.
150 - cgraph_add_new_function is used to add backend-produced
151 functions introduced after the unit is finalized.
152 The functions are enqueued for later processing and inserted
153 into the callgraph with cgraph_process_new_functions.
154
155 - cgraph_function_versioning
156
157 produces a copy of a function (a version)
158 and applies simple transformations.
159 */
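/* For illustration only (not part of GCC): a minimal sketch of the calling
   sequence a tree-based front end is expected to follow, using the interface
   described above.  The names parse_one_function, parsed_fndecl,
   parsed_vardecl and asm_string_tree are hypothetical placeholders for
   whatever the front end's parser produces; only the cgraph/varpool calls
   themselves are taken from this file.

     while (parse_one_function (&parsed_fndecl))
       cgraph_finalize_function (parsed_fndecl, false);

     varpool_finalize_decl (parsed_vardecl);    (once per file-scope static)
     add_asm_node (asm_string_tree);            (once per toplevel asm)

     finalize_compilation_unit ();

   after which the driver eventually passes control to the backend as
   described under "compile" above.  */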
160
161 #include "config.h"
162 #include "system.h"
163 #include "coretypes.h"
164 #include "tm.h"
165 #include "tree.h"
166 #include "output.h"
167 #include "rtl.h"
168 #include "tree-flow.h"
169 #include "tree-inline.h"
170 #include "langhooks.h"
171 #include "pointer-set.h"
172 #include "toplev.h"
173 #include "flags.h"
174 #include "ggc.h"
175 #include "debug.h"
176 #include "target.h"
177 #include "cgraph.h"
178 #include "diagnostic.h"
179 #include "params.h"
180 #include "fibheap.h"
181 #include "intl.h"
182 #include "function.h"
183 #include "ipa-prop.h"
184 #include "gimple.h"
185 #include "tree-iterator.h"
186 #include "tree-pass.h"
187 #include "tree-dump.h"
188 #include "gimple-pretty-print.h"
189 #include "output.h"
190 #include "coverage.h"
191 #include "plugin.h"
192 #include "ipa-inline.h"
193 #include "ipa-utils.h"
194 #include "lto-streamer.h"
195 #include "except.h"
196 #include "regset.h" /* FIXME: For reg_obstack. */
197
198 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
199 secondary queue used during optimization to accommodate passes that
200 may generate new functions that need to be optimized and expanded. */
201 cgraph_node_set cgraph_new_nodes;
202
203 static void expand_all_functions (void);
204 static void mark_functions_to_output (void);
205 static void expand_function (struct cgraph_node *);
206 static void cgraph_analyze_function (struct cgraph_node *);
207 static void handle_alias_pairs (void);
208
209 FILE *cgraph_dump_file;
210
211 /* Linked list of cgraph asm nodes. */
212 struct asm_node *asm_nodes;
213
214 /* Last node in asm_nodes. */
215 static GTY(()) struct asm_node *asm_last_node;
216
217 /* Used for vtable lookup in thunk adjusting. */
218 static GTY (()) tree vtable_entry_type;
219
220 /* Determine if function DECL is trivially needed and should stay in the
221 compilation unit. This is used at the symbol table construction time
222 and differs from later logic removing unnecessary functions that can
223 take into account results of analysis, whole program info etc. */
224
225 static bool
226 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
227 {
228 /* If the user told us it is used, then it must be so. */
229 if (node->symbol.force_output)
230 return true;
231
232 /* Double check that no one has output the function into the assembly file
233 early. */
234 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
235 || (node->thunk.thunk_p || node->same_body_alias)
236 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
237
238
239 /* Keep constructors, destructors and virtual functions. */
240 if (DECL_STATIC_CONSTRUCTOR (decl)
241 || DECL_STATIC_DESTRUCTOR (decl)
242 || (DECL_VIRTUAL_P (decl)
243 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
244 return true;
245
246 /* Externally visible functions must be output. The exception is
247 COMDAT functions that must be output only when they are needed. */
248
249 if (TREE_PUBLIC (decl)
250 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
251 return true;
252
253 return false;
254 }
255
256 /* Head of the queue of nodes to be processed while building the callgraph. */
257
258 static symtab_node first = (symtab_node)(void *)1;
259
260 /* Add NODE to queue starting at FIRST.
261 The queue is linked via AUX pointers and terminated by a pointer to 1. */
262
263 static void
264 enqueue_node (symtab_node node)
265 {
266 if (node->symbol.aux)
267 return;
268 gcc_checking_assert (first);
269 node->symbol.aux = first;
270 first = node;
271 }
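/* For illustration only: consumers drain the queue with the pattern used in
   cgraph_analyze_functions below; this restates that loop rather than adding
   a new API:

     while (first != (symtab_node)(void *)1)
       {
         symtab_node node = first;
         first = (symtab_node)first->symbol.aux;
         ... process NODE, which may enqueue further nodes ...
       }  */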
272
273 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
274 functions into the callgraph so that they look like ordinary reachable
275 functions inserted into the callgraph already at construction time. */
276
277 bool
278 cgraph_process_new_functions (void)
279 {
280 bool output = false;
281 tree fndecl;
282 struct cgraph_node *node;
283 cgraph_node_set_iterator csi;
284
285 if (!cgraph_new_nodes)
286 return false;
287 handle_alias_pairs ();
288 /* Note that this queue may grow as it is being processed, as the new
289 functions may generate new ones. */
290 for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
291 {
292 node = csi_node (csi);
293 fndecl = node->symbol.decl;
294 switch (cgraph_state)
295 {
296 case CGRAPH_STATE_CONSTRUCTION:
297 /* At construction time we just need to finalize function and move
298 it into reachable functions list. */
299
300 cgraph_finalize_function (fndecl, false);
301 output = true;
302 cgraph_call_function_insertion_hooks (node);
303 enqueue_node ((symtab_node) node);
304 break;
305
306 case CGRAPH_STATE_IPA:
307 case CGRAPH_STATE_IPA_SSA:
308 /* When IPA optimization has already started, do all essential
309 transformations that have already been performed on the whole
310 cgraph but not on this function. */
311
312 gimple_register_cfg_hooks ();
313 if (!node->analyzed)
314 cgraph_analyze_function (node);
315 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
316 current_function_decl = fndecl;
317 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
318 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
319 /* When not optimizing, be sure we run early local passes anyway
320 to expand OMP. */
321 || !optimize)
322 execute_pass_list (pass_early_local_passes.pass.sub);
323 else
324 compute_inline_parameters (node, true);
325 free_dominance_info (CDI_POST_DOMINATORS);
326 free_dominance_info (CDI_DOMINATORS);
327 pop_cfun ();
328 current_function_decl = NULL;
329 cgraph_call_function_insertion_hooks (node);
330 break;
331
332 case CGRAPH_STATE_EXPANSION:
333 /* Functions created during expansion shall be compiled
334 directly. */
335 node->process = 0;
336 cgraph_call_function_insertion_hooks (node);
337 expand_function (node);
338 break;
339
340 default:
341 gcc_unreachable ();
342 break;
343 }
344 }
345 free_cgraph_node_set (cgraph_new_nodes);
346 cgraph_new_nodes = NULL;
347 return output;
348 }
349
350 /* As a GCC extension we allow redefinition of the function. The
351 semantics when the two bodies differ are not well defined.
352 We replace the old body with the new body, so in unit-at-a-time mode
353 we always use the new body, while in normal mode we may end up with
354 the old body inlined into some functions and the new body expanded and
355 inlined in others.
356
357 ??? It may make more sense to use one body for inlining and the other
358 body for expanding the function but this is difficult to do. */
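/* For illustration only: the extension in question is a redefinition such as
   (user source, not GCC code):

     extern inline int f (void) { return 1; }
     int f (void) { return 2; }

   where the body recorded for the first definition is replaced by the
   second one, as described above.  */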
359
360 static void
361 cgraph_reset_node (struct cgraph_node *node)
362 {
363 /* If node->process is set, then we have already begun whole-unit analysis.
364 This is *not* testing for whether we've already emitted the function.
365 That case can be sort-of legitimately seen with real function redefinition
366 errors. I would argue that the front end should never present us with
367 such a case, but don't enforce that for now. */
368 gcc_assert (!node->process);
369
370 /* Reset our data structures so we can analyze the function again. */
371 memset (&node->local, 0, sizeof (node->local));
372 memset (&node->global, 0, sizeof (node->global));
373 memset (&node->rtl, 0, sizeof (node->rtl));
374 node->analyzed = false;
375 node->local.finalized = false;
376
377 cgraph_node_remove_callees (node);
378 }
379
380 /* Return true when there are references to NODE. */
381
382 static bool
383 referred_to_p (symtab_node node)
384 {
385 struct ipa_ref *ref;
386
387 /* See if there are any references at all. */
388 if (ipa_ref_list_referring_iterate (&node->symbol.ref_list, 0, ref))
389 return true;
390 /* For functions check also calls. */
391 if (symtab_function_p (node) && cgraph (node)->callers)
392 return true;
393 return false;
394 }
395
396 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
397 logic in effect. If NESTED is true, then our caller cannot stand to have
398 the garbage collector run at the moment. We would need to either create
399 a new GC context, or just not compile right now. */
400
401 void
402 cgraph_finalize_function (tree decl, bool nested)
403 {
404 struct cgraph_node *node = cgraph_get_create_node (decl);
405
406 if (node->local.finalized)
407 {
408 cgraph_reset_node (node);
409 node->local.redefined_extern_inline = true;
410 }
411
412 notice_global_symbol (decl);
413 node->local.finalized = true;
414 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
415
416 /* With -fkeep-inline-functions we are keeping all inline functions except
417 for extern inline ones. */
418 if (flag_keep_inline_functions
419 && DECL_DECLARED_INLINE_P (decl)
420 && !DECL_EXTERNAL (decl)
421 && !DECL_DISREGARD_INLINE_LIMITS (decl))
422 node->symbol.force_output = 1;
423
424 /* When not optimizing, also output the static functions (see
425 PR24561), but don't do so for always_inline functions, functions
426 declared inline and nested functions. These were optimized out
427 in the original implementation and it is unclear whether we want
428 to change the behavior here. */
429 if ((!optimize
430 && !node->same_body_alias
431 && !DECL_DISREGARD_INLINE_LIMITS (decl)
432 && !DECL_DECLARED_INLINE_P (decl)
433 && !(DECL_CONTEXT (decl)
434 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
435 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
436 node->symbol.force_output = 1;
437
438 /* If we've not yet emitted decl, tell the debug info about it. */
439 if (!TREE_ASM_WRITTEN (decl))
440 (*debug_hooks->deferred_inline_function) (decl);
441
442 /* Possibly warn about unused parameters. */
443 if (warn_unused_parameter)
444 do_warn_unused_parameter (decl);
445
446 if (!nested)
447 ggc_collect ();
448
449 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
450 && (cgraph_decide_is_function_needed (node, decl)
451 || referred_to_p ((symtab_node)node)))
452 enqueue_node ((symtab_node)node);
453 }
454
455 /* Add the function FNDECL to the call graph.
456 Unlike cgraph_finalize_function, this function is intended to be used
457 by the middle end and allows insertion of a new function at an arbitrary
458 point of compilation. The function can be either in high, low or SSA
459 GIMPLE form.
460
461 The function is assumed to be reachable and to have its address taken (so no
462 API breaking optimizations are performed on it).
463
464 The main work done by this function is to enqueue the function for later
465 processing to avoid the need for the passes to be re-entrant. */
466
467 void
468 cgraph_add_new_function (tree fndecl, bool lowered)
469 {
470 struct cgraph_node *node;
471 switch (cgraph_state)
472 {
473 case CGRAPH_STATE_PARSING:
474 cgraph_finalize_function (fndecl, false);
475 break;
476 case CGRAPH_STATE_CONSTRUCTION:
477 /* Just enqueue the function to be processed at the nearest occurrence. */
478 node = cgraph_create_node (fndecl);
479 if (lowered)
480 node->lowered = true;
481 if (!cgraph_new_nodes)
482 cgraph_new_nodes = cgraph_node_set_new ();
483 cgraph_node_set_add (cgraph_new_nodes, node);
484 break;
485
486 case CGRAPH_STATE_IPA:
487 case CGRAPH_STATE_IPA_SSA:
488 case CGRAPH_STATE_EXPANSION:
489 /* Bring the function into finalized state and enqueue it for later
490 analysis and compilation. */
491 node = cgraph_get_create_node (fndecl);
492 node->local.local = false;
493 node->local.finalized = true;
494 node->symbol.force_output = true;
495 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
496 {
497 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
498 current_function_decl = fndecl;
499 gimple_register_cfg_hooks ();
500 bitmap_obstack_initialize (NULL);
501 execute_pass_list (all_lowering_passes);
502 execute_pass_list (pass_early_local_passes.pass.sub);
503 bitmap_obstack_release (NULL);
504 pop_cfun ();
505 current_function_decl = NULL;
506
507 lowered = true;
508 }
509 if (lowered)
510 node->lowered = true;
511 if (!cgraph_new_nodes)
512 cgraph_new_nodes = cgraph_node_set_new ();
513 cgraph_node_set_add (cgraph_new_nodes, node);
514 break;
515
516 case CGRAPH_STATE_FINISHED:
517 /* At the very end of compilation we have to do all the work up
518 to expansion. */
519 node = cgraph_create_node (fndecl);
520 if (lowered)
521 node->lowered = true;
522 cgraph_analyze_function (node);
523 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
524 current_function_decl = fndecl;
525 gimple_register_cfg_hooks ();
526 bitmap_obstack_initialize (NULL);
527 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
528 execute_pass_list (pass_early_local_passes.pass.sub);
529 bitmap_obstack_release (NULL);
530 pop_cfun ();
531 expand_function (node);
532 current_function_decl = NULL;
533 break;
534
535 default:
536 gcc_unreachable ();
537 }
538
539 /* Set a personality if required and we already passed EH lowering. */
540 if (lowered
541 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
542 == eh_personality_lang))
543 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
544 }
545
546 /* Add a top-level asm statement to the list. */
547
548 struct asm_node *
549 add_asm_node (tree asm_str)
550 {
551 struct asm_node *node;
552
553 node = ggc_alloc_cleared_asm_node ();
554 node->asm_str = asm_str;
555 node->order = symtab_order++;
556 node->next = NULL;
557 if (asm_nodes == NULL)
558 asm_nodes = node;
559 else
560 asm_last_node->next = node;
561 asm_last_node = node;
562 return node;
563 }
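/* For illustration only (hypothetical caller, not GCC code): a front end
   holding the text of a toplevel asm ("...") statement in a C string S could
   register it roughly like this, build_string being the tree constructor for
   STRING_CSTs:

     add_asm_node (build_string (strlen (s), s));

   The recorded node is emitted later by output_asm_statements below.  */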
564
565 /* Output all asm statements we have stored up to be output. */
566
567 static void
568 output_asm_statements (void)
569 {
570 struct asm_node *can;
571
572 if (seen_error ())
573 return;
574
575 for (can = asm_nodes; can; can = can->next)
576 assemble_asm (can->asm_str);
577 asm_nodes = NULL;
578 }
579
580 /* The C++ FE sometimes changes linkage flags after producing same-body aliases. */
581 void
582 fixup_same_cpp_alias_visibility (symtab_node node, symtab_node target, tree alias)
583 {
584 DECL_VIRTUAL_P (node->symbol.decl) = DECL_VIRTUAL_P (alias);
585 if (TREE_PUBLIC (node->symbol.decl))
586 {
587 DECL_EXTERNAL (node->symbol.decl) = DECL_EXTERNAL (alias);
588 DECL_COMDAT (node->symbol.decl) = DECL_COMDAT (alias);
589 DECL_COMDAT_GROUP (node->symbol.decl) = DECL_COMDAT_GROUP (alias);
590 if (DECL_ONE_ONLY (alias)
591 && !node->symbol.same_comdat_group)
592 symtab_add_to_same_comdat_group ((symtab_node)node, (symtab_node)target);
593 }
594 }
595
596 /* Analyze the function scheduled to be output. */
597 static void
598 cgraph_analyze_function (struct cgraph_node *node)
599 {
600 tree save = current_function_decl;
601 tree decl = node->symbol.decl;
602 location_t saved_loc = input_location;
603 input_location = DECL_SOURCE_LOCATION (decl);
604
605 if (node->alias && node->thunk.alias)
606 {
607 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
608 struct cgraph_node *n;
609
610 for (n = tgt; n && n->alias;
611 n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
612 if (n == node)
613 {
614 error ("function %q+D part of alias cycle", node->symbol.decl);
615 node->alias = false;
616 input_location = saved_loc;
617 return;
618 }
619 if (!VEC_length (ipa_ref_t, node->symbol.ref_list.references))
620 ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
621 IPA_REF_ALIAS, NULL);
622 if (node->same_body_alias)
623 {
624 DECL_DECLARED_INLINE_P (node->symbol.decl)
625 = DECL_DECLARED_INLINE_P (node->thunk.alias);
626 DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl)
627 = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
628 fixup_same_cpp_alias_visibility ((symtab_node) node, (symtab_node) tgt, node->thunk.alias);
629 }
630
631 if (node->symbol.address_taken)
632 cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
633 }
634 else if (node->thunk.thunk_p)
635 {
636 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
637 NULL, 0, CGRAPH_FREQ_BASE);
638 }
639 else
640 {
641 current_function_decl = decl;
642 push_cfun (DECL_STRUCT_FUNCTION (decl));
643
644 assign_assembler_name_if_neeeded (node->symbol.decl);
645
646 /* Make sure to gimplify bodies only once. While analyzing a
647 function we lower it, which will require gimplified nested
648 functions, so we can end up here with an already gimplified
649 body. */
650 if (!gimple_has_body_p (decl))
651 gimplify_function_tree (decl);
652 dump_function (TDI_generic, decl);
653
654 /* Lower the function. */
655 if (!node->lowered)
656 {
657 if (node->nested)
658 lower_nested_functions (node->symbol.decl);
659 gcc_assert (!node->nested);
660
661 gimple_register_cfg_hooks ();
662 bitmap_obstack_initialize (NULL);
663 execute_pass_list (all_lowering_passes);
664 free_dominance_info (CDI_POST_DOMINATORS);
665 free_dominance_info (CDI_DOMINATORS);
666 compact_blocks ();
667 bitmap_obstack_release (NULL);
668 node->lowered = true;
669 }
670
671 pop_cfun ();
672 }
673 node->analyzed = true;
674
675 current_function_decl = save;
676 input_location = saved_loc;
677 }
678
679 /* The C++ frontend produces same-body aliases all over the place, even before
680 PCH gets streamed out. It relies on us linking the aliases with their function
681 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
682 first produce aliases without links, but once the C++ FE is sure it won't stream
683 PCH we build the links via this function. */
684
685 void
686 cgraph_process_same_body_aliases (void)
687 {
688 struct cgraph_node *node;
689 FOR_EACH_FUNCTION (node)
690 if (node->same_body_alias
691 && !VEC_length (ipa_ref_t, node->symbol.ref_list.references))
692 {
693 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
694 ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
695 IPA_REF_ALIAS, NULL);
696 }
697 same_body_aliases_done = true;
698 }
699
700 /* Process attributes common for vars and functions. */
701
702 static void
703 process_common_attributes (tree decl)
704 {
705 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
706
707 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
708 {
709 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
710 "%<weakref%> attribute should be accompanied with"
711 " an %<alias%> attribute");
712 DECL_WEAK (decl) = 0;
713 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
714 DECL_ATTRIBUTES (decl));
715 }
716 }
717
718 /* Look for externally_visible and used attributes and mark cgraph nodes
719 accordingly.
720
721 We cannot mark the nodes at the point the attributes are processed (in
722 handle_*_attribute) because the copy of the declarations available at that
723 point may not be canonical. For example, in:
724
725 void f();
726 void f() __attribute__((used));
727
728 the declaration we see in handle_used_attribute will be the second
729 declaration -- but the front end will subsequently merge that declaration
730 with the original declaration and discard the second declaration.
731
732 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
733
734 void f() {}
735 void f() __attribute__((externally_visible));
736
737 is valid.
738
739 So, we walk the nodes at the end of the translation unit, applying the
740 attributes at that point. */
741
742 static void
743 process_function_and_variable_attributes (struct cgraph_node *first,
744 struct varpool_node *first_var)
745 {
746 struct cgraph_node *node;
747 struct varpool_node *vnode;
748
749 for (node = cgraph_first_function (); node != first;
750 node = cgraph_next_function (node))
751 {
752 tree decl = node->symbol.decl;
753 if (DECL_PRESERVE_P (decl))
754 cgraph_mark_force_output_node (node);
755 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
756 {
757 if (! TREE_PUBLIC (node->symbol.decl))
758 warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
759 "%<externally_visible%>"
760 " attribute have effect only on public objects");
761 }
762 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
763 && (node->local.finalized && !node->alias))
764 {
765 warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
766 "%<weakref%> attribute ignored"
767 " because function is defined");
768 DECL_WEAK (decl) = 0;
769 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
770 DECL_ATTRIBUTES (decl));
771 }
772
773 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
774 && !DECL_DECLARED_INLINE_P (decl)
775 /* redefining extern inline function makes it DECL_UNINLINABLE. */
776 && !DECL_UNINLINABLE (decl))
777 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
778 "always_inline function might not be inlinable");
779
780 process_common_attributes (decl);
781 }
782 for (vnode = varpool_first_variable (); vnode != first_var;
783 vnode = varpool_next_variable (vnode))
784 {
785 tree decl = vnode->symbol.decl;
786 if (DECL_EXTERNAL (decl)
787 && DECL_INITIAL (decl)
788 && const_value_known_p (decl))
789 varpool_finalize_decl (decl);
790 if (DECL_PRESERVE_P (decl))
791 vnode->symbol.force_output = true;
792 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
793 {
794 if (! TREE_PUBLIC (vnode->symbol.decl))
795 warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
796 "%<externally_visible%>"
797 " attribute have effect only on public objects");
798 }
799 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
800 && vnode->finalized
801 && DECL_INITIAL (decl))
802 {
803 warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
804 "%<weakref%> attribute ignored"
805 " because variable is initialized");
806 DECL_WEAK (decl) = 0;
807 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
808 DECL_ATTRIBUTES (decl));
809 }
810 process_common_attributes (decl);
811 }
812 }
813
814 /* Mark DECL as finalized. By finalizing the declaration, the frontend instructs
815 the middle end to output the variable to the asm file, if needed or externally
816 visible. */
817
818 void
819 varpool_finalize_decl (tree decl)
820 {
821 struct varpool_node *node = varpool_node (decl);
822
823 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
824
825 if (node->finalized)
826 return;
827 notice_global_symbol (decl);
828 node->finalized = true;
829 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
830 /* Traditionally we do not eliminate static variables when not
831 optimizing and when not doing toplevel reorder. */
832 || (!flag_toplevel_reorder && !DECL_COMDAT (node->symbol.decl)
833 && !DECL_ARTIFICIAL (node->symbol.decl)))
834 node->symbol.force_output = true;
835
836 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
837 && (decide_is_variable_needed (node, decl)
838 || referred_to_p ((symtab_node)node)))
839 enqueue_node ((symtab_node)node);
840 if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
841 varpool_analyze_node (node);
842 /* Some frontends produce various interface variables after compilation
843 has finished. */
844 if (cgraph_state == CGRAPH_STATE_FINISHED)
845 varpool_assemble_decl (node);
846 }
847
848 /* Discover all functions and variables that are trivially needed, analyze
849 them as well as all functions and variables referred to by them. */
850
851 static void
852 cgraph_analyze_functions (void)
853 {
854 /* Keep track of already processed nodes when called multiple times for
855 intermodule optimization. */
856 static struct cgraph_node *first_analyzed;
857 struct cgraph_node *first_handled = first_analyzed;
858 static struct varpool_node *first_analyzed_var;
859 struct varpool_node *first_handled_var = first_analyzed_var;
860
861 symtab_node node, next;
862 int i;
863 struct ipa_ref *ref;
864 bool changed = true;
865
866 bitmap_obstack_initialize (NULL);
867 cgraph_state = CGRAPH_STATE_CONSTRUCTION;
868
869 /* Analysis adds static variables that in turn add references to new functions.
870 So we need to iterate the process until it stabilizes. */
871 while (changed)
872 {
873 changed = false;
874 process_function_and_variable_attributes (first_analyzed,
875 first_analyzed_var);
876
877 /* First identify the trivially needed symbols. */
878 for (node = symtab_nodes;
879 node != (symtab_node)first_analyzed
880 && node != (symtab_node)first_analyzed_var; node = node->symbol.next)
881 {
882 if ((symtab_function_p (node)
883 && cgraph (node)->local.finalized
884 && cgraph_decide_is_function_needed (cgraph (node), node->symbol.decl))
885 || (symtab_variable_p (node)
886 && varpool (node)->finalized
887 && !DECL_EXTERNAL (node->symbol.decl)
888 && decide_is_variable_needed (varpool (node), node->symbol.decl)))
889 {
890 enqueue_node (node);
891 if (!changed && cgraph_dump_file)
892 fprintf (cgraph_dump_file, "Trivially needed symbols:");
893 changed = true;
894 if (cgraph_dump_file)
895 fprintf (cgraph_dump_file, " %s", symtab_node_asm_name (node));
896 }
897 if (node == (symtab_node)first_analyzed
898 || node == (symtab_node)first_analyzed_var)
899 break;
900 }
901 cgraph_process_new_functions ();
902 first_analyzed_var = varpool_first_variable ();
903 first_analyzed = cgraph_first_function ();
904
905 if (changed && cgraph_dump_file)
906 fprintf (cgraph_dump_file, "\n");
907
908 /* Lower representation, build callgraph edges and references for all trivially
909 needed symbols and all symbols referred to by them. */
910 while (first != (symtab_node)(void *)1)
911 {
912 changed = true;
913 node = first;
914 first = (symtab_node)first->symbol.aux;
915 if (symtab_function_p (node) && cgraph (node)->local.finalized)
916 {
917 struct cgraph_edge *edge;
918 struct cgraph_node *cnode;
919 tree decl;
920
921 cnode = cgraph (node);
922 decl = cnode->symbol.decl;
923
924 /* ??? It is possible to create an extern inline function and later use the
925 weak alias attribute to kill its body. See
926 gcc.c-torture/compile/20011119-1.c */
927 if (!DECL_STRUCT_FUNCTION (decl)
928 && (!cnode->alias || !cnode->thunk.alias)
929 && !cnode->thunk.thunk_p)
930 {
931 cgraph_reset_node (cnode);
932 cnode->local.redefined_extern_inline = true;
933 continue;
934 }
935
936 if (!cnode->analyzed)
937 cgraph_analyze_function (cnode);
938
939 for (edge = cnode->callees; edge; edge = edge->next_callee)
940 if (edge->callee->local.finalized)
941 enqueue_node ((symtab_node)edge->callee);
942
943 /* If decl is a clone of an abstract function, mark that abstract
944 function so that we don't release its body. The DECL_INITIAL() of that
945 abstract function declaration will be later needed to output debug
946 info. */
947 if (DECL_ABSTRACT_ORIGIN (decl))
948 {
949 struct cgraph_node *origin_node;
950 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
951 origin_node->abstract_and_needed = true;
952 }
953
954 }
955 else if (symtab_variable_p (node)
956 && varpool (node)->finalized)
957 varpool_analyze_node (varpool (node));
958
959 if (node->symbol.same_comdat_group)
960 {
961 symtab_node next;
962 for (next = node->symbol.same_comdat_group;
963 next != node;
964 next = next->symbol.same_comdat_group)
965 enqueue_node (next);
966 }
967 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
968 if ((symtab_function_p (ref->referred) && cgraph (ref->referred)->local.finalized)
969 || (symtab_variable_p (ref->referred) && varpool (ref->referred)->finalized))
970 enqueue_node (ref->referred);
971 cgraph_process_new_functions ();
972 }
973 }
974
975 /* Collect entry points to the unit. */
976 if (cgraph_dump_file)
977 {
978 fprintf (cgraph_dump_file, "\n\nInitial ");
979 dump_symtab (cgraph_dump_file);
980 }
981
982 if (cgraph_dump_file)
983 fprintf (cgraph_dump_file, "\nRemoving unused symbols:");
984
985 for (node = symtab_nodes;
986 node != (symtab_node)first_handled
987 && node != (symtab_node)first_handled_var; node = next)
988 {
989 next = node->symbol.next;
990 if (!node->symbol.aux && !referred_to_p (node))
991 {
992 if (cgraph_dump_file)
993 fprintf (cgraph_dump_file, " %s", symtab_node_name (node));
994 symtab_remove_node (node);
995 continue;
996 }
997 if (symtab_function_p (node))
998 {
999 tree decl = node->symbol.decl;
1000 struct cgraph_node *cnode = cgraph (node);
1001
1002 if (cnode->local.finalized && !gimple_has_body_p (decl)
1003 && (!cnode->alias || !cnode->thunk.alias)
1004 && !cnode->thunk.thunk_p)
1005 cgraph_reset_node (cnode);
1006
1007 gcc_assert (!cnode->local.finalized || cnode->thunk.thunk_p
1008 || cnode->alias
1009 || gimple_has_body_p (decl));
1010 gcc_assert (cnode->analyzed == cnode->local.finalized);
1011 }
1012 node->symbol.aux = NULL;
1013 }
1014 first_analyzed = cgraph_first_function ();
1015 first_analyzed_var = varpool_first_variable ();
1016 if (cgraph_dump_file)
1017 {
1018 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1019 dump_symtab (cgraph_dump_file);
1020 }
1021 bitmap_obstack_release (NULL);
1022 ggc_collect ();
1023 }
1024
1025 /* Translate the ugly representation of aliases as alias pairs into a nice
1026 representation in the callgraph. We don't handle all cases yet,
1027 unfortunately. */
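/* For illustration only: the kind of user source that produces alias pairs
   (not GCC code):

     int the_target (void) { return 0; }
     int the_alias (void) __attribute__ ((alias ("the_target")));
     static int the_weakref (void) __attribute__ ((weakref ("the_target")));

   Each attribute records a (decl, target name) pair that this function turns
   into a callgraph or varpool alias.  */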
1028
1029 static void
1030 handle_alias_pairs (void)
1031 {
1032 alias_pair *p;
1033 unsigned i;
1034
1035 for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
1036 {
1037 symtab_node target_node = symtab_node_for_asm (p->target);
1038
1039 /* Weakrefs with a target not defined in the current unit are easy to handle; they
1040 behave just like external variables except that we need to note the alias flag
1041 to later output the weakref pseudo-op into the asm file. */
1042 if (!target_node && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1043 {
1044 if (TREE_CODE (p->decl) == FUNCTION_DECL)
1045 cgraph_get_create_node (p->decl)->alias = true;
1046 else
1047 varpool_get_node (p->decl)->alias = true;
1048 DECL_EXTERNAL (p->decl) = 1;
1049 VEC_unordered_remove (alias_pair, alias_pairs, i);
1050 continue;
1051 }
1052 else if (!target_node)
1053 {
1054 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1055 VEC_unordered_remove (alias_pair, alias_pairs, i);
1056 continue;
1057 }
1058
1059 /* Normally the EXTERNAL flag is used to mark external inlines;
1060 however, for aliases it seems to be allowed to use it without
1061 any meaning. See gcc.dg/attr-alias-3.c.
1062 However, for weakref we insist on the EXTERNAL flag being set.
1063 See gcc.dg/attr-alias-5.c. */
1064 if (DECL_EXTERNAL (p->decl))
1065 DECL_EXTERNAL (p->decl)
1066 = lookup_attribute ("weakref",
1067 DECL_ATTRIBUTES (p->decl)) != NULL;
1068
1069 if (DECL_EXTERNAL (target_node->symbol.decl)
1070 /* We use local aliases for C++ thunks to force the tailcall
1071 to bind locally. This is a hack - to keep it working do
1072 the following (which is not strictly correct). */
1073 && (TREE_CODE (target_node->symbol.decl) != FUNCTION_DECL
1074 || ! DECL_VIRTUAL_P (target_node->symbol.decl))
1075 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1076 {
1077 error ("%q+D aliased to external symbol %qE",
1078 p->decl, p->target);
1079 }
1080
1081 if (TREE_CODE (p->decl) == FUNCTION_DECL
1082 && target_node && symtab_function_p (target_node))
1083 {
1084 struct cgraph_node *src_node = cgraph_get_node (p->decl);
1085 if (src_node && src_node->local.finalized)
1086 cgraph_reset_node (src_node);
1087 cgraph_create_function_alias (p->decl, target_node->symbol.decl);
1088 VEC_unordered_remove (alias_pair, alias_pairs, i);
1089 }
1090 else if (TREE_CODE (p->decl) == VAR_DECL
1091 && target_node && symtab_variable_p (target_node))
1092 {
1093 varpool_create_variable_alias (p->decl, target_node->symbol.decl);
1094 VEC_unordered_remove (alias_pair, alias_pairs, i);
1095 }
1096 else
1097 {
1098 error ("%q+D alias in between function and variable is not supported",
1099 p->decl);
1100 warning (0, "%q+D aliased declaration",
1101 target_node->symbol.decl);
1102 VEC_unordered_remove (alias_pair, alias_pairs, i);
1103 }
1104 }
1105 VEC_free (alias_pair, gc, alias_pairs);
1106 }
1107
1108
1109 /* Figure out what functions we want to assemble. */
1110
1111 static void
1112 mark_functions_to_output (void)
1113 {
1114 struct cgraph_node *node;
1115 #ifdef ENABLE_CHECKING
1116 bool check_same_comdat_groups = false;
1117
1118 FOR_EACH_FUNCTION (node)
1119 gcc_assert (!node->process);
1120 #endif
1121
1122 FOR_EACH_FUNCTION (node)
1123 {
1124 tree decl = node->symbol.decl;
1125
1126 gcc_assert (!node->process || node->symbol.same_comdat_group);
1127 if (node->process)
1128 continue;
1129
1130 /* We need to output all local functions that are used and not
1131 always inlined, as well as those that are reachable from
1132 outside the current compilation unit. */
1133 if (node->analyzed
1134 && !node->thunk.thunk_p
1135 && !node->alias
1136 && !node->global.inlined_to
1137 && !TREE_ASM_WRITTEN (decl)
1138 && !DECL_EXTERNAL (decl))
1139 {
1140 node->process = 1;
1141 if (node->symbol.same_comdat_group)
1142 {
1143 struct cgraph_node *next;
1144 for (next = cgraph (node->symbol.same_comdat_group);
1145 next != node;
1146 next = cgraph (next->symbol.same_comdat_group))
1147 if (!next->thunk.thunk_p && !next->alias)
1148 next->process = 1;
1149 }
1150 }
1151 else if (node->symbol.same_comdat_group)
1152 {
1153 #ifdef ENABLE_CHECKING
1154 check_same_comdat_groups = true;
1155 #endif
1156 }
1157 else
1158 {
1159 /* We should've reclaimed all functions that are not needed. */
1160 #ifdef ENABLE_CHECKING
1161 if (!node->global.inlined_to
1162 && gimple_has_body_p (decl)
1163 /* FIXME: in an ltrans unit when the offline copy is outside a partition but
1164 inline copies are inside a partition, we can end up not removing the body
1165 since we no longer have an analyzed node pointing to it. */
1166 && !node->symbol.in_other_partition
1167 && !node->alias
1168 && !node->clones
1169 && !DECL_EXTERNAL (decl))
1170 {
1171 dump_cgraph_node (stderr, node);
1172 internal_error ("failed to reclaim unneeded function");
1173 }
1174 #endif
1175 gcc_assert (node->global.inlined_to
1176 || !gimple_has_body_p (decl)
1177 || node->symbol.in_other_partition
1178 || node->clones
1179 || DECL_ARTIFICIAL (decl)
1180 || DECL_EXTERNAL (decl));
1181
1182 }
1183
1184 }
1185 #ifdef ENABLE_CHECKING
1186 if (check_same_comdat_groups)
1187 FOR_EACH_FUNCTION (node)
1188 if (node->symbol.same_comdat_group && !node->process)
1189 {
1190 tree decl = node->symbol.decl;
1191 if (!node->global.inlined_to
1192 && gimple_has_body_p (decl)
1193 /* FIXME: in an ltrans unit when the offline copy is outside a
1194 partition but inline copies are inside a partition, we can
1195 end up not removing the body since we no longer have an
1196 analyzed node pointing to it. */
1197 && !node->symbol.in_other_partition
1198 && !node->clones
1199 && !DECL_EXTERNAL (decl))
1200 {
1201 dump_cgraph_node (stderr, node);
1202 internal_error ("failed to reclaim unneeded function in same "
1203 "comdat group");
1204 }
1205 }
1206 #endif
1207 }
1208
1209 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1210 in lowered GIMPLE form.
1211
1212 Set current_function_decl and cfun to the newly constructed empty function body.
1213 Return the basic block in the function body. */
1214
1215 static basic_block
1216 init_lowered_empty_function (tree decl)
1217 {
1218 basic_block bb;
1219
1220 current_function_decl = decl;
1221 allocate_struct_function (decl, false);
1222 gimple_register_cfg_hooks ();
1223 init_empty_tree_cfg ();
1224 init_tree_ssa (cfun);
1225 init_ssa_operands (cfun);
1226 cfun->gimple_df->in_ssa_p = true;
1227 DECL_INITIAL (decl) = make_node (BLOCK);
1228
1229 DECL_SAVED_TREE (decl) = error_mark_node;
1230 cfun->curr_properties |=
1231 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1232 PROP_ssa | PROP_gimple_any);
1233
1234 /* Create BB for body of the function and connect it properly. */
1235 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1236 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1237 make_edge (bb, EXIT_BLOCK_PTR, 0);
1238
1239 return bb;
1240 }
1241
1242 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1243 offset indicated by VIRTUAL_OFFSET, if that is
1244 non-null. THIS_ADJUSTING is nonzero for a this-adjusting thunk and
1245 zero for a result-adjusting thunk. */
1246
1247 static tree
1248 thunk_adjust (gimple_stmt_iterator * bsi,
1249 tree ptr, bool this_adjusting,
1250 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1251 {
1252 gimple stmt;
1253 tree ret;
1254
1255 if (this_adjusting
1256 && fixed_offset != 0)
1257 {
1258 stmt = gimple_build_assign
1259 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1260 ptr,
1261 fixed_offset));
1262 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1263 }
1264
1265 /* If there's a virtual offset, look up that value in the vtable and
1266 adjust the pointer again. */
1267 if (virtual_offset)
1268 {
1269 tree vtabletmp;
1270 tree vtabletmp2;
1271 tree vtabletmp3;
1272
1273 if (!vtable_entry_type)
1274 {
1275 tree vfunc_type = make_node (FUNCTION_TYPE);
1276 TREE_TYPE (vfunc_type) = integer_type_node;
1277 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1278 layout_type (vfunc_type);
1279
1280 vtable_entry_type = build_pointer_type (vfunc_type);
1281 }
1282
1283 vtabletmp =
1284 make_rename_temp (build_pointer_type
1285 (build_pointer_type (vtable_entry_type)), "vptr");
1286
1287 /* The vptr is always at offset zero in the object. */
1288 stmt = gimple_build_assign (vtabletmp,
1289 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1290 ptr));
1291 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1292
1293 /* Form the vtable address. */
1294 vtabletmp2 = make_rename_temp (TREE_TYPE (TREE_TYPE (vtabletmp)),
1295 "vtableaddr");
1296 stmt = gimple_build_assign (vtabletmp2,
1297 build_simple_mem_ref (vtabletmp));
1298 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1299
1300 /* Find the entry with the vcall offset. */
1301 stmt = gimple_build_assign (vtabletmp2,
1302 fold_build_pointer_plus_loc (input_location,
1303 vtabletmp2,
1304 virtual_offset));
1305 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1306
1307 /* Get the offset itself. */
1308 vtabletmp3 = make_rename_temp (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1309 "vcalloffset");
1310 stmt = gimple_build_assign (vtabletmp3,
1311 build_simple_mem_ref (vtabletmp2));
1312 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1313
1314 /* Adjust the `this' pointer. */
1315 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1316 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1317 GSI_CONTINUE_LINKING);
1318 }
1319
1320 if (!this_adjusting
1321 && fixed_offset != 0)
1322 /* Adjust the pointer by the constant. */
1323 {
1324 tree ptrtmp;
1325
1326 if (TREE_CODE (ptr) == VAR_DECL)
1327 ptrtmp = ptr;
1328 else
1329 {
1330 ptrtmp = make_rename_temp (TREE_TYPE (ptr), "ptr");
1331 stmt = gimple_build_assign (ptrtmp, ptr);
1332 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1333 }
1334 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1335 ptrtmp, fixed_offset);
1336 }
1337
1338 /* Emit the statement and gimplify the adjustment expression. */
1339 ret = make_rename_temp (TREE_TYPE (ptr), "adjusted_this");
1340 stmt = gimple_build_assign (ret, ptr);
1341 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1342
1343 return ret;
1344 }
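/* For illustration only: a rough C equivalent of the adjustment sequence that
   thunk_adjust builds above (a sketch, not generated code), with PTR treated
   as a plain byte pointer (char *).  Exactly one of the two fixed_offset
   additions applies, depending on THIS_ADJUSTING:

     if (this_adjusting)
       ptr += fixed_offset;
     if (virtual_offset)
       ptr += *(ptrdiff_t *) (*(char **) ptr + virtual_offset);
     if (!this_adjusting)
       ptr += fixed_offset;  */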
1345
1346 /* Produce assembler for thunk NODE. */
1347
1348 static void
1349 assemble_thunk (struct cgraph_node *node)
1350 {
1351 bool this_adjusting = node->thunk.this_adjusting;
1352 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1353 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1354 tree virtual_offset = NULL;
1355 tree alias = node->thunk.alias;
1356 tree thunk_fndecl = node->symbol.decl;
1357 tree a = DECL_ARGUMENTS (thunk_fndecl);
1358
1359 current_function_decl = thunk_fndecl;
1360
1361 /* Ensure thunks are emitted in their correct sections. */
1362 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1363
1364 if (this_adjusting
1365 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1366 virtual_value, alias))
1367 {
1368 const char *fnname;
1369 tree fn_block;
1370 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1371
1372 DECL_RESULT (thunk_fndecl)
1373 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1374 RESULT_DECL, 0, restype);
1375 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1376
1377 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1378 create one. */
1379 fn_block = make_node (BLOCK);
1380 BLOCK_VARS (fn_block) = a;
1381 DECL_INITIAL (thunk_fndecl) = fn_block;
1382 init_function_start (thunk_fndecl);
1383 cfun->is_thunk = 1;
1384 assemble_start_function (thunk_fndecl, fnname);
1385
1386 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1387 fixed_offset, virtual_value, alias);
1388
1389 assemble_end_function (thunk_fndecl, fnname);
1390 init_insn_lengths ();
1391 free_after_compilation (cfun);
1392 set_cfun (NULL);
1393 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1394 node->thunk.thunk_p = false;
1395 node->analyzed = false;
1396 }
1397 else
1398 {
1399 tree restype;
1400 basic_block bb, then_bb, else_bb, return_bb;
1401 gimple_stmt_iterator bsi;
1402 int nargs = 0;
1403 tree arg;
1404 int i;
1405 tree resdecl;
1406 tree restmp = NULL;
1407 VEC(tree, heap) *vargs;
1408
1409 gimple call;
1410 gimple ret;
1411
1412 DECL_IGNORED_P (thunk_fndecl) = 1;
1413 bitmap_obstack_initialize (NULL);
1414
1415 if (node->thunk.virtual_offset_p)
1416 virtual_offset = size_int (virtual_value);
1417
1418 /* Build the return declaration for the function. */
1419 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1420 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1421 {
1422 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1423 DECL_ARTIFICIAL (resdecl) = 1;
1424 DECL_IGNORED_P (resdecl) = 1;
1425 DECL_RESULT (thunk_fndecl) = resdecl;
1426 }
1427 else
1428 resdecl = DECL_RESULT (thunk_fndecl);
1429
1430 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1431
1432 bsi = gsi_start_bb (bb);
1433
1434 /* Build call to the function being thunked. */
1435 if (!VOID_TYPE_P (restype))
1436 {
1437 if (!is_gimple_reg_type (restype))
1438 {
1439 restmp = resdecl;
1440 add_local_decl (cfun, restmp);
1441 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1442 }
1443 else
1444 restmp = make_rename_temp (restype, "retval");
1445 }
1446
1447 for (arg = a; arg; arg = DECL_CHAIN (arg))
1448 nargs++;
1449 vargs = VEC_alloc (tree, heap, nargs);
1450 if (this_adjusting)
1451 VEC_quick_push (tree, vargs,
1452 thunk_adjust (&bsi,
1453 a, 1, fixed_offset,
1454 virtual_offset));
1455 else
1456 VEC_quick_push (tree, vargs, a);
1457 add_referenced_var (a);
1458 if (is_gimple_reg (a))
1459 mark_sym_for_renaming (a);
1460 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1461 {
1462 add_referenced_var (arg);
1463 if (is_gimple_reg (arg))
1464 mark_sym_for_renaming (arg);
1465 VEC_quick_push (tree, vargs, arg);
1466 }
1467 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1468 VEC_free (tree, heap, vargs);
1469 gimple_call_set_from_thunk (call, true);
1470 if (restmp)
1471 gimple_call_set_lhs (call, restmp);
1472 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1473
1474 if (restmp && !this_adjusting)
1475 {
1476 tree true_label = NULL_TREE;
1477
1478 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1479 {
1480 gimple stmt;
1481 /* If the return type is a pointer, we need to
1482 protect against NULL. We know there will be an
1483 adjustment, because that's why we're emitting a
1484 thunk. */
1485 then_bb = create_basic_block (NULL, (void *) 0, bb);
1486 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1487 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1488 remove_edge (single_succ_edge (bb));
1489 true_label = gimple_block_label (then_bb);
1490 stmt = gimple_build_cond (NE_EXPR, restmp,
1491 build_zero_cst (TREE_TYPE (restmp)),
1492 NULL_TREE, NULL_TREE);
1493 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1494 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1495 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1496 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1497 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1498 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1499 bsi = gsi_last_bb (then_bb);
1500 }
1501
1502 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1503 fixed_offset, virtual_offset);
1504 if (true_label)
1505 {
1506 gimple stmt;
1507 bsi = gsi_last_bb (else_bb);
1508 stmt = gimple_build_assign (restmp,
1509 build_zero_cst (TREE_TYPE (restmp)));
1510 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1511 bsi = gsi_last_bb (return_bb);
1512 }
1513 }
1514 else
1515 gimple_call_set_tail (call, true);
1516
1517 /* Build return value. */
1518 ret = gimple_build_return (restmp);
1519 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1520
1521 delete_unreachable_blocks ();
1522 update_ssa (TODO_update_ssa);
1523
1524 /* Since we want to emit the thunk, we explicitly mark its name as
1525 referenced. */
1526 node->thunk.thunk_p = false;
1527 cgraph_node_remove_callees (node);
1528 cgraph_add_new_function (thunk_fndecl, true);
1529 bitmap_obstack_release (NULL);
1530 }
1531 current_function_decl = NULL;
1532 }
1533
1534
1535
1536 /* Assemble thunks and aliases associated to NODE. */
1537
1538 static void
1539 assemble_thunks_and_aliases (struct cgraph_node *node)
1540 {
1541 struct cgraph_edge *e;
1542 int i;
1543 struct ipa_ref *ref;
1544
1545 for (e = node->callers; e;)
1546 if (e->caller->thunk.thunk_p)
1547 {
1548 struct cgraph_node *thunk = e->caller;
1549
1550 e = e->next_caller;
1551 assemble_thunks_and_aliases (thunk);
1552 assemble_thunk (thunk);
1553 }
1554 else
1555 e = e->next_caller;
1556 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
1557 i, ref); i++)
1558 if (ref->use == IPA_REF_ALIAS)
1559 {
1560 struct cgraph_node *alias = ipa_ref_referring_node (ref);
1561 bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias);
1562
1563 /* Force assemble_alias to really output the alias this time instead
1564 of buffering it in same alias pairs. */
1565 TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
1566 do_assemble_alias (alias->symbol.decl,
1567 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1568 assemble_thunks_and_aliases (alias);
1569 TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
1570 }
1571 }
1572
1573 /* Expand function specified by NODE. */
1574
1575 static void
1576 expand_function (struct cgraph_node *node)
1577 {
1578 tree decl = node->symbol.decl;
1579 location_t saved_loc;
1580
1581 /* We ought to not compile any inline clones. */
1582 gcc_assert (!node->global.inlined_to);
1583
1584 announce_function (decl);
1585 node->process = 0;
1586 gcc_assert (node->lowered);
1587
1588 /* Generate RTL for the body of DECL. */
1589
1590 timevar_push (TV_REST_OF_COMPILATION);
1591
1592 gcc_assert (cgraph_global_info_ready);
1593
1594 /* Initialize the default bitmap obstack. */
1595 bitmap_obstack_initialize (NULL);
1596
1597 /* Initialize the RTL code for the function. */
1598 current_function_decl = decl;
1599 saved_loc = input_location;
1600 input_location = DECL_SOURCE_LOCATION (decl);
1601 init_function_start (decl);
1602
1603 gimple_register_cfg_hooks ();
1604
1605 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
1606
1607 execute_all_ipa_transforms ();
1608
1609 /* Perform all tree transforms and optimizations. */
1610
1611 /* Signal the start of passes. */
1612 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1613
1614 execute_pass_list (all_passes);
1615
1616 /* Signal the end of passes. */
1617 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1618
1619 bitmap_obstack_release (&reg_obstack);
1620
1621 /* Release the default bitmap obstack. */
1622 bitmap_obstack_release (NULL);
1623
1624 set_cfun (NULL);
1625
1626 /* If requested, warn about function definitions where the function will
1627 return a value (usually of some struct or union type) which itself will
1628 take up a lot of stack space. */
1629 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1630 {
1631 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1632
1633 if (ret_type && TYPE_SIZE_UNIT (ret_type)
1634 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1635 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1636 larger_than_size))
1637 {
1638 unsigned int size_as_int
1639 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1640
1641 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1642 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1643 decl, size_as_int);
1644 else
1645 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1646 decl, larger_than_size);
1647 }
1648 }
1649
1650 gimple_set_body (decl, NULL);
1651 if (DECL_STRUCT_FUNCTION (decl) == 0
1652 && !cgraph_get_node (decl)->origin)
1653 {
1654 /* Stop pointing to the local nodes about to be freed.
1655 But DECL_INITIAL must remain nonzero so we know this
1656 was an actual function definition.
1657 For a nested function, this is done in c_pop_function_context.
1658 If rest_of_compilation set this to 0, leave it 0. */
1659 if (DECL_INITIAL (decl) != 0)
1660 DECL_INITIAL (decl) = error_mark_node;
1661 }
1662
1663 input_location = saved_loc;
1664
1665 ggc_collect ();
1666 timevar_pop (TV_REST_OF_COMPILATION);
1667
1668 /* Make sure that BE didn't give up on compiling. */
1669 gcc_assert (TREE_ASM_WRITTEN (decl));
1670 current_function_decl = NULL;
1671
1672 /* It would make a lot more sense to output thunks before the function body to get
1673 more forward and fewer backward jumps. This however would require solving a
1674 problem with comdats. See PR48668. Also aliases must come after the function
1675 itself to make one-pass assemblers, like the one on AIX, happy. See PR 50689.
1676 FIXME: Perhaps thunks should be moved before the function IFF they are not in
1677 comdat groups. */
1678 assemble_thunks_and_aliases (node);
1679 cgraph_release_function_body (node);
1680 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1681 points to the dead function body. */
1682 cgraph_node_remove_callees (node);
1683 }
1684
1685
1686 /* Expand all functions that must be output.
1687
1688 Attempt to topologically sort the nodes so a function is output when
1689 all its called functions are already assembled, to allow data to be
1690 propagated across the callgraph. Use a stack to get smaller distance
1691 between a function and its callees (later we may choose to use a more
1692 sophisticated algorithm for function reordering; we will likely want
1693 to use subsections to make the output functions appear in top-down
1694 order). */
1695
1696 static void
1697 expand_all_functions (void)
1698 {
1699 struct cgraph_node *node;
1700 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1701 int order_pos, new_order_pos = 0;
1702 int i;
1703
1704 order_pos = ipa_reverse_postorder (order);
1705 gcc_assert (order_pos == cgraph_n_nodes);
1706
1707 /* The garbage collector may remove inline clones we eliminate during
1708 optimization, so we must be sure not to reference them. */
1709 for (i = 0; i < order_pos; i++)
1710 if (order[i]->process)
1711 order[new_order_pos++] = order[i];
1712
1713 for (i = new_order_pos - 1; i >= 0; i--)
1714 {
1715 node = order[i];
1716 if (node->process)
1717 {
1718 node->process = 0;
1719 expand_function (node);
1720 }
1721 }
1722 cgraph_process_new_functions ();
1723
1724 free (order);
1725
1726 }
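
/* A worked (hypothetical) example of the ordering used above, assuming
   ipa_reverse_postorder places callers before their callees: for a call
   chain main -> f -> g the compacted array is roughly [main, f, g], and
   the backward walk expands g first, then f, then main, so every function
   is assembled after the functions it calls, as the comment before
   expand_all_functions requires.  */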
1727
1728 /* This is used to sort the node types by the cgraph order number. */
1729
1730 enum cgraph_order_sort_kind
1731 {
1732 ORDER_UNDEFINED = 0,
1733 ORDER_FUNCTION,
1734 ORDER_VAR,
1735 ORDER_ASM
1736 };
1737
1738 struct cgraph_order_sort
1739 {
1740 enum cgraph_order_sort_kind kind;
1741 union
1742 {
1743 struct cgraph_node *f;
1744 struct varpool_node *v;
1745 struct asm_node *a;
1746 } u;
1747 };
1748
1749 /* Output all functions, variables, and asm statements in the order given
1750 by their order fields, which is the order in which they appeared in the
1751 source file. This implements -fno-toplevel-reorder. In this mode we
1752 may output functions and variables which don't really need to be
1753 output. */
1754
1755 static void
1756 output_in_order (void)
1757 {
1758 int max;
1759 struct cgraph_order_sort *nodes;
1760 int i;
1761 struct cgraph_node *pf;
1762 struct varpool_node *pv;
1763 struct asm_node *pa;
1764
1765 max = symtab_order;
1766 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1767
1768 FOR_EACH_DEFINED_FUNCTION (pf)
1769 {
1770 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1771 {
1772 i = pf->symbol.order;
1773 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1774 nodes[i].kind = ORDER_FUNCTION;
1775 nodes[i].u.f = pf;
1776 }
1777 }
1778
1779 FOR_EACH_DEFINED_VARIABLE (pv)
1780 if (!DECL_EXTERNAL (pv->symbol.decl))
1781 {
1782 i = pv->symbol.order;
1783 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1784 nodes[i].kind = ORDER_VAR;
1785 nodes[i].u.v = pv;
1786 }
1787
1788 for (pa = asm_nodes; pa; pa = pa->next)
1789 {
1790 i = pa->order;
1791 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1792 nodes[i].kind = ORDER_ASM;
1793 nodes[i].u.a = pa;
1794 }
1795
1796 /* Under -fno-toplevel-reorder we output all statics; finalize their named section flags before assembling any of them. */
1797
1798 for (i = 0; i < max; ++i)
1799 if (nodes[i].kind == ORDER_VAR)
1800 varpool_finalize_named_section_flags (nodes[i].u.v);
1801
1802 for (i = 0; i < max; ++i)
1803 {
1804 switch (nodes[i].kind)
1805 {
1806 case ORDER_FUNCTION:
1807 nodes[i].u.f->process = 0;
1808 expand_function (nodes[i].u.f);
1809 break;
1810
1811 case ORDER_VAR:
1812 varpool_assemble_decl (nodes[i].u.v);
1813 break;
1814
1815 case ORDER_ASM:
1816 assemble_asm (nodes[i].u.a->asm_str);
1817 break;
1818
1819 case ORDER_UNDEFINED:
1820 break;
1821
1822 default:
1823 gcc_unreachable ();
1824 }
1825 }
1826
1827 asm_nodes = NULL;
1828 free (nodes);
1829 }
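
/* An illustrative (hypothetical, x86 inline asm) unit where the original
   file order matters and -fno-toplevel-reorder is needed:

     asm (".set answer, 42");
     int get_answer (void)
     { int r; asm ("mov $answer, %0" : "=r" (r)); return r; }

   With a one-pass assembler the toplevel asm must be emitted before the
   function that uses the symbol it defines; walking nodes[] by the
   symbol.order indices reproduces exactly that source order.  */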
1830
1831 static void
1832 ipa_passes (void)
1833 {
1834 set_cfun (NULL);
1835 current_function_decl = NULL;
1836 gimple_register_cfg_hooks ();
1837 bitmap_obstack_initialize (NULL);
1838
1839 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1840
1841 if (!in_lto_p)
1842 {
1843 execute_ipa_pass_list (all_small_ipa_passes);
1844 if (seen_error ())
1845 return;
1846 }
1847
1848 /* We never run removal of unreachable nodes after the early passes. This is
1849 because the TODOs are run before the subpasses. It is important to remove
1850 the unreachable functions to save work at the IPA level and to get the LTO
1851 symbol tables right. */
1852 symtab_remove_unreachable_nodes (true, cgraph_dump_file);
1853
1854 /* If pass_all_early_optimizations was not scheduled, the state of
1855 the cgraph will not be properly updated. Update it now. */
1856 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1857 cgraph_state = CGRAPH_STATE_IPA_SSA;
1858
1859 if (!in_lto_p)
1860 {
1861 /* Generate coverage variables and constructors. */
1862 coverage_finish ();
1863
1864 /* Process new functions added. */
1865 set_cfun (NULL);
1866 current_function_decl = NULL;
1867 cgraph_process_new_functions ();
1868
1869 execute_ipa_summary_passes
1870 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1871 }
1872
1873 /* Some targets need to handle LTO assembler output specially. */
1874 if (flag_generate_lto)
1875 targetm.asm_out.lto_start ();
1876
1877 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1878
1879 if (!in_lto_p)
1880 ipa_write_summaries ();
1881
1882 if (flag_generate_lto)
1883 targetm.asm_out.lto_end ();
1884
1885 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
1886 execute_ipa_pass_list (all_regular_ipa_passes);
1887 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1888
1889 bitmap_obstack_release (NULL);
1890 }
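
/* A rough summary of the guards above (a sketch inferred from the flag
   tests, not an authoritative description of the LTO driver):

     plain compile (!flag_lto):          small IPA, summaries, regular IPA
     -flto compile, slim objects:        small IPA, summaries, write summaries
     -flto -ffat-lto-objects:            write summaries and run regular IPA
     LTO link (in_lto_p, !flag_ltrans):  regular IPA passes run here
     LTRANS (flag_ltrans):               regular IPA done at WPA, skipped here  */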
1891
1892
1893 /* Return, as an identifier, the name of the symbol that DECL is an alias of. */
1894
1895 static tree
1896 get_alias_symbol (tree decl)
1897 {
1898 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1899 return get_identifier (TREE_STRING_POINTER
1900 (TREE_VALUE (TREE_VALUE (alias))));
1901 }
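
/* For reference, a (hypothetical) declaration this would be applied to:

     void impl (void) { }
     void entry (void) __attribute__ ((alias ("impl")));

   The "alias" attribute's argument list carries the STRING_CST "impl";
   the function above returns the matching identifier.  */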
1902
1903
1904 /* Weakrefs may be associated with external decls and thus not output
1905 at expansion time. Emit all the necessary aliases here. */
1906
1907 static void
1908 output_weakrefs (void)
1909 {
1910 struct cgraph_node *node;
1911 struct varpool_node *vnode;
1912 FOR_EACH_FUNCTION (node)
1913 if (node->alias && DECL_EXTERNAL (node->symbol.decl)
1914 && !TREE_ASM_WRITTEN (node->symbol.decl)
1915 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
1916 do_assemble_alias (node->symbol.decl,
1917 node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias)
1918 : get_alias_symbol (node->symbol.decl));
1919 FOR_EACH_VARIABLE (vnode)
1920 if (vnode->alias && DECL_EXTERNAL (vnode->symbol.decl)
1921 && !TREE_ASM_WRITTEN (vnode->symbol.decl)
1922 && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->symbol.decl)))
1923 do_assemble_alias (vnode->symbol.decl,
1924 vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of)
1925 : get_alias_symbol (vnode->symbol.decl));
1926 }
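
/* A (hypothetical) weakref that takes this path:

     static void local_fn (void) __attribute__ ((weakref ("external_fn")));

   LOCAL_FN has no body and is treated as external, so it is never expanded;
   the weakref directive for it is therefore emitted here instead.  */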
1927
1928 /* Initialize callgraph dump file. */
1929
1930 void
1931 init_cgraph (void)
1932 {
1933 if (!cgraph_dump_file)
1934 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1935 }
1936
1937
1938 /* Run the interprocedural optimizations and then expand and assemble all functions. */
1939
1940 void
1941 compile (void)
1942 {
1943 if (seen_error ())
1944 return;
1945
1946 #ifdef ENABLE_CHECKING
1947 verify_symtab ();
1948 #endif
1949
1950 timevar_push (TV_CGRAPHOPT);
1951 if (pre_ipa_mem_report)
1952 {
1953 fprintf (stderr, "Memory consumption before IPA\n");
1954 dump_memory_report (false);
1955 }
1956 if (!quiet_flag)
1957 fprintf (stderr, "Performing interprocedural optimizations\n");
1958 cgraph_state = CGRAPH_STATE_IPA;
1959
1960 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
1961 if (flag_lto)
1962 lto_streamer_hooks_init ();
1963
1964 /* Don't run the IPA passes if there were any errors or sorry messages. */
1965 if (!seen_error ())
1966 ipa_passes ();
1967
1968 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
1969 if (seen_error ()
1970 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
1971 {
1972 timevar_pop (TV_CGRAPHOPT);
1973 return;
1974 }
1975
1976 /* This pass removes the bodies of extern inline functions we never inlined.
1977 Do this late so that other IPA passes see what is really going on. */
1978 symtab_remove_unreachable_nodes (false, dump_file);
1979 cgraph_global_info_ready = true;
1980 if (cgraph_dump_file)
1981 {
1982 fprintf (cgraph_dump_file, "Optimized ");
1983 dump_symtab (cgraph_dump_file);
1984 }
1985 if (post_ipa_mem_report)
1986 {
1987 fprintf (stderr, "Memory consumption after IPA\n");
1988 dump_memory_report (false);
1989 }
1990 timevar_pop (TV_CGRAPHOPT);
1991
1992 /* Output everything. */
1993 (*debug_hooks->assembly_start) ();
1994 if (!quiet_flag)
1995 fprintf (stderr, "Assembling functions:\n");
1996 #ifdef ENABLE_CHECKING
1997 verify_symtab ();
1998 #endif
1999
2000 cgraph_materialize_all_clones ();
2001 bitmap_obstack_initialize (NULL);
2002 execute_ipa_pass_list (all_late_ipa_passes);
2003 symtab_remove_unreachable_nodes (true, dump_file);
2004 #ifdef ENABLE_CHECKING
2005 verify_symtab ();
2006 #endif
2007 bitmap_obstack_release (NULL);
2008 mark_functions_to_output ();
2009
2010 cgraph_state = CGRAPH_STATE_EXPANSION;
2011 if (!flag_toplevel_reorder)
2012 output_in_order ();
2013 else
2014 {
2015 output_asm_statements ();
2016
2017 expand_all_functions ();
2018 varpool_output_variables ();
2019 }
2020
2021 cgraph_process_new_functions ();
2022 cgraph_state = CGRAPH_STATE_FINISHED;
2023 output_weakrefs ();
2024
2025 if (cgraph_dump_file)
2026 {
2027 fprintf (cgraph_dump_file, "\nFinal ");
2028 dump_symtab (cgraph_dump_file);
2029 }
2030 #ifdef ENABLE_CHECKING
2031 verify_symtab ();
2032 /* Double check that all inline clones are gone and that all
2033 function bodies have been released from memory. */
2034 if (!seen_error ())
2035 {
2036 struct cgraph_node *node;
2037 bool error_found = false;
2038
2039 FOR_EACH_DEFINED_FUNCTION (node)
2040 if (node->global.inlined_to
2041 || gimple_has_body_p (node->symbol.decl))
2042 {
2043 error_found = true;
2044 dump_cgraph_node (stderr, node);
2045 }
2046 if (error_found)
2047 internal_error ("nodes with unreleased memory found");
2048 }
2049 #endif
2050 }
2051
2052
2053 /* Analyze the whole compilation unit once it is parsed completely. */
2054
2055 void
2056 finalize_compilation_unit (void)
2057 {
2058 timevar_push (TV_CGRAPH);
2059
2060 /* If we're here, there's no current function anymore. Some front ends
2061 are lazy about clearing these. */
2062 current_function_decl = NULL;
2063 set_cfun (NULL);
2064
2065 /* Do not skip analyzing the functions if there were errors; otherwise we
2066 would miss diagnostics for the functions that follow. */
2067
2068 /* Emit size functions we didn't inline. */
2069 finalize_size_functions ();
2070
2071 /* Mark alias targets necessary and emit diagnostics. */
2072 handle_alias_pairs ();
2073
2074 if (!quiet_flag)
2075 {
2076 fprintf (stderr, "\nAnalyzing compilation unit\n");
2077 fflush (stderr);
2078 }
2079
2080 if (flag_dump_passes)
2081 dump_passes ();
2082
2083 /* Gimplify and lower all functions, compute reachability and
2084 remove unreachable nodes. */
2085 cgraph_analyze_functions ();
2086
2087 /* Mark alias targets necessary and emit diagnostics. */
2088 handle_alias_pairs ();
2089
2090 /* Gimplify and lower thunks. */
2091 cgraph_analyze_functions ();
2092
2093 /* Finally drive the pass manager. */
2094 compile ();
2095
2096 timevar_pop (TV_CGRAPH);
2097 }
2098
2099
2100 #include "gt-cgraphunit.h"