gcc/cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011, 2012 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements the main driver of the compilation process.
23
24 The main scope of this file is to act as an interface between
25 tree-based frontends and the backend.
26
27 The front-end is supposed to use the following functionality:
28
29 - cgraph_finalize_function
30
31 This function is called once the front-end has parsed the whole function
32 body and it is certain that neither the body nor the declaration will change.
33
34 (There is one exception needed for implementing GCC extern inline
35 functions.)
36
37 - varpool_finalize_decl
38
39 This function has the same behavior as the above but is used for static
40 variables.
41
42 - add_asm_node
43
44 Inserts a new toplevel ASM statement.
45
46 - finalize_compilation_unit
47
48 This function is called once the (source level) compilation unit is
49 finalized and will no longer change.
50
51 The symbol table is constructed starting from the trivially needed
52 symbols finalized by the frontend. Functions are lowered into
53 GIMPLE representation and callgraph/reference lists are constructed.
54 Those are used to discover other necessary functions and variables.
55
56 At the end the bodies of unreachable functions are removed.
57
58 The function can be called multiple times when multiple source level
59 compilation units are combined.
60
61 - compile
62
63 This passes control to the back-end. Optimizations are performed and
64 final assembler is generated. This is done in the following way. Note
65 that with link time optimization the process is split into three
66 stages (compile time, linktime analysis and parallel linktime as
67 indicated below).
68
69 Compile time:
70
71 1) Inter-procedural optimization.
72 (ipa_passes)
73
74 This part is further split into:
75
76 a) early optimizations. These are local passes executed in
77 the topological order on the callgraph.
78
79 The purpose of early optimizations is to optimize away simple
80 things that may otherwise confuse IP analysis. Very simple
81 propagation across the callgraph is done, e.g. to discover
82 functions without side effects, and simple inlining is performed.
83
84 b) early small interprocedural passes.
85
86 Those are interprocedural passes executed only at compilation
87 time. These include, for example, transactional memory lowering,
88 unreachable code removal and other simple transformations.
89
90 c) IP analysis stage. All interprocedural passes do their
91 analysis.
92
93 Interprocedural passes differ from small interprocedural
94 passes by their ability to operate across the whole program
95 at linktime. Their analysis stage is performed early to
96 both reduce linking times and linktime memory usage by
97 not having to represent the whole program in memory.
98
99 d) LTO streaming. When doing LTO, everything important gets
100 streamed into the object file.
101
102 Compile time and/or linktime analysis stage (WPA):
103
104 At linktime units get streamed back and the symbol table is
105 merged. Function bodies are not streamed in and are not
106 available.
107 e) IP propagation stage. All IP passes execute their
108 IP propagation. This is done based on the earlier analysis
109 without having function bodies at hand.
110 f) Ltrans streaming. When doing WHOPR LTO, the program
111 is partitioned and streamed into multiple object files.
112
113 Compile time and/or parallel linktime stage (ltrans)
114
115 Each of the object files is streamed back and compiled
116 separately. Now the function bodies become available
117 again.
118
119 2) Virtual clone materialization
120 (cgraph_materialize_clone)
121
122 IP passes can produce copies of existing functions (such
123 as versioned clones or inline clones) without actually
124 manipulating their bodies by creating virtual clones in
125 the callgraph. At this time the virtual clones are
126 turned into real functions.
127 3) IP transformation
128
129 All IP passes transform function bodies based on earlier
130 decisions of the IP propagation.
131
132 4) late small IP passes
133
134 Simple IP passes working within single program partition.
135
136 5) Expansion
137 (expand_all_functions)
138
139 At this stage functions that need to be output into
140 assembler are identified and compiled in topological order.
141 6) Output of variables and aliases
142 Now it is known which variable references were not optimized
143 out and thus which variables still need to be output to the file.
144
145 Note that with -fno-toplevel-reorder passes 5 and 6
146 are combined together in output_in_order.
147
148 Finally there are functions to manipulate the callgraph from
149 the backend.
150 - cgraph_add_new_function is used to add backend-produced
151 functions introduced after the unit is finalized.
152 The functions are enqueued for later processing and inserted
153 into the callgraph with cgraph_process_new_functions.
154
155 - cgraph_function_versioning
156
157 produces a copy of a function into a new one (a version)
158 and applies simple transformations.
159 */
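/* Illustrative sketch (not part of GCC): a frontend would typically drive
   the interface described above roughly as follows.  The my_frontend_*
   helper is a hypothetical placeholder; only the cgraph_/varpool_/asm calls
   below are the real entry points documented in this comment.

     static void
     my_frontend_compile_unit (void)
     {
       tree fndecl, vardecl, asm_str;

       while (my_frontend_parse_toplevel (&fndecl, &vardecl, &asm_str))
         {
           if (fndecl)
             cgraph_finalize_function (fndecl, false);
           else if (vardecl)
             varpool_finalize_decl (vardecl);
           else if (asm_str)
             add_asm_node (asm_str);
         }

       finalize_compilation_unit ();
     }

   Once finalize_compilation_unit has built and pruned the symbol table, the
   compile stage described above takes over and drives IPA optimization,
   expansion and final output.  */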
160
161 #include "config.h"
162 #include "system.h"
163 #include "coretypes.h"
164 #include "tm.h"
165 #include "tree.h"
166 #include "output.h"
167 #include "rtl.h"
168 #include "tree-flow.h"
169 #include "tree-inline.h"
170 #include "langhooks.h"
171 #include "pointer-set.h"
172 #include "toplev.h"
173 #include "flags.h"
174 #include "ggc.h"
175 #include "debug.h"
176 #include "target.h"
177 #include "cgraph.h"
178 #include "diagnostic.h"
179 #include "params.h"
180 #include "fibheap.h"
181 #include "intl.h"
182 #include "function.h"
183 #include "ipa-prop.h"
184 #include "gimple.h"
185 #include "tree-iterator.h"
186 #include "tree-pass.h"
187 #include "tree-dump.h"
188 #include "gimple-pretty-print.h"
189 #include "output.h"
190 #include "coverage.h"
191 #include "plugin.h"
192 #include "ipa-inline.h"
193 #include "ipa-utils.h"
194 #include "lto-streamer.h"
195 #include "except.h"
196 #include "regset.h" /* FIXME: For reg_obstack. */
197
198 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
199 secondary queue used during optimization to accommodate passes that
200 may generate new functions that need to be optimized and expanded. */
201 cgraph_node_set cgraph_new_nodes;
202
203 static void expand_all_functions (void);
204 static void mark_functions_to_output (void);
205 static void expand_function (struct cgraph_node *);
206 static void cgraph_analyze_function (struct cgraph_node *);
207 static void handle_alias_pairs (void);
208
209 FILE *cgraph_dump_file;
210
211 /* Linked list of cgraph asm nodes. */
212 struct asm_node *asm_nodes;
213
214 /* Last node in the asm_nodes list. */
215 static GTY(()) struct asm_node *asm_last_node;
216
217 /* Used for vtable lookup in thunk adjusting. */
218 static GTY (()) tree vtable_entry_type;
219
220 /* Determine if function DECL is trivially needed and should stay in the
221 compilation unit. This is used at the symbol table construction time
222 and differs from later logic removing unnecessary functions that can
223 take into account results of analysis, whole program info etc. */
224
225 static bool
226 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
227 {
228 /* If the user told us it is used, then it must be so. */
229 if (node->symbol.force_output)
230 return true;
231
232 /* Double-check that no one has output the function into the assembly
233 file early. */
234 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
235 || (node->thunk.thunk_p || node->same_body_alias)
236 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
237
238
239 /* Keep constructors, destructors and virtual functions. */
240 if (DECL_STATIC_CONSTRUCTOR (decl)
241 || DECL_STATIC_DESTRUCTOR (decl)
242 || (DECL_VIRTUAL_P (decl)
243 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
244 return true;
245
246 /* Externally visible functions must be output. The exception is
247 COMDAT functions that must be output only when they are needed. */
248
249 if (TREE_PUBLIC (decl)
250 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
251 return true;
252
253 return false;
254 }
255
256 /* Head of the queue of nodes to be processed while building the callgraph. */
257
258 static symtab_node first = (symtab_node)(void *)1;
259
260 /* Add NODE to the queue starting at FIRST.
261 The queue is linked via AUX pointers and terminated by a pointer to 1. */
262
263 static void
264 enqueue_node (symtab_node node)
265 {
266 if (node->symbol.aux)
267 return;
268 gcc_checking_assert (first);
269 node->symbol.aux = first;
270 first = node;
271 }
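/* Illustrative sketch (not part of GCC): the queue rooted at FIRST is
   drained by popping nodes until the sentinel pointer 1 is reached,
   mirroring the loop in cgraph_analyze_functions below:

     while (first != (symtab_node)(void *)1)
       {
         symtab_node node = first;
         first = (symtab_node)first->symbol.aux;
         ... process NODE; its AUX field is cleared later ...
       }
*/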
272
273 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
274 functions into the callgraph so that they look like ordinary reachable
275 functions inserted into the callgraph already at construction time. */
276
277 bool
278 cgraph_process_new_functions (void)
279 {
280 bool output = false;
281 tree fndecl;
282 struct cgraph_node *node;
283 cgraph_node_set_iterator csi;
284
285 if (!cgraph_new_nodes)
286 return false;
287 handle_alias_pairs ();
288 /* Note that this queue may grow as it is being processed, as the new
289 functions may generate new ones. */
290 for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
291 {
292 node = csi_node (csi);
293 fndecl = node->symbol.decl;
294 switch (cgraph_state)
295 {
296 case CGRAPH_STATE_CONSTRUCTION:
297 /* At construction time we just need to finalize the function and move
298 it into the reachable functions list. */
299
300 cgraph_finalize_function (fndecl, false);
301 output = true;
302 cgraph_call_function_insertion_hooks (node);
303 enqueue_node ((symtab_node) node);
304 break;
305
306 case CGRAPH_STATE_IPA:
307 case CGRAPH_STATE_IPA_SSA:
308 /* When IPA optimization has already started, do all essential
309 transformations that have already been performed on the whole
310 cgraph but not on this function. */
311
312 gimple_register_cfg_hooks ();
313 if (!node->analyzed)
314 cgraph_analyze_function (node);
315 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
316 current_function_decl = fndecl;
317 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
318 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
319 /* When not optimizing, be sure we run early local passes anyway
320 to expand OMP. */
321 || !optimize)
322 execute_pass_list (pass_early_local_passes.pass.sub);
323 else
324 compute_inline_parameters (node, true);
325 free_dominance_info (CDI_POST_DOMINATORS);
326 free_dominance_info (CDI_DOMINATORS);
327 pop_cfun ();
328 current_function_decl = NULL;
329 cgraph_call_function_insertion_hooks (node);
330 break;
331
332 case CGRAPH_STATE_EXPANSION:
333 /* Functions created during expansion shall be compiled
334 directly. */
335 node->process = 0;
336 cgraph_call_function_insertion_hooks (node);
337 expand_function (node);
338 break;
339
340 default:
341 gcc_unreachable ();
342 break;
343 }
344 }
345 free_cgraph_node_set (cgraph_new_nodes);
346 cgraph_new_nodes = NULL;
347 return output;
348 }
349
350 /* As a GCC extension we allow redefinition of the function. The
351 semantics when the two bodies differ are not well defined.
352 We replace the old body with the new body, so in unit-at-a-time mode
353 we always use the new body, while in normal mode we may end up with
354 the old body inlined into some functions and the new body expanded and
355 inlined in others.
356
357 ??? It may make more sense to use one body for inlining and the other
358 body for expanding the function, but this is difficult to do. */
359
360 static void
361 cgraph_reset_node (struct cgraph_node *node)
362 {
363 /* If node->process is set, then we have already begun whole-unit analysis.
364 This is *not* testing for whether we've already emitted the function.
365 That case can be sort-of legitimately seen with real function redefinition
366 errors. I would argue that the front end should never present us with
367 such a case, but don't enforce that for now. */
368 gcc_assert (!node->process);
369
370 /* Reset our data structures so we can analyze the function again. */
371 memset (&node->local, 0, sizeof (node->local));
372 memset (&node->global, 0, sizeof (node->global));
373 memset (&node->rtl, 0, sizeof (node->rtl));
374 node->analyzed = false;
375 node->local.finalized = false;
376
377 cgraph_node_remove_callees (node);
378 }
379
380 /* Return true when there are references to NODE. */
381
382 static bool
383 referred_to_p (symtab_node node)
384 {
385 struct ipa_ref *ref;
386
387 /* See if there are any references at all. */
388 if (ipa_ref_list_referring_iterate (&node->symbol.ref_list, 0, ref))
389 return true;
390 /* For functions check also calls. */
391 if (symtab_function_p (node) && cgraph (node)->callers)
392 return true;
393 return false;
394 }
395
396 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
397 logic in effect. If NESTED is true, then our caller cannot stand to have
398 the garbage collector run at the moment. We would need to either create
399 a new GC context, or just not compile right now. */
400
401 void
402 cgraph_finalize_function (tree decl, bool nested)
403 {
404 struct cgraph_node *node = cgraph_get_create_node (decl);
405
406 if (node->local.finalized)
407 {
408 cgraph_reset_node (node);
409 node->local.redefined_extern_inline = true;
410 }
411
412 notice_global_symbol (decl);
413 node->local.finalized = true;
414 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
415
416 /* With -fkeep-inline-functions we are keeping all inline functions except
417 for extern inline ones. */
418 if (flag_keep_inline_functions
419 && DECL_DECLARED_INLINE_P (decl)
420 && !DECL_EXTERNAL (decl)
421 && !DECL_DISREGARD_INLINE_LIMITS (decl))
422 node->symbol.force_output = 1;
423
424 /* When not optimizing, also output the static functions (see
425 PR24561), but don't do so for always_inline functions, functions
426 declared inline and nested functions. These were optimized out
427 in the original implementation and it is unclear whether we want
428 to change the behavior here. */
429 if ((!optimize
430 && !node->same_body_alias
431 && !DECL_DISREGARD_INLINE_LIMITS (decl)
432 && !DECL_DECLARED_INLINE_P (decl)
433 && !(DECL_CONTEXT (decl)
434 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
435 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
436 node->symbol.force_output = 1;
437
438 /* If we've not yet emitted decl, tell the debug info about it. */
439 if (!TREE_ASM_WRITTEN (decl))
440 (*debug_hooks->deferred_inline_function) (decl);
441
442 /* Possibly warn about unused parameters. */
443 if (warn_unused_parameter)
444 do_warn_unused_parameter (decl);
445
446 if (!nested)
447 ggc_collect ();
448
449 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
450 && (cgraph_decide_is_function_needed (node, decl)
451 || referred_to_p ((symtab_node)node)))
452 enqueue_node ((symtab_node)node);
453 }
454
455 /* Add the function FNDECL to the call graph.
456 Unlike cgraph_finalize_function, this function is intended to be used
457 by the middle end and allows insertion of a new function at an arbitrary point
458 of compilation. The function can be either in high, low or SSA form
459 GIMPLE.
460
461 The function is assumed to be reachable and to have its address taken (so no
462 API-breaking optimizations are performed on it).
463
464 The main work done by this function is to enqueue the function for later
465 processing, avoiding the need for the passes to be re-entrant. */
466
467 void
468 cgraph_add_new_function (tree fndecl, bool lowered)
469 {
470 struct cgraph_node *node;
471 switch (cgraph_state)
472 {
473 case CGRAPH_STATE_PARSING:
474 cgraph_finalize_function (fndecl, false);
475 break;
476 case CGRAPH_STATE_CONSTRUCTION:
477 /* Just enqueue the function to be processed at the nearest opportunity. */
478 node = cgraph_create_node (fndecl);
479 if (lowered)
480 node->lowered = true;
481 if (!cgraph_new_nodes)
482 cgraph_new_nodes = cgraph_node_set_new ();
483 cgraph_node_set_add (cgraph_new_nodes, node);
484 break;
485
486 case CGRAPH_STATE_IPA:
487 case CGRAPH_STATE_IPA_SSA:
488 case CGRAPH_STATE_EXPANSION:
489 /* Bring the function into finalized state and enqueue it for later
490 analysis and compilation. */
491 node = cgraph_get_create_node (fndecl);
492 node->local.local = false;
493 node->local.finalized = true;
494 node->symbol.force_output = true;
495 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
496 {
497 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
498 current_function_decl = fndecl;
499 gimple_register_cfg_hooks ();
500 bitmap_obstack_initialize (NULL);
501 execute_pass_list (all_lowering_passes);
502 execute_pass_list (pass_early_local_passes.pass.sub);
503 bitmap_obstack_release (NULL);
504 pop_cfun ();
505 current_function_decl = NULL;
506
507 lowered = true;
508 }
509 if (lowered)
510 node->lowered = true;
511 if (!cgraph_new_nodes)
512 cgraph_new_nodes = cgraph_node_set_new ();
513 cgraph_node_set_add (cgraph_new_nodes, node);
514 break;
515
516 case CGRAPH_STATE_FINISHED:
517 /* At the very end of compilation we have to do all the work up
518 to expansion. */
519 node = cgraph_create_node (fndecl);
520 if (lowered)
521 node->lowered = true;
522 cgraph_analyze_function (node);
523 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
524 current_function_decl = fndecl;
525 gimple_register_cfg_hooks ();
526 bitmap_obstack_initialize (NULL);
527 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
528 execute_pass_list (pass_early_local_passes.pass.sub);
529 bitmap_obstack_release (NULL);
530 pop_cfun ();
531 expand_function (node);
532 current_function_decl = NULL;
533 break;
534
535 default:
536 gcc_unreachable ();
537 }
538
539 /* Set a personality if required and we already passed EH lowering. */
540 if (lowered
541 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
542 == eh_personality_lang))
543 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
544 }
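/* Illustrative sketch (not part of GCC): a middle-end pass that synthesizes
   a helper function would build its FUNCTION_DECL, give it a gimplified
   body, and then hand it to the call graph.  build_my_helper_fndecl is a
   hypothetical placeholder for whatever constructs the decl and its body.

     tree fndecl = build_my_helper_fndecl ();
     gimplify_function_tree (fndecl);
     cgraph_add_new_function (fndecl, false);

   Passing LOWERED as false means cgraph_add_new_function itself runs the
   lowering and early local passes when the current cgraph state requires
   them (see the CGRAPH_STATE_EXPANSION case above).  */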
545
546 /* Add a top-level asm statement to the list. */
547
548 struct asm_node *
549 add_asm_node (tree asm_str)
550 {
551 struct asm_node *node;
552
553 node = ggc_alloc_cleared_asm_node ();
554 node->asm_str = asm_str;
555 node->order = symtab_order++;
556 node->next = NULL;
557 if (asm_nodes == NULL)
558 asm_nodes = node;
559 else
560 asm_last_node->next = node;
561 asm_last_node = node;
562 return node;
563 }
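/* Illustrative sketch (not part of GCC): a frontend that has parsed a
   toplevel asm ("...") statement hands the STRING_CST over like this; the
   literal below is an arbitrary example.

     const char *s = ".weak my_symbol";
     add_asm_node (build_string (strlen (s), s));

   The stored nodes are emitted later by output_asm_statements, or by
   output_in_order when -fno-toplevel-reorder is in effect.  */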
564
565 /* Output all asm statements we have stored up to be output. */
566
567 static void
568 output_asm_statements (void)
569 {
570 struct asm_node *can;
571
572 if (seen_error ())
573 return;
574
575 for (can = asm_nodes; can; can = can->next)
576 assemble_asm (can->asm_str);
577 asm_nodes = NULL;
578 }
579
580 /* The C++ FE sometimes changes linkage flags after producing same-body aliases. */
581 void
582 fixup_same_cpp_alias_visibility (symtab_node node, symtab_node target, tree alias)
583 {
584 DECL_VIRTUAL_P (node->symbol.decl) = DECL_VIRTUAL_P (alias);
585 if (TREE_PUBLIC (node->symbol.decl))
586 {
587 DECL_EXTERNAL (node->symbol.decl) = DECL_EXTERNAL (alias);
588 DECL_COMDAT (node->symbol.decl) = DECL_COMDAT (alias);
589 DECL_COMDAT_GROUP (node->symbol.decl) = DECL_COMDAT_GROUP (alias);
590 if (DECL_ONE_ONLY (alias)
591 && !node->symbol.same_comdat_group)
592 symtab_add_to_same_comdat_group ((symtab_node)node, (symtab_node)target);
593 }
594 }
595
596 /* Analyze the function scheduled to be output. */
597 static void
598 cgraph_analyze_function (struct cgraph_node *node)
599 {
600 tree save = current_function_decl;
601 tree decl = node->symbol.decl;
602 location_t saved_loc = input_location;
603 input_location = DECL_SOURCE_LOCATION (decl);
604
605 if (node->alias && node->thunk.alias)
606 {
607 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
608 struct cgraph_node *n;
609
610 for (n = tgt; n && n->alias;
611 n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
612 if (n == node)
613 {
614 error ("function %q+D part of alias cycle", node->symbol.decl);
615 node->alias = false;
616 input_location = saved_loc;
617 return;
618 }
619 if (!VEC_length (ipa_ref_t, node->symbol.ref_list.references))
620 ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
621 IPA_REF_ALIAS, NULL);
622 if (node->same_body_alias)
623 {
624 DECL_DECLARED_INLINE_P (node->symbol.decl)
625 = DECL_DECLARED_INLINE_P (node->thunk.alias);
626 DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl)
627 = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
628 fixup_same_cpp_alias_visibility ((symtab_node) node, (symtab_node) tgt, node->thunk.alias);
629 }
630
631 if (node->symbol.address_taken)
632 cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
633 }
634 else if (node->thunk.thunk_p)
635 {
636 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
637 NULL, 0, CGRAPH_FREQ_BASE);
638 }
639 else
640 {
641 current_function_decl = decl;
642 push_cfun (DECL_STRUCT_FUNCTION (decl));
643
644 assign_assembler_name_if_neeeded (node->symbol.decl);
645
646 /* Make sure to gimplify bodies only once. While analyzing a
647 function we lower it, which will require gimplified nested
648 functions, so we can end up here with an already gimplified
649 body. */
650 if (!gimple_has_body_p (decl))
651 gimplify_function_tree (decl);
652 dump_function (TDI_generic, decl);
653
654 /* Lower the function. */
655 if (!node->lowered)
656 {
657 if (node->nested)
658 lower_nested_functions (node->symbol.decl);
659 gcc_assert (!node->nested);
660
661 gimple_register_cfg_hooks ();
662 bitmap_obstack_initialize (NULL);
663 execute_pass_list (all_lowering_passes);
664 free_dominance_info (CDI_POST_DOMINATORS);
665 free_dominance_info (CDI_DOMINATORS);
666 compact_blocks ();
667 bitmap_obstack_release (NULL);
668 node->lowered = true;
669 }
670
671 pop_cfun ();
672 }
673 node->analyzed = true;
674
675 current_function_decl = save;
676 input_location = saved_loc;
677 }
678
679 /* The C++ frontend produces same-body aliases all over the place, even before
680 PCH gets streamed out. It relies on us linking the aliases with their
681 function in order to do the fixups, but ipa-ref is not PCH safe. Consequently
682 we first produce aliases without links, but once the C++ FE is sure it won't
683 stream PCH we build the links via this function. */
684
685 void
686 cgraph_process_same_body_aliases (void)
687 {
688 struct cgraph_node *node;
689 FOR_EACH_FUNCTION (node)
690 if (node->same_body_alias
691 && !VEC_length (ipa_ref_t, node->symbol.ref_list.references))
692 {
693 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
694 ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
695 IPA_REF_ALIAS, NULL);
696 }
697 same_body_aliases_done = true;
698 }
699
700 /* Process attributes common to vars and functions. */
701
702 static void
703 process_common_attributes (tree decl)
704 {
705 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
706
707 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
708 {
709 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
710 "%<weakref%> attribute should be accompanied with"
711 " an %<alias%> attribute");
712 DECL_WEAK (decl) = 0;
713 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
714 DECL_ATTRIBUTES (decl));
715 }
716 }
717
718 /* Look for externally_visible and used attributes and mark cgraph nodes
719 accordingly.
720
721 We cannot mark the nodes at the point the attributes are processed (in
722 handle_*_attribute) because the copy of the declarations available at that
723 point may not be canonical. For example, in:
724
725 void f();
726 void f() __attribute__((used));
727
728 the declaration we see in handle_used_attribute will be the second
729 declaration -- but the front end will subsequently merge that declaration
730 with the original declaration and discard the second declaration.
731
732 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
733
734 void f() {}
735 void f() __attribute__((externally_visible));
736
737 is valid.
738
739 So, we walk the nodes at the end of the translation unit, applying the
740 attributes at that point. */
741
742 static void
743 process_function_and_variable_attributes (struct cgraph_node *first,
744 struct varpool_node *first_var)
745 {
746 struct cgraph_node *node;
747 struct varpool_node *vnode;
748
749 for (node = cgraph_first_function (); node != first;
750 node = cgraph_next_function (node))
751 {
752 tree decl = node->symbol.decl;
753 if (DECL_PRESERVE_P (decl))
754 cgraph_mark_force_output_node (node);
755 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
756 {
757 if (! TREE_PUBLIC (node->symbol.decl))
758 warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
759 "%<externally_visible%>"
760 " attribute have effect only on public objects");
761 }
762 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
763 && (node->local.finalized && !node->alias))
764 {
765 warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
766 "%<weakref%> attribute ignored"
767 " because function is defined");
768 DECL_WEAK (decl) = 0;
769 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
770 DECL_ATTRIBUTES (decl));
771 }
772
773 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
774 && !DECL_DECLARED_INLINE_P (decl)
775 /* Redefining an extern inline function makes it DECL_UNINLINABLE. */
776 && !DECL_UNINLINABLE (decl))
777 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
778 "always_inline function might not be inlinable");
779
780 process_common_attributes (decl);
781 }
782 for (vnode = varpool_first_variable (); vnode != first_var;
783 vnode = varpool_next_variable (vnode))
784 {
785 tree decl = vnode->symbol.decl;
786 if (DECL_EXTERNAL (decl)
787 && DECL_INITIAL (decl)
788 && const_value_known_p (decl))
789 varpool_finalize_decl (decl);
790 if (DECL_PRESERVE_P (decl))
791 vnode->symbol.force_output = true;
792 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
793 {
794 if (! TREE_PUBLIC (vnode->symbol.decl))
795 warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
796 "%<externally_visible%>"
797 " attribute have effect only on public objects");
798 }
799 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
800 && vnode->finalized
801 && DECL_INITIAL (decl))
802 {
803 warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
804 "%<weakref%> attribute ignored"
805 " because variable is initialized");
806 DECL_WEAK (decl) = 0;
807 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
808 DECL_ATTRIBUTES (decl));
809 }
810 process_common_attributes (decl);
811 }
812 }
813
814 /* Mark DECL as finalized. By finalizing the declaration, the frontend instructs
815 the middle end to output the variable to the asm file if it is needed or
816 externally visible. */
817
818 void
819 varpool_finalize_decl (tree decl)
820 {
821 struct varpool_node *node = varpool_node (decl);
822
823 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
824
825 if (node->finalized)
826 return;
827 notice_global_symbol (decl);
828 node->finalized = true;
829 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
830 /* Traditionally we do not eliminate static variables when not
831 optimizing and when not doing toplevel reorder. */
832 || (!flag_toplevel_reorder && !DECL_COMDAT (node->symbol.decl)
833 && !DECL_ARTIFICIAL (node->symbol.decl)))
834 node->symbol.force_output = true;
835
836 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
837 && (decide_is_variable_needed (node, decl)
838 || referred_to_p ((symtab_node)node)))
839 enqueue_node ((symtab_node)node);
840 if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
841 varpool_analyze_node (node);
842 /* Some frontends produce various interface variables after compilation
843 has finished. */
844 if (cgraph_state == CGRAPH_STATE_FINISHED)
845 varpool_assemble_decl (node);
846 }
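/* Illustrative sketch (not part of GCC): a frontend finalizing a file-scope
   static variable would do roughly the following once the initializer can
   no longer change.  The identifier, type and initial value are arbitrary.

     tree var = build_decl (input_location, VAR_DECL,
                            get_identifier ("my_static_counter"),
                            integer_type_node);
     TREE_STATIC (var) = 1;
     DECL_INITIAL (var) = build_int_cst (integer_type_node, 0);
     varpool_finalize_decl (var);
*/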
847
848 /* Discover all functions and variables that are trivially needed, analyze
849 them as well as all functions and variables referred to by them. */
850
851 static void
852 cgraph_analyze_functions (void)
853 {
854 /* Keep track of already processed nodes when called multiple times for
855 intermodule optimization. */
856 static struct cgraph_node *first_analyzed;
857 struct cgraph_node *first_handled = first_analyzed;
858 static struct varpool_node *first_analyzed_var;
859 struct varpool_node *first_handled_var = first_analyzed_var;
860
861 symtab_node node, next;
862 int i;
863 struct ipa_ref *ref;
864 bool changed = true;
865
866 bitmap_obstack_initialize (NULL);
867 cgraph_state = CGRAPH_STATE_CONSTRUCTION;
868
869 /* Analysis adds static variables that in turn add references to new functions.
870 So we need to iterate the process until it stabilizes. */
871 while (changed)
872 {
873 changed = false;
874 process_function_and_variable_attributes (first_analyzed,
875 first_analyzed_var);
876
877 /* First identify the trivially needed symbols. */
878 for (node = symtab_nodes;
879 node != (symtab_node)first_analyzed
880 && node != (symtab_node)first_analyzed_var; node = node->symbol.next)
881 {
882 if ((symtab_function_p (node)
883 && cgraph (node)->local.finalized
884 && cgraph_decide_is_function_needed (cgraph (node), node->symbol.decl))
885 || (symtab_variable_p (node)
886 && varpool (node)->finalized
887 && !DECL_EXTERNAL (node->symbol.decl)
888 && decide_is_variable_needed (varpool (node), node->symbol.decl)))
889 {
890 enqueue_node (node);
891 if (!changed && cgraph_dump_file)
892 fprintf (cgraph_dump_file, "Trivially needed symbols:");
893 changed = true;
894 if (cgraph_dump_file)
895 fprintf (cgraph_dump_file, " %s", symtab_node_asm_name (node));
896 }
897 if (node == (symtab_node)first_analyzed
898 || node == (symtab_node)first_analyzed_var)
899 break;
900 }
901 cgraph_process_new_functions ();
902 first_analyzed_var = varpool_first_variable ();
903 first_analyzed = cgraph_first_function ();
904
905 if (changed && cgraph_dump_file)
906 fprintf (cgraph_dump_file, "\n");
907
908 /* Lower representation, build callgraph edges and references for all trivially
909 needed symbols and all symbols referred to by them. */
910 while (first != (symtab_node)(void *)1)
911 {
912 changed = true;
913 node = first;
914 first = (symtab_node)first->symbol.aux;
915 if (symtab_function_p (node) && cgraph (node)->local.finalized)
916 {
917 struct cgraph_edge *edge;
918 struct cgraph_node *cnode;
919 tree decl;
920
921 cnode = cgraph (node);
922 decl = cnode->symbol.decl;
923
924 /* ??? It is possible to create an extern inline function and later use the
925 weak alias attribute to kill its body. See
926 gcc.c-torture/compile/20011119-1.c */
927 if (!DECL_STRUCT_FUNCTION (decl)
928 && (!cnode->alias || !cnode->thunk.alias)
929 && !cnode->thunk.thunk_p)
930 {
931 cgraph_reset_node (cnode);
932 cnode->local.redefined_extern_inline = true;
933 continue;
934 }
935
936 if (!cnode->analyzed)
937 cgraph_analyze_function (cnode);
938
939 for (edge = cnode->callees; edge; edge = edge->next_callee)
940 if (edge->callee->local.finalized)
941 enqueue_node ((symtab_node)edge->callee);
942
943 /* If decl is a clone of an abstract function, mark that abstract
944 function so that we don't release its body. The DECL_INITIAL() of that
945 abstract function declaration will later be needed to output debug
946 info. */
947 if (DECL_ABSTRACT_ORIGIN (decl))
948 {
949 struct cgraph_node *origin_node;
950 origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
951 origin_node->abstract_and_needed = true;
952 }
953
954 }
955 else if (symtab_variable_p (node)
956 && varpool (node)->finalized)
957 varpool_analyze_node (varpool (node));
958
959 if (node->symbol.same_comdat_group)
960 {
961 symtab_node next;
962 for (next = node->symbol.same_comdat_group;
963 next != node;
964 next = next->symbol.same_comdat_group)
965 enqueue_node (next);
966 }
967 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
968 if ((symtab_function_p (ref->referred) && cgraph (ref->referred)->local.finalized)
969 || (symtab_variable_p (ref->referred) && varpool (ref->referred)->finalized))
970 enqueue_node (ref->referred);
971 cgraph_process_new_functions ();
972 }
973 }
974
975 /* Collect entry points to the unit. */
976 if (cgraph_dump_file)
977 {
978 fprintf (cgraph_dump_file, "\n\nInitial ");
979 dump_symtab (cgraph_dump_file);
980 }
981
982 if (cgraph_dump_file)
983 fprintf (cgraph_dump_file, "\nRemoving unused symbols:");
984
985 for (node = symtab_nodes;
986 node != (symtab_node)first_handled
987 && node != (symtab_node)first_handled_var; node = next)
988 {
989 next = node->symbol.next;
990 if (!node->symbol.aux && !referred_to_p (node))
991 {
992 if (cgraph_dump_file)
993 fprintf (cgraph_dump_file, " %s", symtab_node_name (node));
994 symtab_remove_node (node);
995 continue;
996 }
997 if (symtab_function_p (node))
998 {
999 tree decl = node->symbol.decl;
1000 struct cgraph_node *cnode = cgraph (node);
1001
1002 if (cnode->local.finalized && !gimple_has_body_p (decl)
1003 && (!cnode->alias || !cnode->thunk.alias)
1004 && !cnode->thunk.thunk_p)
1005 cgraph_reset_node (cnode);
1006
1007 gcc_assert (!cnode->local.finalized || cnode->thunk.thunk_p
1008 || cnode->alias
1009 || gimple_has_body_p (decl));
1010 gcc_assert (cnode->analyzed == cnode->local.finalized);
1011 }
1012 node->symbol.aux = NULL;
1013 }
1014 first_analyzed = cgraph_first_function ();
1015 first_analyzed_var = varpool_first_variable ();
1016 if (cgraph_dump_file)
1017 {
1018 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1019 dump_symtab (cgraph_dump_file);
1020 }
1021 bitmap_obstack_release (NULL);
1022 ggc_collect ();
1023 }
1024
1025 /* Translate the ugly representation of aliases as alias pairs into a nice
1026 representation in the callgraph. We don't handle all cases yet,
1027 unfortunately. */
1028
1029 static void
1030 handle_alias_pairs (void)
1031 {
1032 alias_pair *p;
1033 unsigned i;
1034
1035 for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
1036 {
1037 symtab_node target_node = symtab_node_for_asm (p->target);
1038
1039 /* Weakrefs with the target not defined in the current unit are easy to handle;
1040 they behave just as external variables except we need to note the alias flag
1041 to later output the weakref pseudo op into the asm file. */
1042 if (!target_node && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1043 {
1044 if (TREE_CODE (p->decl) == FUNCTION_DECL)
1045 cgraph_get_create_node (p->decl)->alias = true;
1046 else
1047 varpool_get_node (p->decl)->alias = true;
1048 DECL_EXTERNAL (p->decl) = 1;
1049 VEC_unordered_remove (alias_pair, alias_pairs, i);
1050 continue;
1051 }
1052 else if (!target_node)
1053 {
1054 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1055 VEC_unordered_remove (alias_pair, alias_pairs, i);
1056 continue;
1057 }
1058
1059 /* Normally EXTERNAL flag is used to mark external inlines,
1060 however for aliases it seems to be allowed to use it w/o
1061 any meaning. See gcc.dg/attr-alias-3.c
1062 However for weakref we insist on EXTERNAL flag being set.
1063 See gcc.dg/attr-alias-5.c */
1064 if (DECL_EXTERNAL (p->decl))
1065 DECL_EXTERNAL (p->decl)
1066 = lookup_attribute ("weakref",
1067 DECL_ATTRIBUTES (p->decl)) != NULL;
1068
1069 if (DECL_EXTERNAL (target_node->symbol.decl)
1070 /* We use local aliases for C++ thunks to force the tailcall
1071 to bind locally. This is a hack - to keep it working do
1072 the following (which is not strictly correct). */
1073 && (TREE_CODE (target_node->symbol.decl) != FUNCTION_DECL
1074 || ! DECL_VIRTUAL_P (target_node->symbol.decl))
1075 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1076 {
1077 error ("%q+D aliased to external symbol %qE",
1078 p->decl, p->target);
1079 }
1080
1081 if (TREE_CODE (p->decl) == FUNCTION_DECL
1082 && target_node && symtab_function_p (target_node))
1083 {
1084 struct cgraph_node *src_node = cgraph_get_node (p->decl);
1085 if (src_node && src_node->local.finalized)
1086 cgraph_reset_node (src_node);
1087 cgraph_create_function_alias (p->decl, target_node->symbol.decl);
1088 VEC_unordered_remove (alias_pair, alias_pairs, i);
1089 }
1090 else if (TREE_CODE (p->decl) == VAR_DECL
1091 && target_node && symtab_variable_p (target_node))
1092 {
1093 varpool_create_variable_alias (p->decl, target_node->symbol.decl);
1094 VEC_unordered_remove (alias_pair, alias_pairs, i);
1095 }
1096 else
1097 {
1098 error ("%q+D alias in between function and variable is not supported",
1099 p->decl);
1100 warning (0, "%q+D aliased declaration",
1101 target_node->symbol.decl);
1102 VEC_unordered_remove (alias_pair, alias_pairs, i);
1103 }
1104 }
1105 VEC_free (alias_pair, gc, alias_pairs);
1106 }
1107
1108
1109 /* Figure out what functions we want to assemble. */
1110
1111 static void
1112 mark_functions_to_output (void)
1113 {
1114 struct cgraph_node *node;
1115 #ifdef ENABLE_CHECKING
1116 bool check_same_comdat_groups = false;
1117
1118 FOR_EACH_FUNCTION (node)
1119 gcc_assert (!node->process);
1120 #endif
1121
1122 FOR_EACH_FUNCTION (node)
1123 {
1124 tree decl = node->symbol.decl;
1125
1126 gcc_assert (!node->process || node->symbol.same_comdat_group);
1127 if (node->process)
1128 continue;
1129
1130 /* We need to output all local functions that are used and not
1131 always inlined, as well as those that are reachable from
1132 outside the current compilation unit. */
1133 if (node->analyzed
1134 && !node->thunk.thunk_p
1135 && !node->alias
1136 && !node->global.inlined_to
1137 && !TREE_ASM_WRITTEN (decl)
1138 && !DECL_EXTERNAL (decl))
1139 {
1140 node->process = 1;
1141 if (node->symbol.same_comdat_group)
1142 {
1143 struct cgraph_node *next;
1144 for (next = cgraph (node->symbol.same_comdat_group);
1145 next != node;
1146 next = cgraph (next->symbol.same_comdat_group))
1147 if (!next->thunk.thunk_p && !next->alias)
1148 next->process = 1;
1149 }
1150 }
1151 else if (node->symbol.same_comdat_group)
1152 {
1153 #ifdef ENABLE_CHECKING
1154 check_same_comdat_groups = true;
1155 #endif
1156 }
1157 else
1158 {
1159 /* We should've reclaimed all functions that are not needed. */
1160 #ifdef ENABLE_CHECKING
1161 if (!node->global.inlined_to
1162 && gimple_has_body_p (decl)
1163 /* FIXME: in an ltrans unit when the offline copy is outside a partition but
1164 inline copies are inside a partition, we can end up not removing the body
1165 since we no longer have an analyzed node pointing to it. */
1166 && !node->symbol.in_other_partition
1167 && !node->alias
1168 && !node->clones
1169 && !DECL_EXTERNAL (decl))
1170 {
1171 dump_cgraph_node (stderr, node);
1172 internal_error ("failed to reclaim unneeded function");
1173 }
1174 #endif
1175 gcc_assert (node->global.inlined_to
1176 || !gimple_has_body_p (decl)
1177 || node->symbol.in_other_partition
1178 || node->clones
1179 || DECL_ARTIFICIAL (decl)
1180 || DECL_EXTERNAL (decl));
1181
1182 }
1183
1184 }
1185 #ifdef ENABLE_CHECKING
1186 if (check_same_comdat_groups)
1187 FOR_EACH_FUNCTION (node)
1188 if (node->symbol.same_comdat_group && !node->process)
1189 {
1190 tree decl = node->symbol.decl;
1191 if (!node->global.inlined_to
1192 && gimple_has_body_p (decl)
1193 /* FIXME: in an ltrans unit when the offline copy is outside a
1194 partition but inline copies are inside a partition, we can
1195 end up not removing the body since we no longer have an
1196 analyzed node pointing to it. */
1197 && !node->symbol.in_other_partition
1198 && !node->clones
1199 && !DECL_EXTERNAL (decl))
1200 {
1201 dump_cgraph_node (stderr, node);
1202 internal_error ("failed to reclaim unneeded function in same "
1203 "comdat group");
1204 }
1205 }
1206 #endif
1207 }
1208
1209 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1210 in lowered GIMPLE form.
1211
1212 Set current_function_decl and cfun to the newly constructed empty function
1213 body. Return the basic block in the function body. */
1214
1215 static basic_block
1216 init_lowered_empty_function (tree decl)
1217 {
1218 basic_block bb;
1219
1220 current_function_decl = decl;
1221 allocate_struct_function (decl, false);
1222 gimple_register_cfg_hooks ();
1223 init_empty_tree_cfg ();
1224 init_tree_ssa (cfun);
1225 init_ssa_operands (cfun);
1226 cfun->gimple_df->in_ssa_p = true;
1227 DECL_INITIAL (decl) = make_node (BLOCK);
1228
1229 DECL_SAVED_TREE (decl) = error_mark_node;
1230 cfun->curr_properties |=
1231 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_ssa | PROP_gimple_any);
1232
1233 /* Create BB for body of the function and connect it properly. */
1234 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1235 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1236 make_edge (bb, EXIT_BLOCK_PTR, 0);
1237
1238 return bb;
1239 }
1240
1241 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1242 offset indicated by VIRTUAL_OFFSET, if that is
1243 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1244 zero for a result adjusting thunk. */
1245
1246 static tree
1247 thunk_adjust (gimple_stmt_iterator * bsi,
1248 tree ptr, bool this_adjusting,
1249 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1250 {
1251 gimple stmt;
1252 tree ret;
1253
1254 if (this_adjusting
1255 && fixed_offset != 0)
1256 {
1257 stmt = gimple_build_assign
1258 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1259 ptr,
1260 fixed_offset));
1261 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1262 }
1263
1264 /* If there's a virtual offset, look up that value in the vtable and
1265 adjust the pointer again. */
1266 if (virtual_offset)
1267 {
1268 tree vtabletmp;
1269 tree vtabletmp2;
1270 tree vtabletmp3;
1271
1272 if (!vtable_entry_type)
1273 {
1274 tree vfunc_type = make_node (FUNCTION_TYPE);
1275 TREE_TYPE (vfunc_type) = integer_type_node;
1276 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1277 layout_type (vfunc_type);
1278
1279 vtable_entry_type = build_pointer_type (vfunc_type);
1280 }
1281
1282 vtabletmp =
1283 make_rename_temp (build_pointer_type
1284 (build_pointer_type (vtable_entry_type)), "vptr");
1285
1286 /* The vptr is always at offset zero in the object. */
1287 stmt = gimple_build_assign (vtabletmp,
1288 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1289 ptr));
1290 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1291
1292 /* Form the vtable address. */
1293 vtabletmp2 = make_rename_temp (TREE_TYPE (TREE_TYPE (vtabletmp)),
1294 "vtableaddr");
1295 stmt = gimple_build_assign (vtabletmp2,
1296 build_simple_mem_ref (vtabletmp));
1297 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1298
1299 /* Find the entry with the vcall offset. */
1300 stmt = gimple_build_assign (vtabletmp2,
1301 fold_build_pointer_plus_loc (input_location,
1302 vtabletmp2,
1303 virtual_offset));
1304 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1305
1306 /* Get the offset itself. */
1307 vtabletmp3 = make_rename_temp (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1308 "vcalloffset");
1309 stmt = gimple_build_assign (vtabletmp3,
1310 build_simple_mem_ref (vtabletmp2));
1311 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1312
1313 /* Adjust the `this' pointer. */
1314 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1315 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1316 GSI_CONTINUE_LINKING);
1317 }
1318
1319 if (!this_adjusting
1320 && fixed_offset != 0)
1321 /* Adjust the pointer by the constant. */
1322 {
1323 tree ptrtmp;
1324
1325 if (TREE_CODE (ptr) == VAR_DECL)
1326 ptrtmp = ptr;
1327 else
1328 {
1329 ptrtmp = make_rename_temp (TREE_TYPE (ptr), "ptr");
1330 stmt = gimple_build_assign (ptrtmp, ptr);
1331 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1332 }
1333 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1334 ptrtmp, fixed_offset);
1335 }
1336
1337 /* Emit the statement and gimplify the adjustment expression. */
1338 ret = make_rename_temp (TREE_TYPE (ptr), "adjusted_this");
1339 stmt = gimple_build_assign (ret, ptr);
1340 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1341
1342 return ret;
1343 }
1344
1345 /* Produce assembler for thunk NODE. */
1346
1347 static void
1348 assemble_thunk (struct cgraph_node *node)
1349 {
1350 bool this_adjusting = node->thunk.this_adjusting;
1351 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1352 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1353 tree virtual_offset = NULL;
1354 tree alias = node->thunk.alias;
1355 tree thunk_fndecl = node->symbol.decl;
1356 tree a = DECL_ARGUMENTS (thunk_fndecl);
1357
1358 current_function_decl = thunk_fndecl;
1359
1360 /* Ensure thunks are emitted in their correct sections. */
1361 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1362
1363 if (this_adjusting
1364 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1365 virtual_value, alias))
1366 {
1367 const char *fnname;
1368 tree fn_block;
1369 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1370
1371 DECL_RESULT (thunk_fndecl)
1372 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1373 RESULT_DECL, 0, restype);
1374 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1375
1376 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1377 create one. */
1378 fn_block = make_node (BLOCK);
1379 BLOCK_VARS (fn_block) = a;
1380 DECL_INITIAL (thunk_fndecl) = fn_block;
1381 init_function_start (thunk_fndecl);
1382 cfun->is_thunk = 1;
1383 assemble_start_function (thunk_fndecl, fnname);
1384
1385 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1386 fixed_offset, virtual_value, alias);
1387
1388 assemble_end_function (thunk_fndecl, fnname);
1389 init_insn_lengths ();
1390 free_after_compilation (cfun);
1391 set_cfun (NULL);
1392 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1393 node->thunk.thunk_p = false;
1394 node->analyzed = false;
1395 }
1396 else
1397 {
1398 tree restype;
1399 basic_block bb, then_bb, else_bb, return_bb;
1400 gimple_stmt_iterator bsi;
1401 int nargs = 0;
1402 tree arg;
1403 int i;
1404 tree resdecl;
1405 tree restmp = NULL;
1406 VEC(tree, heap) *vargs;
1407
1408 gimple call;
1409 gimple ret;
1410
1411 DECL_IGNORED_P (thunk_fndecl) = 1;
1412 bitmap_obstack_initialize (NULL);
1413
1414 if (node->thunk.virtual_offset_p)
1415 virtual_offset = size_int (virtual_value);
1416
1417 /* Build the return declaration for the function. */
1418 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1419 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1420 {
1421 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1422 DECL_ARTIFICIAL (resdecl) = 1;
1423 DECL_IGNORED_P (resdecl) = 1;
1424 DECL_RESULT (thunk_fndecl) = resdecl;
1425 }
1426 else
1427 resdecl = DECL_RESULT (thunk_fndecl);
1428
1429 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1430
1431 bsi = gsi_start_bb (bb);
1432
1433 /* Build call to the function being thunked. */
1434 if (!VOID_TYPE_P (restype))
1435 {
1436 if (!is_gimple_reg_type (restype))
1437 {
1438 restmp = resdecl;
1439 add_local_decl (cfun, restmp);
1440 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1441 }
1442 else
1443 restmp = make_rename_temp (restype, "retval");
1444 }
1445
1446 for (arg = a; arg; arg = DECL_CHAIN (arg))
1447 nargs++;
1448 vargs = VEC_alloc (tree, heap, nargs);
1449 if (this_adjusting)
1450 VEC_quick_push (tree, vargs,
1451 thunk_adjust (&bsi,
1452 a, 1, fixed_offset,
1453 virtual_offset));
1454 else
1455 VEC_quick_push (tree, vargs, a);
1456 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1457 VEC_quick_push (tree, vargs, arg);
1458 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1459 VEC_free (tree, heap, vargs);
1460 gimple_call_set_from_thunk (call, true);
1461 if (restmp)
1462 gimple_call_set_lhs (call, restmp);
1463 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1464
1465 if (restmp && !this_adjusting)
1466 {
1467 tree true_label = NULL_TREE;
1468
1469 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1470 {
1471 gimple stmt;
1472 /* If the return type is a pointer, we need to
1473 protect against NULL. We know there will be an
1474 adjustment, because that's why we're emitting a
1475 thunk. */
1476 then_bb = create_basic_block (NULL, (void *) 0, bb);
1477 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1478 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1479 remove_edge (single_succ_edge (bb));
1480 true_label = gimple_block_label (then_bb);
1481 stmt = gimple_build_cond (NE_EXPR, restmp,
1482 build_zero_cst (TREE_TYPE (restmp)),
1483 NULL_TREE, NULL_TREE);
1484 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1485 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1486 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1487 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1488 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1489 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1490 bsi = gsi_last_bb (then_bb);
1491 }
1492
1493 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1494 fixed_offset, virtual_offset);
1495 if (true_label)
1496 {
1497 gimple stmt;
1498 bsi = gsi_last_bb (else_bb);
1499 stmt = gimple_build_assign (restmp,
1500 build_zero_cst (TREE_TYPE (restmp)));
1501 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1502 bsi = gsi_last_bb (return_bb);
1503 }
1504 }
1505 else
1506 gimple_call_set_tail (call, true);
1507
1508 /* Build return value. */
1509 ret = gimple_build_return (restmp);
1510 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1511
1512 delete_unreachable_blocks ();
1513 update_ssa (TODO_update_ssa);
1514
1515 /* Since we want to emit the thunk, we explicitly mark its name as
1516 referenced. */
1517 node->thunk.thunk_p = false;
1518 cgraph_node_remove_callees (node);
1519 cgraph_add_new_function (thunk_fndecl, true);
1520 bitmap_obstack_release (NULL);
1521 }
1522 current_function_decl = NULL;
1523 }
1524
1525
1526
1527 /* Assemble thunks and aliases associated to NODE. */
1528
1529 static void
1530 assemble_thunks_and_aliases (struct cgraph_node *node)
1531 {
1532 struct cgraph_edge *e;
1533 int i;
1534 struct ipa_ref *ref;
1535
1536 for (e = node->callers; e;)
1537 if (e->caller->thunk.thunk_p)
1538 {
1539 struct cgraph_node *thunk = e->caller;
1540
1541 e = e->next_caller;
1542 assemble_thunks_and_aliases (thunk);
1543 assemble_thunk (thunk);
1544 }
1545 else
1546 e = e->next_caller;
1547 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
1548 i, ref); i++)
1549 if (ref->use == IPA_REF_ALIAS)
1550 {
1551 struct cgraph_node *alias = ipa_ref_referring_node (ref);
1552 bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias);
1553
1554 /* Force assemble_alias to really output the alias this time instead
1555 of buffering it in the alias pairs list. */
1556 TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
1557 do_assemble_alias (alias->symbol.decl,
1558 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1559 assemble_thunks_and_aliases (alias);
1560 TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
1561 }
1562 }
1563
1564 /* Expand function specified by NODE. */
1565
1566 static void
1567 expand_function (struct cgraph_node *node)
1568 {
1569 tree decl = node->symbol.decl;
1570 location_t saved_loc;
1571
1572 /* We ought to not compile any inline clones. */
1573 gcc_assert (!node->global.inlined_to);
1574
1575 announce_function (decl);
1576 node->process = 0;
1577 gcc_assert (node->lowered);
1578
1579 /* Generate RTL for the body of DECL. */
1580
1581 timevar_push (TV_REST_OF_COMPILATION);
1582
1583 gcc_assert (cgraph_global_info_ready);
1584
1585 /* Initialize the default bitmap obstack. */
1586 bitmap_obstack_initialize (NULL);
1587
1588 /* Initialize the RTL code for the function. */
1589 current_function_decl = decl;
1590 saved_loc = input_location;
1591 input_location = DECL_SOURCE_LOCATION (decl);
1592 init_function_start (decl);
1593
1594 gimple_register_cfg_hooks ();
1595
1596 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
1597
1598 execute_all_ipa_transforms ();
1599
1600 /* Perform all tree transforms and optimizations. */
1601
1602 /* Signal the start of passes. */
1603 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1604
1605 execute_pass_list (all_passes);
1606
1607 /* Signal the end of passes. */
1608 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1609
1610 bitmap_obstack_release (&reg_obstack);
1611
1612 /* Release the default bitmap obstack. */
1613 bitmap_obstack_release (NULL);
1614
1615 set_cfun (NULL);
1616
1617 /* If requested, warn about function definitions where the function will
1618 return a value (usually of some struct or union type) which itself will
1619 take up a lot of stack space. */
1620 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1621 {
1622 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1623
1624 if (ret_type && TYPE_SIZE_UNIT (ret_type)
1625 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1626 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1627 larger_than_size))
1628 {
1629 unsigned int size_as_int
1630 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1631
1632 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1633 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1634 decl, size_as_int);
1635 else
1636 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1637 decl, larger_than_size);
1638 }
1639 }
1640
1641 gimple_set_body (decl, NULL);
1642 if (DECL_STRUCT_FUNCTION (decl) == 0
1643 && !cgraph_get_node (decl)->origin)
1644 {
1645 /* Stop pointing to the local nodes about to be freed.
1646 But DECL_INITIAL must remain nonzero so we know this
1647 was an actual function definition.
1648 For a nested function, this is done in c_pop_function_context.
1649 If rest_of_compilation set this to 0, leave it 0. */
1650 if (DECL_INITIAL (decl) != 0)
1651 DECL_INITIAL (decl) = error_mark_node;
1652 }
1653
1654 input_location = saved_loc;
1655
1656 ggc_collect ();
1657 timevar_pop (TV_REST_OF_COMPILATION);
1658
1659 /* Make sure that BE didn't give up on compiling. */
1660 gcc_assert (TREE_ASM_WRITTEN (decl));
1661 current_function_decl = NULL;
1662
1663 /* It would make a lot more sense to output thunks before the function body to
1664 get more forward and fewer backward jumps. This however would require solving
1665 a problem with comdats. See PR48668. Also aliases must come after the function
1666 itself to make one-pass assemblers, like the one on AIX, happy. See PR 50689.
1667 FIXME: Perhaps thunks should be moved before the function IFF they are not in
1668 comdat groups. */
1669 assemble_thunks_and_aliases (node);
1670 cgraph_release_function_body (node);
1671 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1672 points to the dead function body. */
1673 cgraph_node_remove_callees (node);
1674 }
1675
1676
1677 /* Expand all functions that must be output.
1678
1679 Attempt to topologically sort the nodes so that a function is output when
1680 all the functions it calls are already assembled, to allow data to be
1681 propagated across the callgraph. Use a stack to get smaller distance
1682 between a function and its callees (later we may choose to use a more
1683 sophisticated algorithm for function reordering; we will likely want
1684 to use subsections to make the output functions appear in top-down
1685 order). */
1686
1687 static void
1688 expand_all_functions (void)
1689 {
1690 struct cgraph_node *node;
1691 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1692 int order_pos, new_order_pos = 0;
1693 int i;
1694
1695 order_pos = ipa_reverse_postorder (order);
1696 gcc_assert (order_pos == cgraph_n_nodes);
1697
1698 /* The garbage collector may remove inline clones we eliminate during
1699 optimization, so we must be sure not to reference them. */
1700 for (i = 0; i < order_pos; i++)
1701 if (order[i]->process)
1702 order[new_order_pos++] = order[i];
1703
1704 for (i = new_order_pos - 1; i >= 0; i--)
1705 {
1706 node = order[i];
1707 if (node->process)
1708 {
1709 node->process = 0;
1710 expand_function (node);
1711 }
1712 }
1713 cgraph_process_new_functions ();
1714
1715 free (order);
1716
1717 }
1718
1719 /* This is used to sort the node types by the cgraph order number. */
1720
1721 enum cgraph_order_sort_kind
1722 {
1723 ORDER_UNDEFINED = 0,
1724 ORDER_FUNCTION,
1725 ORDER_VAR,
1726 ORDER_ASM
1727 };
1728
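/* One element of the array built by output_in_order: entry I describes the
   toplevel entity whose symtab order number is I, or is left as
   ORDER_UNDEFINED when no such entity needs to be output.  */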
1729 struct cgraph_order_sort
1730 {
1731 enum cgraph_order_sort_kind kind;
1732 union
1733 {
1734 struct cgraph_node *f;
1735 struct varpool_node *v;
1736 struct asm_node *a;
1737 } u;
1738 };
1739
1740 /* Output all functions, variables, and asm statements in the order
1741 given by their order fields, which is the order in which they
1742 appeared in the source file. This implements -fno-toplevel-reorder.
1743 In this mode we may output functions and variables which don't
1744 really need to be output. */
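/* Illustrative example (hypothetical translation unit): with
   -fno-toplevel-reorder, a unit containing

     int a;
     asm ("# marker");
     void f (void) {}

   is emitted as the variable a, then the toplevel asm, then f, following
   the order numbers recorded when each entity was parsed.  */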
1745
1746 static void
1747 output_in_order (void)
1748 {
1749 int max;
1750 struct cgraph_order_sort *nodes;
1751 int i;
1752 struct cgraph_node *pf;
1753 struct varpool_node *pv;
1754 struct asm_node *pa;
1755
1756 max = symtab_order;
1757 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1758
1759 FOR_EACH_DEFINED_FUNCTION (pf)
1760 {
1761 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1762 {
1763 i = pf->symbol.order;
1764 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1765 nodes[i].kind = ORDER_FUNCTION;
1766 nodes[i].u.f = pf;
1767 }
1768 }
1769
1770 FOR_EACH_DEFINED_VARIABLE (pv)
1771 if (!DECL_EXTERNAL (pv->symbol.decl))
1772 {
1773 i = pv->symbol.order;
1774 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1775 nodes[i].kind = ORDER_VAR;
1776 nodes[i].u.v = pv;
1777 }
1778
1779 for (pa = asm_nodes; pa; pa = pa->next)
1780 {
1781 i = pa->order;
1782 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1783 nodes[i].kind = ORDER_ASM;
1784 nodes[i].u.a = pa;
1785 }
1786
1787 /* In this mode we output all statics; finalize their named section flags before assembling them. */
1788
1789 for (i = 0; i < max; ++i)
1790 if (nodes[i].kind == ORDER_VAR)
1791 varpool_finalize_named_section_flags (nodes[i].u.v);
1792
1793 for (i = 0; i < max; ++i)
1794 {
1795 switch (nodes[i].kind)
1796 {
1797 case ORDER_FUNCTION:
1798 nodes[i].u.f->process = 0;
1799 expand_function (nodes[i].u.f);
1800 break;
1801
1802 case ORDER_VAR:
1803 varpool_assemble_decl (nodes[i].u.v);
1804 break;
1805
1806 case ORDER_ASM:
1807 assemble_asm (nodes[i].u.a->asm_str);
1808 break;
1809
1810 case ORDER_UNDEFINED:
1811 break;
1812
1813 default:
1814 gcc_unreachable ();
1815 }
1816 }
1817
1818 asm_nodes = NULL;
1819 free (nodes);
1820 }
1821
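/* Run the interprocedural passes: the small IPA passes, the summary
   (analysis) stages of the regular IPA passes, streaming of LTO summaries
   when requested, and finally the regular IPA passes themselves when this
   invocation produces final code.  */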
1822 static void
1823 ipa_passes (void)
1824 {
1825 set_cfun (NULL);
1826 current_function_decl = NULL;
1827 gimple_register_cfg_hooks ();
1828 bitmap_obstack_initialize (NULL);
1829
1830 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1831
1832 if (!in_lto_p)
1833 {
1834 execute_ipa_pass_list (all_small_ipa_passes);
1835 if (seen_error ())
1836 return;
1837 }
1838
1839 /* Removal of unreachable nodes is not run automatically after the early
1840 passes, because TODOs are executed before the subpasses. It is therefore
1841 important to remove the unreachable functions here, to save work at the
1842 IPA level and to get the LTO symbol tables right. */
1843 symtab_remove_unreachable_nodes (true, cgraph_dump_file);
1844
1845 /* If pass_all_early_optimizations was not scheduled, the state of
1846 the cgraph will not be properly updated. Update it now. */
1847 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1848 cgraph_state = CGRAPH_STATE_IPA_SSA;
1849
1850 if (!in_lto_p)
1851 {
1852 /* Generate coverage variables and constructors. */
1853 coverage_finish ();
1854
1855 /* Process new functions added. */
1856 set_cfun (NULL);
1857 current_function_decl = NULL;
1858 cgraph_process_new_functions ();
1859
1860 execute_ipa_summary_passes
1861 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1862 }
1863
1864 /* Some targets need to handle LTO assembler output specially. */
1865 if (flag_generate_lto)
1866 targetm.asm_out.lto_start ();
1867
1868 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1869
1870 if (!in_lto_p)
1871 ipa_write_summaries ();
1872
1873 if (flag_generate_lto)
1874 targetm.asm_out.lto_end ();
1875
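  /* Execute the regular IPA passes now unless we are only producing a slim
     LTO object (the passes will then run at link time) or we are in an
     LTRANS unit, where the interprocedural decisions were already made
     during the WPA stage.  */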
1876 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
1877 execute_ipa_pass_list (all_regular_ipa_passes);
1878 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1879
1880 bitmap_obstack_release (NULL);
1881 }
1882
1883
1884 /* Return, as an identifier, the target name given by DECL's "alias" attribute. */
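/* Illustrative example (hypothetical declaration): for

     int foo (void) __attribute__ ((alias ("bar")));

   the attribute's value list carries the string "bar", and this function
   returns the identifier node for "bar".  */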
1885
1886 static tree
1887 get_alias_symbol (tree decl)
1888 {
1889 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1890 return get_identifier (TREE_STRING_POINTER
1891 (TREE_VALUE (TREE_VALUE (alias))));
1892 }
1893
1894
1895 /* Weakrefs may be associated with external decls and thus not output
1896 at expansion time. Emit all necessary aliases here. */
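/* Illustrative example (hypothetical declaration): a weakref such as

     static int f (void) __attribute__ ((weakref ("real_f")));

   has no body of its own and is treated as external, so nothing is emitted
   for it during function expansion; the alias directive it stands for must
   still be written out here.  */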
1897
1898 static void
1899 output_weakrefs (void)
1900 {
1901 struct cgraph_node *node;
1902 struct varpool_node *vnode;
1903 FOR_EACH_FUNCTION (node)
1904 if (node->alias && DECL_EXTERNAL (node->symbol.decl)
1905 && !TREE_ASM_WRITTEN (node->symbol.decl)
1906 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
1907 do_assemble_alias (node->symbol.decl,
1908 node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias)
1909 : get_alias_symbol (node->symbol.decl));
1910 FOR_EACH_VARIABLE (vnode)
1911 if (vnode->alias && DECL_EXTERNAL (vnode->symbol.decl)
1912 && !TREE_ASM_WRITTEN (vnode->symbol.decl)
1913 && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->symbol.decl)))
1914 do_assemble_alias (vnode->symbol.decl,
1915 vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of)
1916 : get_alias_symbol (vnode->symbol.decl));
1917 }
1918
1919 /* Initialize callgraph dump file. */
1920
1921 void
1922 init_cgraph (void)
1923 {
1924 if (!cgraph_dump_file)
1925 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1926 }
1927
1928
1929 /* Perform interprocedural optimizations and expand all functions that must be output. */
1930
1931 void
1932 compile (void)
1933 {
1934 if (seen_error ())
1935 return;
1936
1937 #ifdef ENABLE_CHECKING
1938 verify_symtab ();
1939 #endif
1940
1941 timevar_push (TV_CGRAPHOPT);
1942 if (pre_ipa_mem_report)
1943 {
1944 fprintf (stderr, "Memory consumption before IPA\n");
1945 dump_memory_report (false);
1946 }
1947 if (!quiet_flag)
1948 fprintf (stderr, "Performing interprocedural optimizations\n");
1949 cgraph_state = CGRAPH_STATE_IPA;
1950
1951 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
1952 if (flag_lto)
1953 lto_streamer_hooks_init ();
1954
1955 /* Don't run the IPA passes if there were any errors or sorry messages. */
1956 if (!seen_error ())
1957 ipa_passes ();
1958
1959 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
1960 if (seen_error ()
1961 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
1962 {
1963 timevar_pop (TV_CGRAPHOPT);
1964 return;
1965 }
1966
1967 /* This pass removes the bodies of extern inline functions we never inlined.
1968 Doing it only now lets the other IPA passes see what is really going on. */
1969 symtab_remove_unreachable_nodes (false, dump_file);
1970 cgraph_global_info_ready = true;
1971 if (cgraph_dump_file)
1972 {
1973 fprintf (cgraph_dump_file, "Optimized ");
1974 dump_symtab (cgraph_dump_file);
1975 }
1976 if (post_ipa_mem_report)
1977 {
1978 fprintf (stderr, "Memory consumption after IPA\n");
1979 dump_memory_report (false);
1980 }
1981 timevar_pop (TV_CGRAPHOPT);
1982
1983 /* Output everything. */
1984 (*debug_hooks->assembly_start) ();
1985 if (!quiet_flag)
1986 fprintf (stderr, "Assembling functions:\n");
1987 #ifdef ENABLE_CHECKING
1988 verify_symtab ();
1989 #endif
1990
1991 cgraph_materialize_all_clones ();
1992 bitmap_obstack_initialize (NULL);
1993 execute_ipa_pass_list (all_late_ipa_passes);
1994 symtab_remove_unreachable_nodes (true, dump_file);
1995 #ifdef ENABLE_CHECKING
1996 verify_symtab ();
1997 #endif
1998 bitmap_obstack_release (NULL);
1999 mark_functions_to_output ();
2000
2001 cgraph_state = CGRAPH_STATE_EXPANSION;
2002 if (!flag_toplevel_reorder)
2003 output_in_order ();
2004 else
2005 {
2006 output_asm_statements ();
2007
2008 expand_all_functions ();
2009 varpool_output_variables ();
2010 }
2011
2012 cgraph_process_new_functions ();
2013 cgraph_state = CGRAPH_STATE_FINISHED;
2014 output_weakrefs ();
2015
2016 if (cgraph_dump_file)
2017 {
2018 fprintf (cgraph_dump_file, "\nFinal ");
2019 dump_symtab (cgraph_dump_file);
2020 }
2021 #ifdef ENABLE_CHECKING
2022 verify_symtab ();
2023 /* Double check that all inline clones are gone and that all
2024 function bodies have been released from memory. */
2025 if (!seen_error ())
2026 {
2027 struct cgraph_node *node;
2028 bool error_found = false;
2029
2030 FOR_EACH_DEFINED_FUNCTION (node)
2031 if (node->global.inlined_to
2032 || gimple_has_body_p (node->symbol.decl))
2033 {
2034 error_found = true;
2035 dump_cgraph_node (stderr, node);
2036 }
2037 if (error_found)
2038 internal_error ("nodes with unreleased memory found");
2039 }
2040 #endif
2041 }
2042
2043
2044 /* Analyze the whole compilation unit once it is parsed completely. */
2045
2046 void
2047 finalize_compilation_unit (void)
2048 {
2049 timevar_push (TV_CGRAPH);
2050
2051 /* If we're here there's no current function anymore. Some frontends
2052 are lazy in clearing these. */
2053 current_function_decl = NULL;
2054 set_cfun (NULL);
2055
2056 /* Do not skip analyzing the functions if there were errors; otherwise
2057 we would miss diagnostics for the following functions. */
2058
2059 /* Emit size functions we didn't inline. */
2060 finalize_size_functions ();
2061
2062 /* Mark alias targets necessary and emit diagnostics. */
2063 handle_alias_pairs ();
2064
2065 if (!quiet_flag)
2066 {
2067 fprintf (stderr, "\nAnalyzing compilation unit\n");
2068 fflush (stderr);
2069 }
2070
2071 if (flag_dump_passes)
2072 dump_passes ();
2073
2074 /* Gimplify and lower all functions, compute reachability and
2075 remove unreachable nodes. */
2076 cgraph_analyze_functions ();
2077
2078 /* Mark alias targets necessary and emit diagnostics. */
2079 handle_alias_pairs ();
2080
2081 /* Gimplify and lower thunks. */
2082 cgraph_analyze_functions ();
2083
2084 /* Finally drive the pass manager. */
2085 compile ();
2086
2087 timevar_pop (TV_CGRAPH);
2088 }
2089
2090
2091 #include "gt-cgraphunit.h"