8ab274b5640659d933b108beb06385318ad5e8a7
[gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2013 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - cgraph_finalize_function
29
30 This function is called once front-end has parsed whole body of function
 31        and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
 66 	 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
 78 	   The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
 86 	   time.  These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
 98 	d) LTO streaming.  When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
 110	   is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies becomes available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
 121	IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that needs to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known what variable references was not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
 151      The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158 */
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "output.h"
166 #include "rtl.h"
167 #include "gimple.h"
168 #include "gimplify.h"
169 #include "gimple-iterator.h"
170 #include "gimplify-me.h"
171 #include "gimple-ssa.h"
172 #include "tree-cfg.h"
173 #include "tree-into-ssa.h"
174 #include "tree-ssa.h"
175 #include "tree-inline.h"
176 #include "langhooks.h"
177 #include "pointer-set.h"
178 #include "toplev.h"
179 #include "flags.h"
180 #include "ggc.h"
181 #include "debug.h"
182 #include "target.h"
183 #include "diagnostic.h"
184 #include "params.h"
185 #include "fibheap.h"
186 #include "intl.h"
187 #include "function.h"
188 #include "ipa-prop.h"
189 #include "tree-iterator.h"
190 #include "tree-pass.h"
191 #include "tree-dump.h"
192 #include "gimple-pretty-print.h"
193 #include "output.h"
194 #include "coverage.h"
195 #include "plugin.h"
196 #include "ipa-inline.h"
197 #include "ipa-utils.h"
198 #include "lto-streamer.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "regset.h" /* FIXME: For reg_obstack. */
202 #include "context.h"
203 #include "pass_manager.h"
204
205 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
206 secondary queue used during optimization to accommodate passes that
207 may generate new functions that need to be optimized and expanded. */
208 cgraph_node_set cgraph_new_nodes;
209
210 static void expand_all_functions (void);
211 static void mark_functions_to_output (void);
212 static void expand_function (struct cgraph_node *);
213 static void analyze_function (struct cgraph_node *);
214 static void handle_alias_pairs (void);
215
216 FILE *cgraph_dump_file;
217
218 /* Linked list of cgraph asm nodes. */
219 struct asm_node *asm_nodes;
220
221 /* Last node in cgraph_asm_nodes. */
222 static GTY(()) struct asm_node *asm_last_node;
223
224 /* Used for vtable lookup in thunk adjusting. */
225 static GTY (()) tree vtable_entry_type;
226
227 /* Determine if symbol DECL is needed. That is, visible to something
228 either outside this translation unit, something magic in the system
229 configury */
230 bool
231 decide_is_symbol_needed (symtab_node *node)
232 {
233 tree decl = node->decl;
234
235 /* Double check that no one output the function into assembly file
236 early. */
237 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
238 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
239
240 if (!node->definition)
241 return false;
242
243 if (DECL_EXTERNAL (decl))
244 return false;
245
246 /* If the user told us it is used, then it must be so. */
247 if (node->force_output)
248 return true;
249
250 /* ABI forced symbols are needed when they are external. */
251 if (node->forced_by_abi && TREE_PUBLIC (decl))
252 return true;
253
254 /* Keep constructors, destructors and virtual functions. */
255 if (TREE_CODE (decl) == FUNCTION_DECL
256 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
257 return true;
258
259 /* Externally visible variables must be output. The exception is
260 COMDAT variables that must be output only when they are needed. */
261 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
262 return true;
263
264 return false;
265 }
266
267 /* Head of the queue of nodes to be processed while building callgraph */
268
269 static symtab_node *first = (symtab_node *)(void *)1;
270
271 /* Add NODE to queue starting at FIRST.
272 The queue is linked via AUX pointers and terminated by pointer to 1. */
273
274 static void
275 enqueue_node (symtab_node *node)
276 {
277 if (node->aux)
278 return;
279 gcc_checking_assert (first);
280 node->aux = first;
281 first = node;
282 }
283
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.

   The action taken depends on how far compilation has progressed
   (CGRAPH_STATE): at construction time functions are merely finalized;
   during IPA they are analyzed and brought up to date with the passes
   already run; during expansion they are compiled immediately.

   Returns true if at least one function was finalized at construction
   time (i.e. new output may have been queued).  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;
  cgraph_node_set_iterator csi;

  /* Nothing queued — nothing to do.  */
  if (!cgraph_new_nodes)
    return false;

  handle_alias_pairs ();
  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  */
  for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      fndecl = node->decl;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_finalize_function (fndecl, false);
	  output = true;
	  cgraph_call_function_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* Bring the body into SSA if the rest of the unit already is;
	     otherwise just refresh the inliner's summary for it.  */
	  if (cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summary_vec != NULL)
	    compute_inline_parameters (node, true);
	  /* Dominance info computed by the passes above is local to the
	     function; drop it before switching functions.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  cgraph_call_function_insertion_hooks (node);
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_call_function_insertion_hooks (node);
	  expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  /* The whole queue was consumed; release it so a fresh one can be built.  */
  free_cgraph_node_set (cgraph_new_nodes);
  cgraph_new_nodes = NULL;
  return output;
}
355
356 /* As an GCC extension we allow redefinition of the function. The
357 semantics when both copies of bodies differ is not well defined.
358 We replace the old body with new body so in unit at a time mode
359 we always use new body, while in normal mode we may end up with
360 old body inlined into some functions and new body expanded and
361 inlined in others.
362
363 ??? It may make more sense to use one body for inlining and other
364 body for expanding the function but this is difficult to do. */
365
366 void
367 cgraph_reset_node (struct cgraph_node *node)
368 {
369 /* If node->process is set, then we have already begun whole-unit analysis.
370 This is *not* testing for whether we've already emitted the function.
371 That case can be sort-of legitimately seen with real function redefinition
372 errors. I would argue that the front end should never present us with
373 such a case, but don't enforce that for now. */
374 gcc_assert (!node->process);
375
376 /* Reset our data structures so we can analyze the function again. */
377 memset (&node->local, 0, sizeof (node->local));
378 memset (&node->global, 0, sizeof (node->global));
379 memset (&node->rtl, 0, sizeof (node->rtl));
380 node->analyzed = false;
381 node->definition = false;
382 node->alias = false;
383 node->weakref = false;
384 node->cpp_implicit_alias = false;
385
386 cgraph_node_remove_callees (node);
387 ipa_remove_all_references (&node->ref_list);
388 }
389
390 /* Return true when there are references to NODE. */
391
392 static bool
393 referred_to_p (symtab_node *node)
394 {
395 struct ipa_ref *ref;
396
397 /* See if there are any references at all. */
398 if (ipa_ref_list_referring_iterate (&node->ref_list, 0, ref))
399 return true;
400 /* For functions check also calls. */
401 cgraph_node *cn = dyn_cast <cgraph_node> (node);
402 if (cn && cn->callers)
403 return true;
404 return false;
405 }
406
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to
   have the garbage collector run at the moment.  We would need to either
   create a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool no_collect)
{
  struct cgraph_node *node = cgraph_get_create_node (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      /* A repeated top-level definition is the GCC extern inline
	 redefinition extension: throw away the old analysis and note the
	 redefinition.  */
      cgraph_reset_node (node);
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->definition = true;
  /* If the front end already built a CFG, the body is considered lowered.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if ((!optimize
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  /* Collect now unless the caller forbade it (see NO_COLLECT above).  */
  if (!no_collect)
    ggc_collect ();

  /* While the unit is still being built, queue this node for analysis if it
     is needed or already referenced.  */
  if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
      && (decide_is_symbol_needed (node)
	  || referred_to_p (node)))
    enqueue_node (node);
}
468
/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.  LOWERED says whether the body has already been lowered.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  The amount of
   work done immediately grows with how far compilation has progressed:
   at the very end of compilation the function is analyzed and expanded
   on the spot.  */

void
cgraph_add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  struct cgraph_node *node;
  switch (cgraph_state)
    {
    case CGRAPH_STATE_PARSING:
      /* Still parsing: the ordinary finalization path suffices.  */
      cgraph_finalize_function (fndecl, false);
      break;
    case CGRAPH_STATE_CONSTRUCTION:
      /* Just enqueue function to be processed at nearest occurrence.  */
      node = cgraph_create_node (fndecl);
      if (lowered)
	node->lowered = true;
      if (!cgraph_new_nodes)
	cgraph_new_nodes = cgraph_node_set_new ();
      cgraph_node_set_add (cgraph_new_nodes, node);
      break;

    case CGRAPH_STATE_IPA:
    case CGRAPH_STATE_IPA_SSA:
    case CGRAPH_STATE_EXPANSION:
      /* Bring the function into finalized state and enqueue for later
	 analyzing and compilation.  */
      node = cgraph_get_create_node (fndecl);
      node->local.local = false;
      node->definition = true;
      node->force_output = true;
      /* During expansion an unlowered body must be lowered now, since the
	 lowering passes will not run again for it.  */
      if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
	{
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (passes->all_lowering_passes);
	  passes->execute_early_local_passes ();
	  bitmap_obstack_release (NULL);
	  pop_cfun ();

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      if (!cgraph_new_nodes)
	cgraph_new_nodes = cgraph_node_set_new ();
      cgraph_node_set_add (cgraph_new_nodes, node);
      break;

    case CGRAPH_STATE_FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_create_node (fndecl);
      if (lowered)
	node->lowered = true;
      node->definition = true;
      analyze_function (node);
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	g->get_passes ()->execute_early_local_passes ();
      bitmap_obstack_release (NULL);
      pop_cfun ();
      expand_function (node);
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
557
558 /* Add a top-level asm statement to the list. */
559
560 struct asm_node *
561 add_asm_node (tree asm_str)
562 {
563 struct asm_node *node;
564
565 node = ggc_alloc_cleared_asm_node ();
566 node->asm_str = asm_str;
567 node->order = symtab_order++;
568 node->next = NULL;
569 if (asm_nodes == NULL)
570 asm_nodes = node;
571 else
572 asm_last_node->next = node;
573 asm_last_node = node;
574 return node;
575 }
576
577 /* Output all asm statements we have stored up to be output. */
578
579 static void
580 output_asm_statements (void)
581 {
582 struct asm_node *can;
583
584 if (seen_error ())
585 return;
586
587 for (can = asm_nodes; can; can = can->next)
588 assemble_asm (can->asm_str);
589 asm_nodes = NULL;
590 }
591
/* Analyze the function scheduled to be output: build its call edges and
   references, and make sure its body is gimplified and lowered.  Thunks,
   aliases and multi-version dispatchers are handled specially; for those
   no body lowering is performed here.  */
static void
analyze_function (struct cgraph_node *node)
{
  tree decl = node->decl;
  location_t saved_loc = input_location;
  /* Diagnostics issued during analysis should point at the function.  */
  input_location = DECL_SOURCE_LOCATION (decl);

  if (node->thunk.thunk_p)
    {
      cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
			  NULL, 0, CGRAPH_FREQ_BASE);
      /* Try to expand the thunk; if that is not possible now
	 (expand_thunk returning false), just mark the node analyzed.
	 Either way the alias link is dropped afterwards.  */
      if (!expand_thunk (node, false))
	{
	  node->thunk.alias = NULL;
	  node->analyzed = true;
	  return;
	}
      node->thunk.alias = NULL;
    }
  if (node->alias)
    /* An alias has no body of its own; just bind it to its target.  */
    symtab_resolve_alias
       (node, cgraph_get_node (node->alias_target));
  else if (node->dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      struct cgraph_function_version_info *dispatcher_version_info
	= get_cgraph_node_version (node);
      if (dispatcher_version_info != NULL
	  && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (node);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      /* Ordinary function with a body: gimplify and lower it.  */
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (node->decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      /* Lower the function.  */
      if (!node->lowered)
	{
	  /* Lowering nested functions splits them out into siblings;
	     afterwards NODE must not be nested any more.  */
	  if (node->nested)
	    lower_nested_functions (node->decl);
	  gcc_assert (!node->nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (g->get_passes ()->all_lowering_passes);
	  /* Dominance info built by the lowering passes is not kept.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  node->lowered = true;
	}

      pop_cfun ();
    }
  node->analyzed = true;

  input_location = saved_loc;
}
667
668 /* C++ frontend produce same body aliases all over the place, even before PCH
669 gets streamed out. It relies on us linking the aliases with their function
670 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
671 first produce aliases without links, but once C++ FE is sure he won't sream
672 PCH we build the links via this function. */
673
674 void
675 cgraph_process_same_body_aliases (void)
676 {
677 symtab_node *node;
678 FOR_EACH_SYMBOL (node)
679 if (node->cpp_implicit_alias && !node->analyzed)
680 symtab_resolve_alias
681 (node,
682 TREE_CODE (node->alias_target) == VAR_DECL
683 ? (symtab_node *)varpool_node_for_decl (node->alias_target)
684 : (symtab_node *)cgraph_get_create_node (node->alias_target));
685 cpp_implicit_aliases_done = true;
686 }
687
688 /* Process attributes common for vars and functions. */
689
690 static void
691 process_common_attributes (tree decl)
692 {
693 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
694
695 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
696 {
697 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
698 "%<weakref%> attribute should be accompanied with"
699 " an %<alias%> attribute");
700 DECL_WEAK (decl) = 0;
701 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
702 DECL_ATTRIBUTES (decl));
703 }
704 }
705
706 /* Look for externally_visible and used attributes and mark cgraph nodes
707 accordingly.
708
709 We cannot mark the nodes at the point the attributes are processed (in
710 handle_*_attribute) because the copy of the declarations available at that
711 point may not be canonical. For example, in:
712
713 void f();
714 void f() __attribute__((used));
715
716 the declaration we see in handle_used_attribute will be the second
717 declaration -- but the front end will subsequently merge that declaration
718 with the original declaration and discard the second declaration.
719
720 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
721
722 void f() {}
723 void f() __attribute__((externally_visible));
724
725 is valid.
726
727 So, we walk the nodes at the end of the translation unit, applying the
728 attributes at that point. */
729
730 static void
731 process_function_and_variable_attributes (struct cgraph_node *first,
732 struct varpool_node *first_var)
733 {
734 struct cgraph_node *node;
735 struct varpool_node *vnode;
736
737 for (node = cgraph_first_function (); node != first;
738 node = cgraph_next_function (node))
739 {
740 tree decl = node->decl;
741 if (DECL_PRESERVE_P (decl))
742 cgraph_mark_force_output_node (node);
743 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
744 {
745 if (! TREE_PUBLIC (node->decl))
746 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
747 "%<externally_visible%>"
748 " attribute have effect only on public objects");
749 }
750 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
751 && (node->definition && !node->alias))
752 {
753 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
754 "%<weakref%> attribute ignored"
755 " because function is defined");
756 DECL_WEAK (decl) = 0;
757 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
758 DECL_ATTRIBUTES (decl));
759 }
760
761 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
762 && !DECL_DECLARED_INLINE_P (decl)
763 /* redefining extern inline function makes it DECL_UNINLINABLE. */
764 && !DECL_UNINLINABLE (decl))
765 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
766 "always_inline function might not be inlinable");
767
768 process_common_attributes (decl);
769 }
770 for (vnode = varpool_first_variable (); vnode != first_var;
771 vnode = varpool_next_variable (vnode))
772 {
773 tree decl = vnode->decl;
774 if (DECL_EXTERNAL (decl)
775 && DECL_INITIAL (decl))
776 varpool_finalize_decl (decl);
777 if (DECL_PRESERVE_P (decl))
778 vnode->force_output = true;
779 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
780 {
781 if (! TREE_PUBLIC (vnode->decl))
782 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
783 "%<externally_visible%>"
784 " attribute have effect only on public objects");
785 }
786 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
787 && vnode->definition
788 && DECL_INITIAL (decl))
789 {
790 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
791 "%<weakref%> attribute ignored"
792 " because variable is initialized");
793 DECL_WEAK (decl) = 0;
794 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
795 DECL_ATTRIBUTES (decl));
796 }
797 process_common_attributes (decl);
798 }
799 }
800
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct
   the middle end to output the variable to asm file, if needed or externally
   visible.  Safe to call multiple times; subsequent calls are no-ops.  */

void
varpool_finalize_decl (tree decl)
{
  struct varpool_node *node = varpool_node_for_decl (decl);

  /* Only static or external variables have varpool nodes.  */
  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Already finalized — nothing more to do.  */
  if (node->definition)
    return;
  notice_global_symbol (decl);
  node->definition = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reoder.  */
      || (!flag_toplevel_reorder && !DECL_COMDAT (node->decl)
	  && !DECL_ARTIFICIAL (node->decl)))
    node->force_output = true;

  /* While the unit is being built, queue the variable if it is needed or
     already referenced.  */
  if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
      && (decide_is_symbol_needed (node)
	  || referred_to_p (node)))
    enqueue_node (node);
  /* Past the construction stage, variables must be analyzed eagerly.  */
  if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
    varpool_analyze_node (node);
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (cgraph_state == CGRAPH_STATE_FINISHED)
    varpool_assemble_decl (node);
}
834
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
			       struct cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	 (edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; skip the enqueue walk when
     the same list was already processed for an earlier edge.  */
  if (!pointer_set_insert (reachable_call_targets,
			   cache_token))
    {
      if (cgraph_dump_file)
	dump_possible_polymorphic_call_targets
	  (cgraph_dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		   (TREE_TYPE (targets[i]->decl))
		   == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		    (method_class_type
		       (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1)
	{
	  cgraph_node *target;
	  /* Zero possible targets means the call site is unreachable;
	     redirect it to __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_get_create_node
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (cgraph_dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  cgraph_make_edge_direct (edge, target);
	  cgraph_redirect_edge_call_stmt_to_callee (edge);
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (cgraph_dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
910
911
912 /* Discover all functions and variables that are trivially needed, analyze
913 them as well as all functions and variables referred by them */
914
915 static void
916 analyze_functions (void)
917 {
918 /* Keep track of already processed nodes when called multiple times for
919 intermodule optimization. */
920 static struct cgraph_node *first_analyzed;
921 struct cgraph_node *first_handled = first_analyzed;
922 static struct varpool_node *first_analyzed_var;
923 struct varpool_node *first_handled_var = first_analyzed_var;
924 struct pointer_set_t *reachable_call_targets = pointer_set_create ();
925
926 symtab_node *node;
927 symtab_node *next;
928 int i;
929 struct ipa_ref *ref;
930 bool changed = true;
931 location_t saved_loc = input_location;
932
933 bitmap_obstack_initialize (NULL);
934 cgraph_state = CGRAPH_STATE_CONSTRUCTION;
935 input_location = UNKNOWN_LOCATION;
936
937 /* Ugly, but the fixup can not happen at a time same body alias is created;
938 C++ FE is confused about the COMDAT groups being right. */
939 if (cpp_implicit_aliases_done)
940 FOR_EACH_SYMBOL (node)
941 if (node->cpp_implicit_alias)
942 fixup_same_cpp_alias_visibility (node, symtab_alias_target (node));
943 if (optimize && flag_devirtualize)
944 build_type_inheritance_graph ();
945
946 /* Analysis adds static variables that in turn adds references to new functions.
947 So we need to iterate the process until it stabilize. */
948 while (changed)
949 {
950 changed = false;
951 process_function_and_variable_attributes (first_analyzed,
952 first_analyzed_var);
953
954 /* First identify the trivially needed symbols. */
955 for (node = symtab_nodes;
956 node != first_analyzed
957 && node != first_analyzed_var; node = node->next)
958 {
959 if (decide_is_symbol_needed (node))
960 {
961 enqueue_node (node);
962 if (!changed && cgraph_dump_file)
963 fprintf (cgraph_dump_file, "Trivially needed symbols:");
964 changed = true;
965 if (cgraph_dump_file)
966 fprintf (cgraph_dump_file, " %s", symtab_node_asm_name (node));
967 if (!changed && cgraph_dump_file)
968 fprintf (cgraph_dump_file, "\n");
969 }
970 if (node == first_analyzed
971 || node == first_analyzed_var)
972 break;
973 }
974 cgraph_process_new_functions ();
975 first_analyzed_var = varpool_first_variable ();
976 first_analyzed = cgraph_first_function ();
977
978 if (changed && dump_file)
979 fprintf (cgraph_dump_file, "\n");
980
981 /* Lower representation, build callgraph edges and references for all trivially
982 needed symbols and all symbols referred by them. */
983 while (first != (symtab_node *)(void *)1)
984 {
985 changed = true;
986 node = first;
987 first = (symtab_node *)first->aux;
988 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
989 if (cnode && cnode->definition)
990 {
991 struct cgraph_edge *edge;
992 tree decl = cnode->decl;
993
994 /* ??? It is possible to create extern inline function
995 and later using weak alias attribute to kill its body.
996 See gcc.c-torture/compile/20011119-1.c */
997 if (!DECL_STRUCT_FUNCTION (decl)
998 && !cnode->alias
999 && !cnode->thunk.thunk_p
1000 && !cnode->dispatcher_function)
1001 {
1002 cgraph_reset_node (cnode);
1003 cnode->local.redefined_extern_inline = true;
1004 continue;
1005 }
1006
1007 if (!cnode->analyzed)
1008 analyze_function (cnode);
1009
1010 for (edge = cnode->callees; edge; edge = edge->next_callee)
1011 if (edge->callee->definition)
1012 enqueue_node (edge->callee);
1013 if (optimize && flag_devirtualize)
1014 {
1015 struct cgraph_edge *next;
1016
1017 for (edge = cnode->indirect_calls; edge; edge = next)
1018 {
1019 next = edge->next_callee;
1020 if (edge->indirect_info->polymorphic)
1021 walk_polymorphic_call_targets (reachable_call_targets,
1022 edge);
1023 }
1024 }
1025
1026 /* If decl is a clone of an abstract function,
1027 mark that abstract function so that we don't release its body.
1028 The DECL_INITIAL() of that abstract function declaration
1029 will be later needed to output debug info. */
1030 if (DECL_ABSTRACT_ORIGIN (decl))
1031 {
1032 struct cgraph_node *origin_node
1033 = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
1034 origin_node->used_as_abstract_origin = true;
1035 }
1036 }
1037 else
1038 {
1039 varpool_node *vnode = dyn_cast <varpool_node> (node);
1040 if (vnode && vnode->definition && !vnode->analyzed)
1041 varpool_analyze_node (vnode);
1042 }
1043
1044 if (node->same_comdat_group)
1045 {
1046 symtab_node *next;
1047 for (next = node->same_comdat_group;
1048 next != node;
1049 next = next->same_comdat_group)
1050 enqueue_node (next);
1051 }
1052 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
1053 if (ref->referred->definition)
1054 enqueue_node (ref->referred);
1055 cgraph_process_new_functions ();
1056 }
1057 }
1058 if (optimize && flag_devirtualize)
1059 update_type_inheritance_graph ();
1060
1061 /* Collect entry points to the unit. */
1062 if (cgraph_dump_file)
1063 {
1064 fprintf (cgraph_dump_file, "\n\nInitial ");
1065 dump_symtab (cgraph_dump_file);
1066 }
1067
1068 if (cgraph_dump_file)
1069 fprintf (cgraph_dump_file, "\nRemoving unused symbols:");
1070
1071 for (node = symtab_nodes;
1072 node != first_handled
1073 && node != first_handled_var; node = next)
1074 {
1075 next = node->next;
1076 if (!node->aux && !referred_to_p (node))
1077 {
1078 if (cgraph_dump_file)
1079 fprintf (cgraph_dump_file, " %s", symtab_node_name (node));
1080 symtab_remove_node (node);
1081 continue;
1082 }
1083 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
1084 {
1085 tree decl = node->decl;
1086
1087 if (cnode->definition && !gimple_has_body_p (decl)
1088 && !cnode->alias
1089 && !cnode->thunk.thunk_p)
1090 cgraph_reset_node (cnode);
1091
1092 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1093 || cnode->alias
1094 || gimple_has_body_p (decl));
1095 gcc_assert (cnode->analyzed == cnode->definition);
1096 }
1097 node->aux = NULL;
1098 }
1099 for (;node; node = node->next)
1100 node->aux = NULL;
1101 first_analyzed = cgraph_first_function ();
1102 first_analyzed_var = varpool_first_variable ();
1103 if (cgraph_dump_file)
1104 {
1105 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1106 dump_symtab (cgraph_dump_file);
1107 }
1108 bitmap_obstack_release (NULL);
1109 pointer_set_destroy (reachable_call_targets);
1110 ggc_collect ();
1111 /* Initialize assembler name hash, in particular we want to trigger C++
1112 mangling and same body alias creation before we free DECL_ARGUMENTS
1113 used by it. */
1114 if (!seen_error ())
1115 symtab_initialize_asm_name_hash ();
1116
1117 input_location = saved_loc;
1118 }
1119
1120 /* Translate the ugly representation of aliases as alias pairs into nice
1121 representation in callgraph. We don't handle all cases yet,
1122 unfortunately. */
1123
1124 static void
1125 handle_alias_pairs (void)
1126 {
1127 alias_pair *p;
1128 unsigned i;
1129
1130 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1131 {
1132 symtab_node *target_node = symtab_node_for_asm (p->target);
1133
1134 /* Weakrefs with target not defined in current unit are easy to handle:
1135 they behave just as external variables except we need to note the
1136 alias flag to later output the weakref pseudo op into asm file. */
1137 if (!target_node
1138 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1139 {
1140 symtab_node *node = symtab_get_node (p->decl);
1141 if (node)
1142 {
1143 node->alias_target = p->target;
1144 node->weakref = true;
1145 node->alias = true;
1146 }
1147 alias_pairs->unordered_remove (i);
1148 continue;
1149 }
1150 else if (!target_node)
1151 {
1152 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1153 symtab_node *node = symtab_get_node (p->decl);
1154 if (node)
1155 node->alias = false;
1156 alias_pairs->unordered_remove (i);
1157 continue;
1158 }
1159
1160 if (DECL_EXTERNAL (target_node->decl)
1161 /* We use local aliases for C++ thunks to force the tailcall
1162 to bind locally. This is a hack - to keep it working do
1163 the following (which is not strictly correct). */
1164 && (! TREE_CODE (target_node->decl) == FUNCTION_DECL
1165 || ! DECL_VIRTUAL_P (target_node->decl))
1166 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1167 {
1168 error ("%q+D aliased to external symbol %qE",
1169 p->decl, p->target);
1170 }
1171
1172 if (TREE_CODE (p->decl) == FUNCTION_DECL
1173 && target_node && is_a <cgraph_node> (target_node))
1174 {
1175 struct cgraph_node *src_node = cgraph_get_node (p->decl);
1176 if (src_node && src_node->definition)
1177 cgraph_reset_node (src_node);
1178 cgraph_create_function_alias (p->decl, target_node->decl);
1179 alias_pairs->unordered_remove (i);
1180 }
1181 else if (TREE_CODE (p->decl) == VAR_DECL
1182 && target_node && is_a <varpool_node> (target_node))
1183 {
1184 varpool_create_variable_alias (p->decl, target_node->decl);
1185 alias_pairs->unordered_remove (i);
1186 }
1187 else
1188 {
1189 error ("%q+D alias in between function and variable is not supported",
1190 p->decl);
1191 warning (0, "%q+D aliased declaration",
1192 target_node->decl);
1193 alias_pairs->unordered_remove (i);
1194 }
1195 }
1196 vec_free (alias_pairs);
1197 }
1198
1199
1200 /* Figure out what functions we want to assemble. */
1201
static void
mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No function may be marked for output before this pass runs.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* A node can already be marked only through the same_comdat_group
	 propagation done below for an earlier member of its group.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Comdat group members must be emitted together: walk the
	     circular same_comdat_group list and mark the non-thunk,
	     non-alias members (thunks/aliases are emitted alongside
	     their target by expand_function).  */
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = cgraph (node->same_comdat_group);
		   next != node;
		   next = cgraph (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias)
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Re-checked after the main walk, once the whole group has had
	     a chance to be marked.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Verify that every unmarked comdat-group member legitimately has no
     body left to output.  */
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1299
1300 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1301 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1302
1303 Set current_function_decl and cfun to newly constructed empty function body.
1304 return basic block in the function body. */
1305
basic_block
init_lowered_empty_function (tree decl, bool in_ssa)
{
  basic_block bb;

  /* Make DECL the current function and give it an (empty) CFG.
     The call order matters: allocate_struct_function sets up cfun,
     which the CFG hooks and init_empty_tree_cfg rely on.  */
  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();

  if (in_ssa)
    {
      /* Caller wants the body in SSA form right away; set up the SSA
	 machinery and record the property bits.  */
      init_tree_ssa (cfun);
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  /* The back end expects DECL_INITIAL to contain a BLOCK.  */
  DECL_INITIAL (decl) = make_node (BLOCK);

  /* Mark the body as already-lowered GIMPLE with CFG and loop info.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  set_loops_for_fn (cfun, ggc_alloc_cleared_loops ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
  make_edge (bb, EXIT_BLOCK_PTR, 0);
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR->loop_father);

  return bb;
}
1342
1343 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1344 offset indicated by VIRTUAL_OFFSET, if that is
1345 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1346 zero for a result adjusting thunk. */
1347
static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk the constant offset is applied before
     the virtual lookup; for a result-adjusting thunk it is applied
     after (see the !this_adjusting branch below).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
	       (ptr, fold_build_pointer_plus_hwi_loc (input_location,
						      ptr,
						      fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily build the type used for vtable entries (pointer to a
	 dummy function type).  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      /* Reuse PTR directly when it is already a VAR_DECL; otherwise
	 copy it into a fresh temporary first.  */
      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1446
1447 /* Expand thunk NODE to gimple if possible.
1448 When OUTPUT_ASM_THUNK is true, also produce assembler for
1449 thunks that are not lowered. */
1450
bool
expand_thunk (struct cgraph_node *node, bool output_asm_thunks)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  /* A thunk has exactly one callee: the function it forwards to.  */
  tree alias = node->callees->callee->decl;
  tree thunk_fndecl = node->decl;
  tree a;


  /* Fast path: if the target can emit a this-adjusting thunk directly
     as assembly, do that (when OUTPUT_ASM_THUNKS allows) instead of
     building a GIMPLE body.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      if (!output_asm_thunks)
	return false;

      if (in_lto_p)
	cgraph_get_body (node);
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      /* The thunk is fully emitted; drop its thunk status so it is not
	 processed again.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      node->thunk.thunk_p = false;
      node->analyzed = false;
    }
  else
    {
      /* Slow path: lower the thunk to an explicit GIMPLE body that
	 adjusts `this' (and/or the result) and tail-calls ALIAS.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      vec<tree> vargs;

      gimple call;
      gimple ret;

      if (in_lto_p)
	cgraph_get_body (node);
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
          DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      /* Start with a single-block body in SSA form; extra blocks are
	 added below only for the NULL-pointer guard.  */
      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (DECL_BY_REFERENCE (resdecl))
	    restmp = gimple_fold_indirect_ref (resdecl);
	  else if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate result: return directly into the result decl.  */
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
            restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      vargs.create (nargs);
      /* The first argument (`this') is either adjusted in place or
	 forwarded unchanged; remaining arguments are forwarded as-is.  */
      if (this_adjusting)
        vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					virtual_offset));
      else if (nargs)
        vargs.quick_push (a);

      if (nargs)
	for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
	  vargs.quick_push (arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      node->callees->call_stmt = call;
      vargs.release ();
      gimple_call_set_from_thunk (call, true);
      if (restmp)
	{
          gimple_call_set_lhs (call, restmp);
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
						 TREE_TYPE (TREE_TYPE (alias))));
	}
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!(gimple_call_flags (call) & ECF_NORETURN))
	{
	  if (restmp && !this_adjusting
	      && (fixed_offset || virtual_offset))
	    {
	      tree true_label = NULL_TREE;

	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
		{
		  gimple stmt;
		  /* If the return type is a pointer, we need to
		     protect against NULL.  We know there will be an
		     adjustment, because that's why we're emitting a
		     thunk.  */
		  then_bb = create_basic_block (NULL, (void *) 0, bb);
		  return_bb = create_basic_block (NULL, (void *) 0, then_bb);
		  else_bb = create_basic_block (NULL, (void *) 0, else_bb);
		  add_bb_to_loop (then_bb, bb->loop_father);
		  add_bb_to_loop (return_bb, bb->loop_father);
		  add_bb_to_loop (else_bb, bb->loop_father);
		  remove_edge (single_succ_edge (bb));
		  true_label = gimple_block_label (then_bb);
		  stmt = gimple_build_cond (NE_EXPR, restmp,
					    build_zero_cst (TREE_TYPE (restmp)),
					    NULL_TREE, NULL_TREE);
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
		  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
		  make_edge (return_bb, EXIT_BLOCK_PTR, 0);
		  make_edge (then_bb, return_bb, EDGE_FALLTHRU);
		  make_edge (else_bb, return_bb, EDGE_FALLTHRU);
		  bsi = gsi_last_bb (then_bb);
		}

	      /* Apply the result adjustment on the non-NULL path.  */
	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				     fixed_offset, virtual_offset);
	      if (true_label)
		{
		  gimple stmt;
		  /* On the NULL path the result stays NULL.  */
		  bsi = gsi_last_bb (else_bb);
		  stmt = gimple_build_assign (restmp,
					      build_zero_cst (TREE_TYPE (restmp)));
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  bsi = gsi_last_bb (return_bb);
		}
	    }
	  else
	    /* No result adjustment needed: the call can be a tail call.  */
	    gimple_call_set_tail (call, true);

	  /* Build return value.  */
	  ret = gimple_build_return (restmp);
	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
	}
      else
	{
	  /* Noreturn callee: no return statement; drop the edge to exit.  */
	  gimple_call_set_tail (call, true);
	  remove_edge (single_succ_edge (bb));
	}

      cfun->gimple_df->in_ssa_p = true;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
      verify_flow_info ();
#endif

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      node->thunk.thunk_p = false;
      node->lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}
1676
1677 /* Assemble thunks and aliases associated to NODE. */
1678
1679 static void
1680 assemble_thunks_and_aliases (struct cgraph_node *node)
1681 {
1682 struct cgraph_edge *e;
1683 int i;
1684 struct ipa_ref *ref;
1685
1686 for (e = node->callers; e;)
1687 if (e->caller->thunk.thunk_p)
1688 {
1689 struct cgraph_node *thunk = e->caller;
1690
1691 e = e->next_caller;
1692 assemble_thunks_and_aliases (thunk);
1693 expand_thunk (thunk, true);
1694 }
1695 else
1696 e = e->next_caller;
1697 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
1698 i, ref); i++)
1699 if (ref->use == IPA_REF_ALIAS)
1700 {
1701 struct cgraph_node *alias = ipa_ref_referring_node (ref);
1702 bool saved_written = TREE_ASM_WRITTEN (node->decl);
1703
1704 /* Force assemble_alias to really output the alias this time instead
1705 of buffering it in same alias pairs. */
1706 TREE_ASM_WRITTEN (node->decl) = 1;
1707 do_assemble_alias (alias->decl,
1708 DECL_ASSEMBLER_NAME (node->decl));
1709 assemble_thunks_and_aliases (alias);
1710 TREE_ASM_WRITTEN (node->decl) = saved_written;
1711 }
1712 }
1713
1714 /* Expand function specified by NODE. */
1715
static void
expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  gcc_assert (node->lowered);
  /* Bring the GIMPLE body into memory (may read it back from LTO or
     on-disk storage).  */
  cgraph_get_body (node);

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (cgraph_global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply the transformations queued by earlier IPA passes before
     running the per-function pipeline.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Report the exact size when it fits in an int, otherwise
	     just report that it exceeds the threshold.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_get_node (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  set_cfun (NULL);
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before function body to get more
     forward and lest backwarding jumps.  This however would need solving problem
     with comdats.  See PR48668.  Also aliases must come after function itself to
     make one pass assemblers, like one on AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be move before function IFF they are not in comdat
     groups.  */
  assemble_thunks_and_aliases (node);
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);
}
1826
1827
1828 /* Expand all functions that must be output.
1829
1830 Attempt to topologically sort the nodes so function is output when
1831 all called functions are already assembled to allow data to be
1832 propagated across the callgraph. Use a stack to get smaller distance
1833 between a function and its callees (later we may choose to use a more
1834 sophisticated algorithm for function reordering; we will likely want
1835 to use subsections to make the output functions appear in top-down
1836 order). */
1837
1838 static void
1839 expand_all_functions (void)
1840 {
1841 struct cgraph_node *node;
1842 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1843 int order_pos, new_order_pos = 0;
1844 int i;
1845
1846 order_pos = ipa_reverse_postorder (order);
1847 gcc_assert (order_pos == cgraph_n_nodes);
1848
1849 /* Garbage collector may remove inline clones we eliminate during
1850 optimization. So we must be sure to not reference them. */
1851 for (i = 0; i < order_pos; i++)
1852 if (order[i]->process)
1853 order[new_order_pos++] = order[i];
1854
1855 for (i = new_order_pos - 1; i >= 0; i--)
1856 {
1857 node = order[i];
1858 if (node->process)
1859 {
1860 node->process = 0;
1861 expand_function (node);
1862 }
1863 }
1864 cgraph_process_new_functions ();
1865
1866 free (order);
1867
1868 }
1869
1870 /* This is used to sort the node types by the cgraph order number. */
1871
/* Kind tag for a cgraph_order_sort slot; ORDER_UNDEFINED marks an
   unused slot.  */
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,
  ORDER_FUNCTION,
  ORDER_VAR,
  ORDER_ASM
};
1879
/* One slot in the order-indexed table built by output_in_order: a kind
   tag plus a pointer to the corresponding function, variable, or asm
   node (the union member selected by KIND).  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;
    struct varpool_node *v;
    struct asm_node *a;
  } u;
};
1890
1891 /* Output all functions, variables, and asm statements in the order
1892 according to their order fields, which is the order in which they
1893 appeared in the file. This implements -fno-toplevel-reorder. In
1894 this mode we may output functions and variables which don't really
1895 need to be output. */
1896
static void
output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct asm_node *pa;

  /* Every symtab entity has a unique order number below symtab_order,
     so a flat table indexed by order reconstructs source order.  */
  max = symtab_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  /* Slot in the functions selected for output (skipping thunks and
     aliases, which are emitted with their targets).  */
  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Slot in all non-external defined variables.  */
  FOR_EACH_DEFINED_VARIABLE (pv)
    if (!DECL_EXTERNAL (pv->decl))
      {
	i = pv->order;
	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	nodes[i].kind = ORDER_VAR;
	nodes[i].u.v = pv;
      }

  /* Slot in toplevel asm statements.  */
  for (pa = asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      varpool_finalize_named_section_flags (nodes[i].u.v);

  /* Emit everything in order-number sequence.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  asm_nodes = NULL;
  free (nodes);
}
1972
/* Run the interprocedural (IPA) pass pipeline: small IPA passes,
   summary generation, LTO stream-out, and (when applicable) the
   regular IPA passes.  */
static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* Small IPA passes only run when compiling from source, not when
     reading back LTO summaries.  */
  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      if (seen_error ())
	return;
    }

  /* We never run removal of unreachable nodes after early passes.  This is
     because TODO is run before the subpasses.  It is important to remove
     the unreachable functions to save works at IPA level and to get LTO
     symbol tables right.  */
  symtab_remove_unreachable_nodes (true, cgraph_dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *)
			      passes->all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes except in the WPA->LTRANS split, where
     they are deferred to the LTRANS stage.  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2036
2037
2038 /* Return string alias is alias of. */
2039
2040 static tree
2041 get_alias_symbol (tree decl)
2042 {
2043 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2044 return get_identifier (TREE_STRING_POINTER
2045 (TREE_VALUE (TREE_VALUE (alias))));
2046 }
2047
2048
/* Weakrefs may be associated to external decls and thus not output
   at expansion time.  Emit all necessary aliases.  */

static void
output_weakrefs (void)
{
  symtab_node *node;
  /* Walk every symbol and emit an alias directive for each weakref
     whose decl the expanders have not already written out.  */
  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& !TREE_ASM_WRITTEN (node->decl)
	&& node->weakref)
      {
	tree target;

	/* Weakrefs are special by not requiring target definition in current
	   compilation unit.  It is thus bit hard to work out what we want to
	   alias.
	   When alias target is defined, we need to fetch it from symtab
	   reference, otherwise it is pointed to by alias_target.  */
	if (node->alias_target)
	  target = (DECL_P (node->alias_target)
		    ? DECL_ASSEMBLER_NAME (node->alias_target)
		    : node->alias_target);
	else if (node->analyzed)
	  target = DECL_ASSEMBLER_NAME (symtab_alias_target (node)->decl);
	else
	  {
	    /* NOTE(review): the assignment below is dead code —
	       gcc_unreachable () either aborts (checking builds) or is
	       __builtin_unreachable (release builds), so control never
	       reaches it.  It appears to be kept as documentation of
	       the historical fallback; confirm before removing.  */
	    gcc_unreachable ();
	    target = get_alias_symbol (node->decl);
	  }
	do_assemble_alias (node->decl, target);
      }
}
2082
2083 /* Initialize callgraph dump file. */
2084
2085 void
2086 init_cgraph (void)
2087 {
2088 if (!cgraph_dump_file)
2089 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2090 }
2091
2092
/* Perform simple optimizations based on callgraph.  This is the main
   driver of the middle end: it runs the IPA passes, then expands all
   functions and variables to assembler output.  */

void
compile (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_lto)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just
     streaming LTO (compile step of -flto without fat objects).  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  symtab_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_symtab (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  /* Turn virtual clones created by IPA passes into real function
     bodies, then run the late (small) IPA passes and drop anything
     they made unreachable before deciding what to output.  */
  cgraph_materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  symtab_remove_unreachable_nodes (true, dump_file);
#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	   = (node->alias_target ? node->alias_target
	      : DECL_ASSEMBLER_NAME (symtab_alias_target (node)->decl));
      }
#endif

  /* Expand functions (and output variables/asms): either strictly in
     source order for -fno-toplevel-reorder, or grouped by kind.  */
  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    output_in_order ();
  else
    {
      output_asm_statements ();

      expand_all_functions ();
      varpool_output_variables ();
    }

  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;
  /* Weakref aliases may attach to external decls that expansion never
     emitted; output them last.  */
  output_weakrefs ();

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_symtab (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_symtab ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2232
2233
/* Analyze the whole compilation unit once it is parsed completely.
   Called by the front end when no further declarations will arrive;
   lowers all finalized functions and hands off to the pass manager
   via compile ().  */

void
finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  Run a second
     time because analysis above may have discovered new alias pairs.  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  Analysis runs twice deliberately: the
     second round picks up thunks and symbols added by the first.  */
  analyze_functions ();

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2279
2280
2281 #include "gt-cgraphunit.h"