cgraphunit.c (assemble_thunks_and_aliases): Expand thunks before outputting aliases.
[gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - cgraph_finalize_function
29
30 This function is called once front-end has parsed whole body of function
31 and it is certain that the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
 66      indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
 78      The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
 86      time.  These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
 98       d) LTO streaming.  When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
 110	   is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies becomes available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
 121      IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that needs to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known what variable references was not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
 151      The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158 */
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "varasm.h"
166 #include "stor-layout.h"
167 #include "stringpool.h"
168 #include "output.h"
169 #include "rtl.h"
170 #include "basic-block.h"
171 #include "tree-ssa-alias.h"
172 #include "internal-fn.h"
173 #include "gimple-fold.h"
174 #include "gimple-expr.h"
175 #include "is-a.h"
176 #include "gimple.h"
177 #include "gimplify.h"
178 #include "gimple-iterator.h"
179 #include "gimplify-me.h"
180 #include "gimple-ssa.h"
181 #include "tree-cfg.h"
182 #include "tree-into-ssa.h"
183 #include "tree-ssa.h"
184 #include "tree-inline.h"
185 #include "langhooks.h"
186 #include "toplev.h"
187 #include "flags.h"
188 #include "debug.h"
189 #include "target.h"
190 #include "diagnostic.h"
191 #include "params.h"
192 #include "fibheap.h"
193 #include "intl.h"
194 #include "function.h"
195 #include "ipa-prop.h"
196 #include "tree-iterator.h"
197 #include "tree-pass.h"
198 #include "tree-dump.h"
199 #include "gimple-pretty-print.h"
200 #include "output.h"
201 #include "coverage.h"
202 #include "plugin.h"
203 #include "ipa-inline.h"
204 #include "ipa-utils.h"
205 #include "lto-streamer.h"
206 #include "except.h"
207 #include "cfgloop.h"
208 #include "regset.h" /* FIXME: For reg_obstack. */
209 #include "context.h"
210 #include "pass_manager.h"
211 #include "tree-nested.h"
212 #include "gimplify.h"
213 #include "dbgcnt.h"
214
/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
cgraph_node_set cgraph_new_nodes;

/* Forward declarations for the local stages of the compilation driver
   defined later in this file.  */
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void expand_function (struct cgraph_node *);
static void analyze_function (struct cgraph_node *);
static void handle_alias_pairs (void);

/* Stream used for callgraph dump output; NULL when dumping is disabled.  */
FILE *cgraph_dump_file;

/* Linked list of cgraph asm nodes (toplevel asm statements).  */
struct asm_node *asm_nodes;

/* Last node in cgraph_asm_nodes; kept so add_asm_node can append in O(1).  */
static GTY(()) struct asm_node *asm_last_node;

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;
236
237 /* Determine if symbol DECL is needed. That is, visible to something
238 either outside this translation unit, something magic in the system
239 configury */
240 bool
241 decide_is_symbol_needed (symtab_node *node)
242 {
243 tree decl = node->decl;
244
245 /* Double check that no one output the function into assembly file
246 early. */
247 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
248 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
249
250 if (!node->definition)
251 return false;
252
253 if (DECL_EXTERNAL (decl))
254 return false;
255
256 /* If the user told us it is used, then it must be so. */
257 if (node->force_output)
258 return true;
259
260 /* ABI forced symbols are needed when they are external. */
261 if (node->forced_by_abi && TREE_PUBLIC (decl))
262 return true;
263
264 /* Keep constructors, destructors and virtual functions. */
265 if (TREE_CODE (decl) == FUNCTION_DECL
266 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
267 return true;
268
269 /* Externally visible variables must be output. The exception is
270 COMDAT variables that must be output only when they are needed. */
271 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
272 return true;
273
274 return false;
275 }
276
277 /* Head and terminator of the queue of nodes to be processed while building
278 callgraph. */
279
280 static symtab_node symtab_terminator;
281 static symtab_node *queued_nodes = &symtab_terminator;
282
283 /* Add NODE to queue starting at QUEUED_NODES.
284 The queue is linked via AUX pointers and terminated by pointer to 1. */
285
286 static void
287 enqueue_node (symtab_node *node)
288 {
289 if (node->aux)
290 return;
291 gcc_checking_assert (queued_nodes);
292 node->aux = queued_nodes;
293 queued_nodes = node;
294 }
295
/* Process the queue of newly added functions (CGRAPH_NEW_NODES) and perform
   actions necessary to add them into the callgraph in a way so they look
   like ordinary reachable functions inserted into the callgraph already at
   construction time.  What needs to be done depends on how far compilation
   (CGRAPH_STATE) had progressed when each function appeared.  */

void
cgraph_process_new_functions (void)
{
  tree fndecl;
  struct cgraph_node *node;
  cgraph_node_set_iterator csi;

  if (!cgraph_new_nodes)
    return;
  /* Resolve "alias" attribute pairs first so aliases among the new
     functions are represented in the symbol table.  */
  handle_alias_pairs ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      fndecl = node->decl;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_finalize_function (fndecl, false);
	  cgraph_call_function_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization has already started, do all essential
	     transformations that have been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* Run the early local passes to bring the body into SSA when
	     the rest of the unit already is; otherwise just (re)compute
	     the inline summary if inlining data is in use.  */
	  if (cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summary_vec != NULL)
	    compute_inline_parameters (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  cgraph_call_function_insertion_hooks (node);
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_call_function_insertion_hooks (node);
	  expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }
  free_cgraph_node_set (cgraph_new_nodes);
  cgraph_new_nodes = NULL;
}
364
365 /* As an GCC extension we allow redefinition of the function. The
366 semantics when both copies of bodies differ is not well defined.
367 We replace the old body with new body so in unit at a time mode
368 we always use new body, while in normal mode we may end up with
369 old body inlined into some functions and new body expanded and
370 inlined in others.
371
372 ??? It may make more sense to use one body for inlining and other
373 body for expanding the function but this is difficult to do. */
374
375 void
376 cgraph_reset_node (struct cgraph_node *node)
377 {
378 /* If node->process is set, then we have already begun whole-unit analysis.
379 This is *not* testing for whether we've already emitted the function.
380 That case can be sort-of legitimately seen with real function redefinition
381 errors. I would argue that the front end should never present us with
382 such a case, but don't enforce that for now. */
383 gcc_assert (!node->process);
384
385 /* Reset our data structures so we can analyze the function again. */
386 memset (&node->local, 0, sizeof (node->local));
387 memset (&node->global, 0, sizeof (node->global));
388 memset (&node->rtl, 0, sizeof (node->rtl));
389 node->analyzed = false;
390 node->definition = false;
391 node->alias = false;
392 node->weakref = false;
393 node->cpp_implicit_alias = false;
394
395 cgraph_node_remove_callees (node);
396 ipa_remove_all_references (&node->ref_list);
397 }
398
399 /* Return true when there are references to NODE. */
400
401 static bool
402 referred_to_p (symtab_node *node)
403 {
404 struct ipa_ref *ref;
405
406 /* See if there are any references at all. */
407 if (ipa_ref_list_referring_iterate (&node->ref_list, 0, ref))
408 return true;
409 /* For functions check also calls. */
410 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
411 if (cn && cn->callers)
412 return true;
413 return false;
414 }
415
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to
   have the garbage collector run at the moment.  We would need to either
   create a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool no_collect)
{
  struct cgraph_node *node = cgraph_get_create_node (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      /* This is a redefinition (the GCC extern inline extension); drop the
	 old body and analysis so the new body is used from here on.  */
      cgraph_reset_node (node);
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->definition = true;
  /* The frontend may hand us a body that is already lowered to CFG form.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if ((!optimize
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!no_collect)
    ggc_collect ();

  /* While the callgraph is still under construction, queue the symbol
     right away if it is clearly needed or already referenced.  */
  if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
      && (decide_is_symbol_needed (node)
	  || referred_to_p (node)))
    enqueue_node (node);
}
477
/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  struct cgraph_node *node;
  switch (cgraph_state)
    {
    case CGRAPH_STATE_PARSING:
      /* Still parsing: treat the function like any other
	 frontend-finalized one.  */
      cgraph_finalize_function (fndecl, false);
      break;
    case CGRAPH_STATE_CONSTRUCTION:
      /* Just enqueue function to be processed at nearest occurrence.  */
      node = cgraph_get_create_node (fndecl);
      if (lowered)
	node->lowered = true;
      if (!cgraph_new_nodes)
	cgraph_new_nodes = cgraph_node_set_new ();
      cgraph_node_set_add (cgraph_new_nodes, node);
      break;

    case CGRAPH_STATE_IPA:
    case CGRAPH_STATE_IPA_SSA:
    case CGRAPH_STATE_EXPANSION:
      /* Bring the function into finalized state and enqueue for later
	 analyzing and compilation.  */
      node = cgraph_get_create_node (fndecl);
      node->local.local = false;
      node->definition = true;
      node->force_output = true;
      if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
	{
	  /* Expansion is already underway, so lower the body now instead
	     of waiting for the normal lowering walk.  */
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, passes->all_lowering_passes);
	  passes->execute_early_local_passes ();
	  bitmap_obstack_release (NULL);
	  pop_cfun ();

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      if (!cgraph_new_nodes)
	cgraph_new_nodes = cgraph_node_set_new ();
      cgraph_node_set_add (cgraph_new_nodes, node);
      break;

    case CGRAPH_STATE_FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_create_node (fndecl);
      if (lowered)
	node->lowered = true;
      node->definition = true;
      analyze_function (node);
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	g->get_passes ()->execute_early_local_passes ();
      bitmap_obstack_release (NULL);
      pop_cfun ();
      expand_function (node);
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
566
567 /* Add a top-level asm statement to the list. */
568
569 struct asm_node *
570 add_asm_node (tree asm_str)
571 {
572 struct asm_node *node;
573
574 node = ggc_cleared_alloc<asm_node> ();
575 node->asm_str = asm_str;
576 node->order = symtab_order++;
577 node->next = NULL;
578 if (asm_nodes == NULL)
579 asm_nodes = node;
580 else
581 asm_last_node->next = node;
582 asm_last_node = node;
583 return node;
584 }
585
586 /* Output all asm statements we have stored up to be output. */
587
588 static void
589 output_asm_statements (void)
590 {
591 struct asm_node *can;
592
593 if (seen_error ())
594 return;
595
596 for (can = asm_nodes; can; can = can->next)
597 assemble_asm (can->asm_str);
598 asm_nodes = NULL;
599 }
600
/* Analyze the function scheduled to be output.  Depending on the node this
   means expanding a thunk, resolving an alias, generating the dispatcher of
   a multi-versioned function, or gimplifying and lowering an ordinary
   function body.  Sets NODE->analyzed on completion.  */
static void
analyze_function (struct cgraph_node *node)
{
  tree decl = node->decl;
  location_t saved_loc = input_location;
  /* Diagnostics emitted during analysis should point at the function.  */
  input_location = DECL_SOURCE_LOCATION (decl);

  if (node->thunk.thunk_p)
    {
      cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
			  NULL, 0, CGRAPH_FREQ_BASE);
      if (!expand_thunk (node, false, false))
	{
	  /* expand_thunk produced no separate body; nothing more to do.  */
	  node->thunk.alias = NULL;
	  node->analyzed = true;
	  return;
	}
      node->thunk.alias = NULL;
    }
  if (node->alias)
    symtab_resolve_alias
       (node, cgraph_get_node (node->alias_target));
  else if (node->dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      struct cgraph_function_version_info *dispatcher_version_info
	= get_cgraph_node_version (node);
      if (dispatcher_version_info != NULL
	  && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (node);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      /* Ordinary function: gimplify and lower the body.  */
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (node->decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      /* Lower the function.  */
      if (!node->lowered)
	{
	  /* Nested functions must be lowered (materialized as separate
	     functions) before the body itself can be.  */
	  if (node->nested)
	    lower_nested_functions (node->decl);
	  gcc_assert (!node->nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  node->lowered = true;
	}

      pop_cfun ();
    }
  node->analyzed = true;

  input_location = saved_loc;
}
676
677 /* C++ frontend produce same body aliases all over the place, even before PCH
678 gets streamed out. It relies on us linking the aliases with their function
679 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
680 first produce aliases without links, but once C++ FE is sure he won't sream
681 PCH we build the links via this function. */
682
683 void
684 cgraph_process_same_body_aliases (void)
685 {
686 symtab_node *node;
687 FOR_EACH_SYMBOL (node)
688 if (node->cpp_implicit_alias && !node->analyzed)
689 symtab_resolve_alias
690 (node,
691 TREE_CODE (node->alias_target) == VAR_DECL
692 ? (symtab_node *)varpool_node_for_decl (node->alias_target)
693 : (symtab_node *)cgraph_get_create_node (node->alias_target));
694 cpp_implicit_aliases_done = true;
695 }
696
697 /* Process attributes common for vars and functions. */
698
699 static void
700 process_common_attributes (tree decl)
701 {
702 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
703
704 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
705 {
706 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
707 "%<weakref%> attribute should be accompanied with"
708 " an %<alias%> attribute");
709 DECL_WEAK (decl) = 0;
710 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
711 DECL_ATTRIBUTES (decl));
712 }
713 }
714
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

     void f();
     void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

     void f() {}
     void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  FIRST and FIRST_VAR mark where the previous
   walk stopped, so only symbols added since then are visited.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
					  varpool_node *first_var)
{
  struct cgraph_node *node;
  varpool_node *vnode;

  for (node = cgraph_first_function (); node != first;
       node = cgraph_next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	cgraph_mark_force_output_node (node);
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  /* A weakref on a defined function is meaningless; drop it.  */
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (decl);
    }
  for (vnode = varpool_first_variable (); vnode != first_var;
       vnode = varpool_next_variable (vnode))
    {
      tree decl = vnode->decl;
      /* An extern declaration carrying an initializer is really a
	 definition; finalize it now.  */
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  /* A weakref on an initialized variable is meaningless; drop it.  */
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (decl);
    }
}
809
/* Mark DECL as finalized.  By finalizing the declaration, the frontend
   instructs the middle end to output the variable to the asm file, if
   needed or externally visible.  */

void
varpool_finalize_decl (tree decl)
{
  varpool_node *node = varpool_node_for_decl (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Finalizing twice is a no-op.  */
  if (node->definition)
    return;
  notice_global_symbol (decl);
  node->definition = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || (!flag_toplevel_reorder && !DECL_COMDAT (node->decl)
	  && !DECL_ARTIFICIAL (node->decl)))
    node->force_output = true;

  /* While the callgraph is still being built, queue the variable if it
     is clearly needed or already referenced.  */
  if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
      && (decide_is_symbol_needed (node)
	  || referred_to_p (node)))
    enqueue_node (node);
  if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
    varpool_analyze_node (node);
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (cgraph_state == CGRAPH_STATE_FINISHED
      || (!flag_toplevel_reorder && cgraph_state == CGRAPH_STATE_EXPANSION))
    varpool_assemble_decl (node);
}
844
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (pointer_set_t *reachable_call_targets,
			       struct cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	 (edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; enqueue the targets only the
     first time a given list is seen.  */
  if (!pointer_set_insert (reachable_call_targets,
			   cache_token))
    {
      if (cgraph_dump_file)
	dump_possible_polymorphic_call_targets
	   (cgraph_dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		  (TREE_TYPE (targets[i]->decl))
		  == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		   (method_class_type
		     (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    /* No possible targets at all: the call can never execute, so
	       redirect it to __builtin_unreachable.  */
	    target = cgraph_get_create_node
		      (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (cgraph_dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  cgraph_make_edge_direct (edge, target);
	  cgraph_redirect_edge_call_stmt_to_callee (edge);
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (cgraph_dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
928
929
930 /* Discover all functions and variables that are trivially needed, analyze
931 them as well as all functions and variables referred by them */
932
static void
analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_handled = first_analyzed;
  static varpool_node *first_analyzed_var;
  varpool_node *first_handled_var = first_analyzed_var;
  /* Set of polymorphic call targets already enqueued; shared across all
     walk_polymorphic_call_targets invocations below so each target is
     enqueued only once per analysis run.  */
  struct pointer_set_t *reachable_call_targets = pointer_set_create ();

  symtab_node *node;
  symtab_node *next;
  int i;
  struct ipa_ref *ref;
  bool changed = true;
  location_t saved_loc = input_location;

  bitmap_obstack_initialize (NULL);
  cgraph_state = CGRAPH_STATE_CONSTRUCTION;
  input_location = UNKNOWN_LOCATION;

  /* Ugly, but the fixup can not happen at a time same body alias is created;
     C++ FE is confused about the COMDAT groups being right.  */
  if (cpp_implicit_aliases_done)
    FOR_EACH_SYMBOL (node)
      if (node->cpp_implicit_alias)
	  fixup_same_cpp_alias_visibility (node, symtab_alias_target (node));
  if (optimize && flag_devirtualize)
    build_type_inheritance_graph ();

  /* Analysis adds static variables that in turn adds references to new
     functions.  So we need to iterate the process until it stabilizes.  */
  while (changed)
    {
      changed = false;
      process_function_and_variable_attributes (first_analyzed,
						first_analyzed_var);

      /* First identify the trivially needed symbols.  Only the portion of
	 the symbol list added since the previous iteration (i.e. before
	 FIRST_ANALYZED / FIRST_ANALYZED_VAR) is scanned.  */
      for (node = symtab_nodes;
	   node != first_analyzed
	   && node != first_analyzed_var; node = node->next)
	{
	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
	  node->get_comdat_group_id ();
	  if (decide_is_symbol_needed (node))
	    {
	      enqueue_node (node);
	      if (!changed && cgraph_dump_file)
		fprintf (cgraph_dump_file, "Trivially needed symbols:");
	      changed = true;
	      if (cgraph_dump_file)
		fprintf (cgraph_dump_file, " %s", node->asm_name ());
	      /* NOTE(review): this branch is dead — CHANGED was set to true
		 two statements above, so the newline is never printed here;
		 the newline is instead emitted after the loop below.  */
	      if (!changed && cgraph_dump_file)
		fprintf (cgraph_dump_file, "\n");
	    }
	  if (node == first_analyzed
	      || node == first_analyzed_var)
	    break;
	}
      cgraph_process_new_functions ();
      first_analyzed_var = varpool_first_variable ();
      first_analyzed = cgraph_first_function ();

      if (changed && cgraph_dump_file)
	fprintf (cgraph_dump_file, "\n");

      /* Lower representation, build callgraph edges and references for all
	 trivially needed symbols and all symbols referred by them.
	 QUEUED_NODES is a LIFO worklist chained through node->aux.  */
      while (queued_nodes != &symtab_terminator)
	{
	  changed = true;
	  node = queued_nodes;
	  queued_nodes = (symtab_node *)queued_nodes->aux;
	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
	  if (cnode && cnode->definition)
	    {
	      struct cgraph_edge *edge;
	      tree decl = cnode->decl;

	      /* ??? It is possible to create extern inline function
	      and later using weak alias attribute to kill its body.
	      See gcc.c-torture/compile/20011119-1.c  */
	      if (!DECL_STRUCT_FUNCTION (decl)
		  && !cnode->alias
		  && !cnode->thunk.thunk_p
		  && !cnode->dispatcher_function)
		{
		  cgraph_reset_node (cnode);
		  cnode->local.redefined_extern_inline = true;
		  continue;
		}

	      if (!cnode->analyzed)
		analyze_function (cnode);

	      /* Enqueue all defined callees so their bodies get analyzed
		 too; this is what makes the discovery transitive.  */
	      for (edge = cnode->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition)
		  enqueue_node (edge->callee);
	      if (optimize && flag_devirtualize)
		{
		  struct cgraph_edge *next;

		  /* Walk NEXT by hand: walk_polymorphic_call_targets may
		     turn the indirect edge into a direct one, invalidating
		     the iteration otherwise.  */
		  for (edge = cnode->indirect_calls; edge; edge = next)
		    {
		      next = edge->next_callee;
		      if (edge->indirect_info->polymorphic)
			walk_polymorphic_call_targets (reachable_call_targets,
						       edge);
		    }
		}

	      /* If decl is a clone of an abstract function,
	      mark that abstract function so that we don't release its body.
	      The DECL_INITIAL() of that abstract function declaration
	      will be later needed to output debug info.  */
	      if (DECL_ABSTRACT_ORIGIN (decl))
		{
		  struct cgraph_node *origin_node
		    = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
		  origin_node->used_as_abstract_origin = true;
		}
	    }
	  else
	    {
	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
	      if (vnode && vnode->definition && !vnode->analyzed)
		varpool_analyze_node (vnode);
	    }

	  /* COMDAT group members must be kept or dropped together, so
	     reaching one member makes the whole ring reachable.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		enqueue_node (next);
	    }
	  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
	    if (ref->referred->definition)
	      enqueue_node (ref->referred);
	  cgraph_process_new_functions ();
	}
    }
  if (optimize && flag_devirtualize)
    update_type_inheritance_graph ();

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_symtab (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nRemoving unused symbols:");

  /* Sweep: anything never enqueued (node->aux unset) and not referred to
     is unreachable and can be removed from the symbol table.  */
  for (node = symtab_nodes;
       node != first_handled
       && node != first_handled_var; node = next)
    {
      next = node->next;
      if (!node->aux && !referred_to_p (node))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", node->name ());
	  symtab_remove_node (node);
	  continue;
	}
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  tree decl = node->decl;

	  /* A "definition" without an actual GIMPLE body (and that is not
	     an alias or thunk) is stale; reset it back to a declaration.  */
	  if (cnode->definition && !gimple_has_body_p (decl)
	      && !cnode->alias
	      && !cnode->thunk.thunk_p)
	    cgraph_reset_node (cnode);

	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
		      || cnode->alias
		      || gimple_has_body_p (decl));
	  gcc_assert (cnode->analyzed == cnode->definition);
	}
      node->aux = NULL;
    }
  /* Clear the aux worklist links on the older (already handled) tail of
     the symbol list as well.  */
  for (;node; node = node->next)
    node->aux = NULL;
  first_analyzed = cgraph_first_function ();
  first_analyzed_var = varpool_first_variable ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_symtab (cgraph_dump_file);
    }
  bitmap_obstack_release (NULL);
  pointer_set_destroy (reachable_call_targets);
  ggc_collect ();
  /* Initialize assembler name hash, in particular we want to trigger C++
     mangling and same body alias creation before we free DECL_ARGUMENTS
     used by it.  */
  if (!seen_error ())
    symtab_initialize_asm_name_hash ();

  input_location = saved_loc;
}
1139
1140 /* Translate the ugly representation of aliases as alias pairs into nice
1141 representation in callgraph. We don't handle all cases yet,
1142 unfortunately. */
1143
1144 static void
1145 handle_alias_pairs (void)
1146 {
1147 alias_pair *p;
1148 unsigned i;
1149
1150 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1151 {
1152 symtab_node *target_node = symtab_node_for_asm (p->target);
1153
1154 /* Weakrefs with target not defined in current unit are easy to handle:
1155 they behave just as external variables except we need to note the
1156 alias flag to later output the weakref pseudo op into asm file. */
1157 if (!target_node
1158 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1159 {
1160 symtab_node *node = symtab_get_node (p->decl);
1161 if (node)
1162 {
1163 node->alias_target = p->target;
1164 node->weakref = true;
1165 node->alias = true;
1166 }
1167 alias_pairs->unordered_remove (i);
1168 continue;
1169 }
1170 else if (!target_node)
1171 {
1172 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1173 symtab_node *node = symtab_get_node (p->decl);
1174 if (node)
1175 node->alias = false;
1176 alias_pairs->unordered_remove (i);
1177 continue;
1178 }
1179
1180 if (DECL_EXTERNAL (target_node->decl)
1181 /* We use local aliases for C++ thunks to force the tailcall
1182 to bind locally. This is a hack - to keep it working do
1183 the following (which is not strictly correct). */
1184 && (! TREE_CODE (target_node->decl) == FUNCTION_DECL
1185 || ! DECL_VIRTUAL_P (target_node->decl))
1186 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1187 {
1188 error ("%q+D aliased to external symbol %qE",
1189 p->decl, p->target);
1190 }
1191
1192 if (TREE_CODE (p->decl) == FUNCTION_DECL
1193 && target_node && is_a <cgraph_node *> (target_node))
1194 {
1195 struct cgraph_node *src_node = cgraph_get_node (p->decl);
1196 if (src_node && src_node->definition)
1197 cgraph_reset_node (src_node);
1198 cgraph_create_function_alias (p->decl, target_node->decl);
1199 alias_pairs->unordered_remove (i);
1200 }
1201 else if (TREE_CODE (p->decl) == VAR_DECL
1202 && target_node && is_a <varpool_node *> (target_node))
1203 {
1204 varpool_create_variable_alias (p->decl, target_node->decl);
1205 alias_pairs->unordered_remove (i);
1206 }
1207 else
1208 {
1209 error ("%q+D alias in between function and variable is not supported",
1210 p->decl);
1211 warning (0, "%q+D aliased declaration",
1212 target_node->decl);
1213 alias_pairs->unordered_remove (i);
1214 }
1215 }
1216 vec_free (alias_pairs);
1217 }
1218
1219
1220 /* Figure out what functions we want to assemble. */
1221
static void
mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may be pre-marked; expansion owns the process flag.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* A node may already be marked only via a COMDAT sibling handled
	 earlier in this walk.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* COMDAT group members are emitted together; mark the whole
	     ring, skipping thunks/aliases (emitted with their target) and
	     comdat-local members (not referenced outside the group).  */
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = cgraph (node->same_comdat_group);
		   next != node;
		   next = cgraph (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !symtab_comdat_local_p (next))
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Re-verify this group in the second pass below.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Any unmarked member of a COMDAT group must have no retained body.  */
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1320
/* DECL is a FUNCTION_DECL.  Initialize data structures so that DECL becomes
   a function in lowered GIMPLE form.  IN_SSA is true if the GIMPLE is in
   SSA form.

   Set current_function_decl and cfun to the newly constructed empty
   function body and return the basic block in that function body.  */
1326
basic_block
init_lowered_empty_function (tree decl, bool in_ssa)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();

  if (in_ssa)
    {
      init_tree_ssa (cfun);
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Claim the properties later passes would otherwise establish, so the
     pass manager treats this body as already lowered.  */
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  return bb;
}
1363
1364 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1365 offset indicated by VIRTUAL_OFFSET, if that is
1366 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1367 zero for a result adjusting thunk. */
1368
static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk the constant offset is applied BEFORE the
     virtual lookup; for a result-adjusting thunk it is applied after
     (see the !this_adjusting branch near the end).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
	      (ptr, fold_build_pointer_plus_hwi_loc (input_location,
						     ptr,
						     fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily create the global type used for vtable entries.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1467
1468 /* Expand thunk NODE to gimple if possible.
1469 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1470 no assembler is produced.
1471 When OUTPUT_ASM_THUNK is true, also produce assembler for
1472 thunks that are not lowered. */
1473
bool
expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimple_thunk)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  /* A thunk node has exactly one callee: the function being thunked.  */
  tree alias = node->callees->callee->decl;
  tree thunk_fndecl = node->decl;
  tree a;


  /* Fast path: let the target emit the thunk directly as assembly when it
     can (this-adjusting thunks only), unless the caller demands GIMPLE.  */
  if (!force_gimple_thunk && this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      /* Target could emit it, but we were asked not to produce assembly;
	 report that nothing was expanded.  */
      if (!output_asm_thunks)
	return false;

      if (in_lto_p)
	cgraph_get_body (node);
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      /* The thunk is done; it is no longer a thunk node.  */
      node->thunk.thunk_p = false;
      node->analyzed = false;
    }
  else
    {
      /* Slow path: build a GIMPLE body that adjusts `this' (and/or the
	 return value) and tail-calls ALIAS.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;

      gimple call;
      gimple ret;

      if (in_lto_p)
	cgraph_get_body (node);
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	  DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      /* Start from a single-block body; THEN/ELSE/RETURN blocks are only
	 materialized later for the NULL-pointer guard.  */
      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (DECL_BY_REFERENCE (resdecl))
	    restmp = gimple_fold_indirect_ref (resdecl);
	  else if (!is_gimple_reg_type (restype))
	    {
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      auto_vec<tree> vargs (nargs);
      /* First argument: `this', adjusted up-front for this-adjusting
	 thunks, passed through unchanged otherwise.  */
      if (this_adjusting)
        vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					virtual_offset));
      else if (nargs)
        vargs.quick_push (a);

      if (nargs)
        for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
	  {
	    tree tmp = arg;
	    /* GIMPLE call arguments must be gimple values; copy through a
	       temporary if needed.  */
	    if (!is_gimple_val (arg))
	      {
		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
				      (TREE_TYPE (arg)), "arg");
		gimple stmt = gimple_build_assign (tmp, arg);
		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      }
	    vargs.quick_push (tmp);
	  }
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      node->callees->call_stmt = call;
      gimple_call_set_from_thunk (call, true);
      if (restmp)
	{
          gimple_call_set_lhs (call, restmp);
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
						 TREE_TYPE (TREE_TYPE (alias))));
	}
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!(gimple_call_flags (call) & ECF_NORETURN))
	{
	  if (restmp && !this_adjusting
	      && (fixed_offset || virtual_offset))
	    {
	      /* Result-adjusting (covariant-return) thunk: the returned
		 pointer must be offset, guarded against NULL.  */
	      tree true_label = NULL_TREE;

	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
		{
		  gimple stmt;
		  /* If the return type is a pointer, we need to
		     protect against NULL.  We know there will be an
		     adjustment, because that's why we're emitting a
		     thunk.  */
		  then_bb = create_basic_block (NULL, (void *) 0, bb);
		  return_bb = create_basic_block (NULL, (void *) 0, then_bb);
		  else_bb = create_basic_block (NULL, (void *) 0, else_bb);
		  add_bb_to_loop (then_bb, bb->loop_father);
		  add_bb_to_loop (return_bb, bb->loop_father);
		  add_bb_to_loop (else_bb, bb->loop_father);
		  remove_edge (single_succ_edge (bb));
		  true_label = gimple_block_label (then_bb);
		  stmt = gimple_build_cond (NE_EXPR, restmp,
					    build_zero_cst (TREE_TYPE (restmp)),
					    NULL_TREE, NULL_TREE);
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
		  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
		  make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  make_edge (then_bb, return_bb, EDGE_FALLTHRU);
		  make_edge (else_bb, return_bb, EDGE_FALLTHRU);
		  bsi = gsi_last_bb (then_bb);
		}

	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				     fixed_offset, virtual_offset);
	      if (true_label)
		{
		  gimple stmt;
		  /* NULL result: bypass the adjustment, return zero.  */
		  bsi = gsi_last_bb (else_bb);
		  stmt = gimple_build_assign (restmp,
					      build_zero_cst (TREE_TYPE (restmp)));
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  bsi = gsi_last_bb (return_bb);
		}
	    }
	  else
	    gimple_call_set_tail (call, true);

	  /* Build return value.  */
	  ret = gimple_build_return (restmp);
	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
	}
      else
	{
	  /* Noreturn callee: the tail call never returns; drop the edge
	     to the exit block.  */
	  gimple_call_set_tail (call, true);
	  remove_edge (single_succ_edge (bb));
	}

      cfun->gimple_df->in_ssa_p = true;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
      verify_flow_info ();
#endif

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      node->thunk.thunk_p = false;
      node->lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}
1707
1708 /* Assemble thunks and aliases associated to NODE. */
1709
1710 static void
1711 assemble_thunks_and_aliases (struct cgraph_node *node)
1712 {
1713 struct cgraph_edge *e;
1714 int i;
1715 struct ipa_ref *ref;
1716
1717 for (e = node->callers; e;)
1718 if (e->caller->thunk.thunk_p)
1719 {
1720 struct cgraph_node *thunk = e->caller;
1721
1722 e = e->next_caller;
1723 expand_thunk (thunk, true, false);
1724 assemble_thunks_and_aliases (thunk);
1725 }
1726 else
1727 e = e->next_caller;
1728 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list,
1729 i, ref); i++)
1730 if (ref->use == IPA_REF_ALIAS)
1731 {
1732 struct cgraph_node *alias = ipa_ref_referring_node (ref);
1733 bool saved_written = TREE_ASM_WRITTEN (node->decl);
1734
1735 /* Force assemble_alias to really output the alias this time instead
1736 of buffering it in same alias pairs. */
1737 TREE_ASM_WRITTEN (node->decl) = 1;
1738 do_assemble_alias (alias->decl,
1739 DECL_ASSEMBLER_NAME (node->decl));
1740 assemble_thunks_and_aliases (alias);
1741 TREE_ASM_WRITTEN (node->decl) = saved_written;
1742 }
1743 }
1744
1745 /* Expand function specified by NODE. */
1746
static void
expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  gcc_assert (node->lowered);
  /* In LTO mode, materialize the body from the LTO stream on demand.  */
  cgraph_get_body (node);

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (cgraph_global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply the summaries computed by the IPA passes to this body.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Print the exact size if it fits in an unsigned int, otherwise
	     just report that it exceeds the threshold.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_get_node (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  set_cfun (NULL);
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before the function
     body to get more forward and fewer backward jumps.  This however would
     need solving a problem with comdats.  See PR48668.  Also aliases must
     come after the function itself to make one-pass assemblers, like the
     one on AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be moved before the function IFF they
     are not in comdat groups.  */
  assemble_thunks_and_aliases (node);
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);
}
1857
1858 /* Node comparer that is responsible for the order that corresponds
1859 to time when a function was launched for the first time. */
1860
1861 static int
1862 node_cmp (const void *pa, const void *pb)
1863 {
1864 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
1865 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
1866
1867 /* Functions with time profile must be before these without profile. */
1868 if (!a->tp_first_run || !b->tp_first_run)
1869 return a->tp_first_run - b->tp_first_run;
1870
1871 return a->tp_first_run != b->tp_first_run
1872 ? b->tp_first_run - a->tp_first_run
1873 : b->order - a->order;
1874 }
1875
1876 /* Expand all functions that must be output.
1877
1878 Attempt to topologically sort the nodes so function is output when
1879 all called functions are already assembled to allow data to be
1880 propagated across the callgraph. Use a stack to get smaller distance
1881 between a function and its callees (later we may choose to use a more
1882 sophisticated algorithm for function reordering; we will likely want
1883 to use subsections to make the output functions appear in top-down
1884 order). */
1885
static void
expand_all_functions (void)
{
  struct cgraph_node *node;
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  unsigned int expanded_func_count = 0, profiled_func_count = 0;
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = ipa_reverse_postorder (order);
  gcc_assert (order_pos == cgraph_n_nodes);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  if (flag_profile_reorder_functions)
    qsort (order, new_order_pos, sizeof (struct cgraph_node *), node_cmp);

  /* Walk the (possibly re-sorted) reverse-postorder array backwards so
     callees tend to be expanded before their callers.  */
  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];

      if (node->process)
	{
	  expanded_func_count++;
	  if(node->tp_first_run)
	    profiled_func_count++;

	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, "Time profile order in expand_all_functions:%s:%d\n", node->asm_name (), node->tp_first_run);

	  node->process = 0;
	  expand_function (node);
	}
    }

  if (dump_file)
    fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
	     main_input_filename, profiled_func_count, expanded_func_count);

  if (cgraph_dump_file && flag_profile_reorder_functions)
    fprintf (cgraph_dump_file, "Expanded functions with time profile:%u/%u\n",
	     profiled_func_count, expanded_func_count);

  /* Expansion may have created new functions (e.g. from OpenMP outlining);
     compile them too.  */
  cgraph_process_new_functions ();
  free_gimplify_stack ();

  free (order);
}
1938
1939 /* This is used to sort the node types by the cgraph order number. */
1940
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not occupied by any symbol.  */
  ORDER_FUNCTION,	/* Slot holds a cgraph (function) node.  */
  ORDER_VAR,		/* Slot holds a varpool (variable) node.  */
  ORDER_ASM		/* Slot holds a toplevel asm statement.  */
};

/* One entry of the order-indexed table built by output_in_order: a tag
   plus the corresponding symbol-table pointer.  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;
    varpool_node *v;
    struct asm_node *a;
  } u;
};
1959
1960 /* Output all functions, variables, and asm statements in the order
1961 according to their order fields, which is the order in which they
1962 appeared in the file. This implements -fno-toplevel-reorder. In
1963 this mode we may output functions and variables which don't really
1964 need to be output. */
1965
static void
output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  varpool_node *pv;
  struct asm_node *pa;

  /* NODES is indexed directly by each symbol's order number, so its size
     is the highest order number ever assigned.  */
  max = symtab_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      /* Thunks and aliases are emitted with their target function.  */
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  FOR_EACH_DEFINED_VARIABLE (pv)
    if (!DECL_EXTERNAL (pv->decl))
      {
	i = pv->order;
	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	nodes[i].kind = ORDER_VAR;
	nodes[i].u.v = pv;
      }

  for (pa = asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      varpool_finalize_named_section_flags (nodes[i].u.v);

  /* Second sweep: emit everything in original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* All toplevel asms are consumed; prevent them from being emitted again
     by the non-ordered output path.  */
  asm_nodes = NULL;
  free (nodes);
}
2041
/* Run the inter-procedural (IPA) optimization passes and, when LTO
   output is requested, stream out the IPA summaries.  When invoked
   from the LTO frontend (in_lto_p) the early small-IPA passes and
   summary generation are skipped: they already ran at compile time.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  /* IPA passes work on the whole program; make sure no per-function
     context is current.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      /* NOTE(review): this early return skips bitmap_obstack_release and
	 the PLUGIN_ALL_IPA_PASSES_END callback below — presumably fine on
	 the error path since compilation is aborting, but worth confirming.  */
      if (seen_error ())
	return;
    }

  /* We never run removal of unreachable nodes after early passes.  This is
     because TODO is run before the subpasses.  It is important to remove
     the unreachable functions to save works at IPA level and to get LTO
     symbol tables right.  */
  symtab_remove_unreachable_nodes (true, cgraph_dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes unless we are only streaming LTO
     summaries (slim LTO compile step).  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2102
2103
2104 /* Return string alias is alias of. */
2105
2106 static tree
2107 get_alias_symbol (tree decl)
2108 {
2109 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2110 return get_identifier (TREE_STRING_POINTER
2111 (TREE_VALUE (TREE_VALUE (alias))));
2112 }
2113
2114
2115 /* Weakrefs may be associated to external decls and thus not output
2116 at expansion time. Emit all necessary aliases. */
2117
2118 static void
2119 output_weakrefs (void)
2120 {
2121 symtab_node *node;
2122 FOR_EACH_SYMBOL (node)
2123 if (node->alias
2124 && !TREE_ASM_WRITTEN (node->decl)
2125 && node->weakref)
2126 {
2127 tree target;
2128
2129 /* Weakrefs are special by not requiring target definition in current
2130 compilation unit. It is thus bit hard to work out what we want to
2131 alias.
2132 When alias target is defined, we need to fetch it from symtab reference,
2133 otherwise it is pointed to by alias_target. */
2134 if (node->alias_target)
2135 target = (DECL_P (node->alias_target)
2136 ? DECL_ASSEMBLER_NAME (node->alias_target)
2137 : node->alias_target);
2138 else if (node->analyzed)
2139 target = DECL_ASSEMBLER_NAME (symtab_alias_target (node)->decl);
2140 else
2141 {
2142 gcc_unreachable ();
2143 target = get_alias_symbol (node->decl);
2144 }
2145 do_assemble_alias (node->decl, target);
2146 }
2147 }
2148
2149 /* Initialize callgraph dump file. */
2150
2151 void
2152 init_cgraph (void)
2153 {
2154 if (!cgraph_dump_file)
2155 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2156 }
2157
2158
/* Perform simple optimizations based on callgraph.  This is the main
   output driver: it runs the IPA passes, then expands every function
   marked for output and assembles all variables, aliases and thunks.
   On exit cgraph_state is CGRAPH_STATE_FINISHED.  */

void
compile (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_lto)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just
     streaming LTO (slim LTO compile step: summaries written, no code).  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  symtab_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_symtab (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  cgraph_materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  symtab_remove_unreachable_nodes (true, dump_file);
#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	   = (node->alias_target ? node->alias_target
	      : DECL_ASSEMBLER_NAME (symtab_alias_target (node)->decl));
      }
#endif

  cgraph_state = CGRAPH_STATE_EXPANSION;

  /* -fno-toplevel-reorder requires emission in original source order;
     otherwise emit asm statements, then functions, then variables.  */
  if (!flag_toplevel_reorder)
    output_in_order ();
  else
    {
      output_asm_statements ();

      expand_all_functions ();
      varpool_output_variables ();
    }

  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;
  output_weakrefs ();

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_symtab (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_symtab ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2299
2300
/* Analyze the whole compilation unit once it is parsed completely.
   Called by frontends when the (source level) compilation unit is
   final; lowers everything to GIMPLE and drives the pass manager via
   compile ().  */

void
finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions ();

  /* Handle alias pairs a second time: analysis above may have
     discovered new aliases that need their targets marked.  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks (created by the alias handling above;
     hence the second analyze_functions call).  */
  analyze_functions ();

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2346
/* Creates a wrapper from SOURCE node to TARGET node.  Thunk is used for this
   kind of wrapper method.  SOURCE's existing body is discarded and replaced
   by a non-this-adjusting thunk that tail-calls TARGET.  */

void
cgraph_make_wrapper (struct cgraph_node *source, struct cgraph_node *target)
{
  /* Preserve DECL_RESULT so we get right by reference flag.  */
  tree decl_result = DECL_RESULT (source->decl);

  /* Remove the function's body.  */
  cgraph_release_function_body (source);
  cgraph_reset_node (source);

  /* cgraph_reset_node cleared decl state; restore the saved result decl
     and give the decl a fresh struct function.  */
  DECL_RESULT (source->decl) = decl_result;
  DECL_INITIAL (source->decl) = NULL;
  allocate_struct_function (source->decl, false);
  set_cfun (NULL);

  /* Turn alias into thunk and expand it into GIMPLE representation.  */
  source->definition = true;
  source->thunk.thunk_p = true;
  source->thunk.this_adjusting = false;

  struct cgraph_edge *e = cgraph_create_edge (source, target, NULL, 0,
					      CGRAPH_FREQ_BASE);

  /* NOTE(review): presumably expand_thunk returns false when it only set
     the thunk up without emitting a GIMPLE body, in which case the node
     must be marked analyzed by hand — confirm against expand_thunk.  */
  if (!expand_thunk (source, false, true))
    source->analyzed = true;

  /* The wrapper call must never be inlined back into SOURCE.  */
  e->call_stmt_cannot_inline_p = true;

  /* Inline summary set-up.  */

  analyze_function (source);
  inline_analyze_function (source);
}
2383
2384 #include "gt-cgraphunit.h"