1 /* Driver of optimization process
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use the following functionality:
27
28 - finalize_function
29
30 This function is called once the front-end has parsed the whole body of a function
31 and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 functions.)
35
36 - varpool_finalize_decl
37
38 This function has the same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Inserts a new toplevel ASM statement.
44
45 - finalize_compilation_unit
46
47 This function is called once the (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done, i.e. to discover
81 functions without side effects, and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across the whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent the whole program in memory.
97
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and/or linktime analysis stage (WPA):
102
103 At linktime units get streamed back and the symbol table is
104 merged. Function bodies are not streamed in and are not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions.
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that need to be output into
139 assembler are identified and compiled in topological order.
140 6) Output of variables and aliases
141 Now it is known which variable references were not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 the backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into the callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of a function into a new one (a version)
157 and applies simple transformations.
158 */
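/* A rough sketch (illustrative only, not part of the driver itself) of how a
   frontend drives the interface described above.  The exact spelling of the
   entry points varies between GCC releases; the names below are the ones
   defined or referenced in this file:

     cgraph_node::finalize_function (fndecl, false);   for each parsed function
     varpool_node::finalize_decl (vardecl);            for each static variable
     symtab->finalize_compilation_unit ();             once per translation unit  */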
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "varasm.h"
166 #include "stor-layout.h"
167 #include "stringpool.h"
168 #include "output.h"
169 #include "rtl.h"
170 #include "basic-block.h"
171 #include "tree-ssa-alias.h"
172 #include "internal-fn.h"
173 #include "gimple-fold.h"
174 #include "gimple-expr.h"
175 #include "is-a.h"
176 #include "gimple.h"
177 #include "gimplify.h"
178 #include "gimple-iterator.h"
179 #include "gimplify-me.h"
180 #include "gimple-ssa.h"
181 #include "tree-cfg.h"
182 #include "tree-into-ssa.h"
183 #include "tree-ssa.h"
184 #include "tree-inline.h"
185 #include "langhooks.h"
186 #include "toplev.h"
187 #include "flags.h"
188 #include "debug.h"
189 #include "target.h"
190 #include "diagnostic.h"
191 #include "params.h"
192 #include "fibheap.h"
193 #include "intl.h"
194 #include "function.h"
195 #include "ipa-prop.h"
196 #include "tree-iterator.h"
197 #include "tree-pass.h"
198 #include "tree-dump.h"
199 #include "gimple-pretty-print.h"
200 #include "output.h"
201 #include "coverage.h"
202 #include "plugin.h"
203 #include "ipa-inline.h"
204 #include "ipa-utils.h"
205 #include "lto-streamer.h"
206 #include "except.h"
207 #include "cfgloop.h"
208 #include "regset.h" /* FIXME: For reg_obstack. */
209 #include "context.h"
210 #include "pass_manager.h"
211 #include "tree-nested.h"
212 #include "gimplify.h"
213 #include "dbgcnt.h"
214
215 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
216 secondary queue used during optimization to accommodate passes that
217 may generate new functions that need to be optimized and expanded. */
218 vec<cgraph_node *> cgraph_new_nodes;
219
220 static void expand_all_functions (void);
221 static void mark_functions_to_output (void);
222 static void handle_alias_pairs (void);
223
224 /* Used for vtable lookup in thunk adjusting. */
225 static GTY (()) tree vtable_entry_type;
226
227 /* Determine if symbol declaration is needed. That is, visible to something
228 either outside this translation unit or to something magic in the system
229 configury. */
230 bool
231 symtab_node::needed_p (void)
232 {
233 /* Double check that no one has output the function into an assembly file
234 early. */
235 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
236 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
237
238 if (!definition)
239 return false;
240
241 if (DECL_EXTERNAL (decl))
242 return false;
243
244 /* If the user told us it is used, then it must be so. */
245 if (force_output)
246 return true;
247
248 /* ABI forced symbols are needed when they are external. */
249 if (forced_by_abi && TREE_PUBLIC (decl))
250 return true;
251
252 /* Keep constructors, destructors and virtual functions. */
253 if (TREE_CODE (decl) == FUNCTION_DECL
254 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
255 return true;
256
257 /* Externally visible variables must be output. The exception is
258 COMDAT variables that must be output only when they are needed. */
259 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
260 return true;
261
262 return false;
263 }
264
265 /* Head and terminator of the queue of nodes to be processed while building
266 callgraph. */
267
268 static symtab_node symtab_terminator;
269 static symtab_node *queued_nodes = &symtab_terminator;
270
271 /* Add NODE to queue starting at QUEUED_NODES.
272 The queue is linked via AUX pointers and terminated by the address of symtab_terminator. */
273
274 static void
275 enqueue_node (symtab_node *node)
276 {
277 if (node->aux)
278 return;
279 gcc_checking_assert (queued_nodes);
280 node->aux = queued_nodes;
281 queued_nodes = node;
282 }
283
284 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
285 functions into the callgraph so that they look like ordinary reachable
286 functions inserted into the callgraph already at construction time. */
287
288 void
289 symbol_table::process_new_functions (void)
290 {
291 tree fndecl;
292
293 if (!cgraph_new_nodes.exists ())
294 return;
295
296 handle_alias_pairs ();
297 /* Note that this queue may grow as it is being processed, as the new
298 functions may generate new ones. */
299 for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
300 {
301 cgraph_node *node = cgraph_new_nodes[i];
302 fndecl = node->decl;
303 switch (state)
304 {
305 case CONSTRUCTION:
306 /* At construction time we just need to finalize function and move
307 it into reachable functions list. */
308
309 cgraph_node::finalize_function (fndecl, false);
310 call_cgraph_insertion_hooks (node);
311 enqueue_node (node);
312 break;
313
314 case IPA:
315 case IPA_SSA:
316 /* When IPA optimization has already started, do all essential
317 transformations that have already been performed on the whole
318 cgraph but not on this function. */
319
320 gimple_register_cfg_hooks ();
321 if (!node->analyzed)
322 node->analyze ();
323 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
324 if (state == IPA_SSA
325 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
326 g->get_passes ()->execute_early_local_passes ();
327 else if (inline_summary_vec != NULL)
328 compute_inline_parameters (node, true);
329 free_dominance_info (CDI_POST_DOMINATORS);
330 free_dominance_info (CDI_DOMINATORS);
331 pop_cfun ();
332 break;
333
334 case EXPANSION:
335 /* Functions created during expansion shall be compiled
336 directly. */
337 node->process = 0;
338 call_cgraph_insertion_hooks (node);
339 node->expand ();
340 break;
341
342 default:
343 gcc_unreachable ();
344 break;
345 }
346 }
347
348 cgraph_new_nodes.release ();
349 }
350
351 /* As a GCC extension we allow redefinition of the function. The
352 semantics when the two bodies differ are not well defined.
353 We replace the old body with the new body so in unit-at-a-time mode
354 we always use the new body, while in normal mode we may end up with
355 the old body inlined into some functions and the new body expanded and
356 inlined in others.
357
358 ??? It may make more sense to use one body for inlining and the other
359 body for expanding the function but this is difficult to do. */
360
361 void
362 cgraph_node::reset (void)
363 {
364 /* If process is set, then we have already begun whole-unit analysis.
365 This is *not* testing for whether we've already emitted the function.
366 That case can be sort-of legitimately seen with real function redefinition
367 errors. I would argue that the front end should never present us with
368 such a case, but don't enforce that for now. */
369 gcc_assert (!process);
370
371 /* Reset our data structures so we can analyze the function again. */
372 memset (&local, 0, sizeof (local));
373 memset (&global, 0, sizeof (global));
374 memset (&rtl, 0, sizeof (rtl));
375 analyzed = false;
376 definition = false;
377 alias = false;
378 weakref = false;
379 cpp_implicit_alias = false;
380
381 remove_callees ();
382 remove_all_references ();
383 }
384
385 /* Return true when there are references to the node. */
386
387 bool
388 symtab_node::referred_to_p (void)
389 {
390 ipa_ref *ref = NULL;
391
392 /* See if there are any references at all. */
393 if (iterate_referring (0, ref))
394 return true;
395 /* For functions check also calls. */
396 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
397 if (cn && cn->callers)
398 return true;
399 return false;
400 }
401
402 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
403 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
404 the garbage collector run at the moment. We would need to either create
405 a new GC context, or just not compile right now. */
406
407 void
408 cgraph_node::finalize_function (tree decl, bool no_collect)
409 {
410 cgraph_node *node = cgraph_node::get_create (decl);
411
412 if (node->definition)
413 {
414 /* Nested functions should only be defined once. */
415 gcc_assert (!DECL_CONTEXT (decl)
416 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
417 node->reset ();
418 node->local.redefined_extern_inline = true;
419 }
420
421 notice_global_symbol (decl);
422 node->definition = true;
423 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
424
425 /* With -fkeep-inline-functions we are keeping all inline functions except
426 for extern inline ones. */
427 if (flag_keep_inline_functions
428 && DECL_DECLARED_INLINE_P (decl)
429 && !DECL_EXTERNAL (decl)
430 && !DECL_DISREGARD_INLINE_LIMITS (decl))
431 node->force_output = 1;
432
433 /* When not optimizing, also output the static functions (see
434 PR24561), but don't do so for always_inline functions, functions
435 declared inline and nested functions. These were optimized out
436 in the original implementation and it is unclear whether we want
437 to change the behavior here. */
438 if ((!optimize
439 && !node->cpp_implicit_alias
440 && !DECL_DISREGARD_INLINE_LIMITS (decl)
441 && !DECL_DECLARED_INLINE_P (decl)
442 && !(DECL_CONTEXT (decl)
443 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
444 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
445 node->force_output = 1;
446
447 /* If we've not yet emitted decl, tell the debug info about it. */
448 if (!TREE_ASM_WRITTEN (decl))
449 (*debug_hooks->deferred_inline_function) (decl);
450
451 /* Possibly warn about unused parameters. */
452 if (warn_unused_parameter)
453 do_warn_unused_parameter (decl);
454
455 if (!no_collect)
456 ggc_collect ();
457
458 if (symtab->state == CONSTRUCTION
459 && (node->needed_p () || node->referred_to_p ()))
460 enqueue_node (node);
461 }
462
463 /* Add the function FNDECL to the call graph.
464 Unlike finalize_function, this function is intended to be used
465 by the middle end and allows insertion of new functions at an arbitrary
466 point of compilation. The function can be either in high, low or SSA form
467 GIMPLE.
468
469 The function is assumed to be reachable and to have its address taken (so no
470 API breaking optimizations are performed on it).
471
472 The main work done by this function is to enqueue the function for later
473 processing to avoid the need for the passes to be re-entrant. */
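/* Illustrative use (an assumed example, not taken from this unit): a pass that
   builds a helper FNDECL in lowered gimple registers it with

     cgraph_node::add_new_function (fndecl, true);

   and the driver later picks it up through process_new_functions.  */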
474
475 void
476 cgraph_node::add_new_function (tree fndecl, bool lowered)
477 {
478 gcc::pass_manager *passes = g->get_passes ();
479 cgraph_node *node;
480 switch (symtab->state)
481 {
482 case PARSING:
483 cgraph_node::finalize_function (fndecl, false);
484 break;
485 case CONSTRUCTION:
486 /* Just enqueue function to be processed at nearest occurrence. */
487 node = cgraph_node::get_create (fndecl);
488 if (lowered)
489 node->lowered = true;
490 cgraph_new_nodes.safe_push (node);
491 break;
492
493 case IPA:
494 case IPA_SSA:
495 case EXPANSION:
496 /* Bring the function into finalized state and enqueue for later
497 analysis and compilation. */
498 node = cgraph_node::get_create (fndecl);
499 node->local.local = false;
500 node->definition = true;
501 node->force_output = true;
502 if (!lowered && symtab->state == EXPANSION)
503 {
504 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
505 gimple_register_cfg_hooks ();
506 bitmap_obstack_initialize (NULL);
507 execute_pass_list (cfun, passes->all_lowering_passes);
508 passes->execute_early_local_passes ();
509 bitmap_obstack_release (NULL);
510 pop_cfun ();
511
512 lowered = true;
513 }
514 if (lowered)
515 node->lowered = true;
516 cgraph_new_nodes.safe_push (node);
517 break;
518
519 case FINISHED:
520 /* At the very end of compilation we have to do all the work up
521 to expansion. */
522 node = cgraph_node::create (fndecl);
523 if (lowered)
524 node->lowered = true;
525 node->definition = true;
526 node->analyze ();
527 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
528 gimple_register_cfg_hooks ();
529 bitmap_obstack_initialize (NULL);
530 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
531 g->get_passes ()->execute_early_local_passes ();
532 bitmap_obstack_release (NULL);
533 pop_cfun ();
534 node->expand ();
535 break;
536
537 default:
538 gcc_unreachable ();
539 }
540
541 /* Set a personality if required and we already passed EH lowering. */
542 if (lowered
543 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
544 == eh_personality_lang))
545 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
546 }
547
548 /* Output all asm statements we have stored up to be output. */
549
550 void
551 symbol_table::output_asm_statements (void)
552 {
553 asm_node *can;
554
555 if (seen_error ())
556 return;
557
558 for (can = first_asm_symbol (); can; can = can->next)
559 assemble_asm (can->asm_str);
560
561 clear_asm_symbols ();
562 }
563
564 /* Analyze the function scheduled to be output. */
565 void
566 cgraph_node::analyze (void)
567 {
568 tree decl = this->decl;
569 location_t saved_loc = input_location;
570 input_location = DECL_SOURCE_LOCATION (decl);
571
572 if (thunk.thunk_p)
573 {
574 create_edge (cgraph_node::get (thunk.alias),
575 NULL, 0, CGRAPH_FREQ_BASE);
576 if (!expand_thunk (false, false))
577 {
578 thunk.alias = NULL;
579 return;
580 }
581 thunk.alias = NULL;
582 }
583 if (alias)
584 resolve_alias (cgraph_node::get (alias_target));
585 else if (dispatcher_function)
586 {
587 /* Generate the dispatcher body of multi-versioned functions. */
588 cgraph_function_version_info *dispatcher_version_info
589 = function_version ();
590 if (dispatcher_version_info != NULL
591 && (dispatcher_version_info->dispatcher_resolver
592 == NULL_TREE))
593 {
594 tree resolver = NULL_TREE;
595 gcc_assert (targetm.generate_version_dispatcher_body);
596 resolver = targetm.generate_version_dispatcher_body (this);
597 gcc_assert (resolver != NULL_TREE);
598 }
599 }
600 else
601 {
602 push_cfun (DECL_STRUCT_FUNCTION (decl));
603
604 assign_assembler_name_if_neeeded (decl);
605
606 /* Make sure to gimplify bodies only once. During analyzing a
607 function we lower it, which will require gimplified nested
608 functions, so we can end up here with an already gimplified
609 body. */
610 if (!gimple_has_body_p (decl))
611 gimplify_function_tree (decl);
612 dump_function (TDI_generic, decl);
613
614 /* Lower the function. */
615 if (!lowered)
616 {
617 if (nested)
618 lower_nested_functions (decl);
619 gcc_assert (!nested);
620
621 gimple_register_cfg_hooks ();
622 bitmap_obstack_initialize (NULL);
623 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
624 free_dominance_info (CDI_POST_DOMINATORS);
625 free_dominance_info (CDI_DOMINATORS);
626 compact_blocks ();
627 bitmap_obstack_release (NULL);
628 lowered = true;
629 }
630
631 pop_cfun ();
632 }
633 analyzed = true;
634
635 input_location = saved_loc;
636 }
637
638 /* The C++ frontend produces same-body aliases all over the place, even before PCH
639 gets streamed out. It relies on us linking the aliases with their function
640 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
641 first produce aliases without links, but once the C++ FE is sure it won't stream
642 PCH we build the links via this function. */
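/* For example, for a C++ class with no virtual bases the complete and base
   constructor variants have identical bodies, so the C++ FE emits one of them
   as a same-body alias of the other; the alias link is materialized here.  */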
643
644 void
645 symbol_table::process_same_body_aliases (void)
646 {
647 symtab_node *node;
648 FOR_EACH_SYMBOL (node)
649 if (node->cpp_implicit_alias && !node->analyzed)
650 node->resolve_alias
651 (TREE_CODE (node->alias_target) == VAR_DECL
652 ? (symtab_node *)varpool_node::get_create (node->alias_target)
653 : (symtab_node *)cgraph_node::get_create (node->alias_target));
654 cpp_implicit_aliases_done = true;
655 }
656
657 /* Process attributes common for vars and functions. */
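/* For instance (an illustrative declaration, not taken from this unit), a
   well-formed weakref names its target through the attribute argument, which
   the frontend turns into an accompanying "alias" attribute:

     static void f (void) __attribute__ ((weakref ("target")));

   A "weakref" that ends up without an "alias" attribute is diagnosed below
   and the attribute is dropped.  */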
658
659 static void
660 process_common_attributes (tree decl)
661 {
662 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
663
664 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
665 {
666 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
667 "%<weakref%> attribute should be accompanied with"
668 " an %<alias%> attribute");
669 DECL_WEAK (decl) = 0;
670 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
671 DECL_ATTRIBUTES (decl));
672 }
673 }
674
675 /* Look for externally_visible and used attributes and mark cgraph nodes
676 accordingly.
677
678 We cannot mark the nodes at the point the attributes are processed (in
679 handle_*_attribute) because the copy of the declarations available at that
680 point may not be canonical. For example, in:
681
682 void f();
683 void f() __attribute__((used));
684
685 the declaration we see in handle_used_attribute will be the second
686 declaration -- but the front end will subsequently merge that declaration
687 with the original declaration and discard the second declaration.
688
689 Furthermore, we can't mark these nodes in finalize_function because:
690
691 void f() {}
692 void f() __attribute__((externally_visible));
693
694 is valid.
695
696 So, we walk the nodes at the end of the translation unit, applying the
697 attributes at that point. */
698
699 static void
700 process_function_and_variable_attributes (cgraph_node *first,
701 varpool_node *first_var)
702 {
703 cgraph_node *node;
704 varpool_node *vnode;
705
706 for (node = symtab->first_function (); node != first;
707 node = symtab->next_function (node))
708 {
709 tree decl = node->decl;
710 if (DECL_PRESERVE_P (decl))
711 node->mark_force_output ();
712 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
713 {
714 if (! TREE_PUBLIC (node->decl))
715 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
716 "%<externally_visible%>"
717 " attribute have effect only on public objects");
718 }
719 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
720 && (node->definition && !node->alias))
721 {
722 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
723 "%<weakref%> attribute ignored"
724 " because function is defined");
725 DECL_WEAK (decl) = 0;
726 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
727 DECL_ATTRIBUTES (decl));
728 }
729
730 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
731 && !DECL_DECLARED_INLINE_P (decl)
732 /* redefining extern inline function makes it DECL_UNINLINABLE. */
733 && !DECL_UNINLINABLE (decl))
734 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
735 "always_inline function might not be inlinable");
736
737 process_common_attributes (decl);
738 }
739 for (vnode = symtab->first_variable (); vnode != first_var;
740 vnode = symtab->next_variable (vnode))
741 {
742 tree decl = vnode->decl;
743 if (DECL_EXTERNAL (decl)
744 && DECL_INITIAL (decl))
745 varpool_node::finalize_decl (decl);
746 if (DECL_PRESERVE_P (decl))
747 vnode->force_output = true;
748 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
749 {
750 if (! TREE_PUBLIC (vnode->decl))
751 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
752 "%<externally_visible%>"
753 " attribute have effect only on public objects");
754 }
755 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
756 && vnode->definition
757 && DECL_INITIAL (decl))
758 {
759 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
760 "%<weakref%> attribute ignored"
761 " because variable is initialized");
762 DECL_WEAK (decl) = 0;
763 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
764 DECL_ATTRIBUTES (decl));
765 }
766 process_common_attributes (decl);
767 }
768 }
769
770 /* Mark DECL as finalized. By finalizing the declaration, the frontend instructs the
771 middle end to output the variable to the asm file, if needed or externally
772 visible. */
773
774 void
775 varpool_node::finalize_decl (tree decl)
776 {
777 varpool_node *node = varpool_node::get_create (decl);
778
779 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
780
781 if (node->definition)
782 return;
783 notice_global_symbol (decl);
784 node->definition = true;
785 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
786 /* Traditionally we do not eliminate static variables when not
787 optimizing and when not doing toplevel reorder. */
788 || (!flag_toplevel_reorder && !DECL_COMDAT (node->decl)
789 && !DECL_ARTIFICIAL (node->decl)))
790 node->force_output = true;
791
792 if (symtab->state == CONSTRUCTION
793 && (node->needed_p () || node->referred_to_p ()))
794 enqueue_node (node);
795 if (symtab->state >= IPA_SSA)
796 node->analyze ();
797 /* Some frontends produce various interface variables after compilation
798 has finished. */
799 if (symtab->state == FINISHED
800 || (!flag_toplevel_reorder
801 && symtab->state == EXPANSION))
802 node->assemble_decl ();
803 }
804
805 /* EDGE is a polymorphic call. Mark all possible targets as reachable
806 and if there is only one target, perform trivial devirtualization.
807 REACHABLE_CALL_TARGETS collects target lists we already walked to
808 avoid duplicate work. */
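/* Illustrative C++ source (an assumed example, not taken from this unit) where
   such an edge can be made direct: S is final, so all its derivations are
   known and the virtual call has a single possible target.

     struct S final { virtual int f (); };
     int g (S *p) { return p->f (); }      resolves to S::f  */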
809
810 static void
811 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
812 cgraph_edge *edge)
813 {
814 unsigned int i;
815 void *cache_token;
816 bool final;
817 vec <cgraph_node *>targets
818 = possible_polymorphic_call_targets
819 (edge, &final, &cache_token);
820
821 if (!reachable_call_targets->add (cache_token))
822 {
823 if (symtab->dump_file)
824 dump_possible_polymorphic_call_targets
825 (symtab->dump_file, edge);
826
827 for (i = 0; i < targets.length (); i++)
828 {
829 /* Do not bother to mark virtual methods in anonymous namespace;
830 either we will find use of virtual table defining it, or it is
831 unused. */
832 if (targets[i]->definition
833 && TREE_CODE
834 (TREE_TYPE (targets[i]->decl))
835 == METHOD_TYPE
836 && !type_in_anonymous_namespace_p
837 (method_class_type
838 (TREE_TYPE (targets[i]->decl))))
839 enqueue_node (targets[i]);
840 }
841 }
842
843 /* Very trivial devirtualization; when the type is
844 final or anonymous (so we know all its derivation)
845 and there is only one possible virtual call target,
846 make the edge direct. */
847 if (final)
848 {
849 if (targets.length () <= 1 && dbg_cnt (devirt))
850 {
851 cgraph_node *target;
852 if (targets.length () == 1)
853 target = targets[0];
854 else
855 target = cgraph_node::create
856 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
857
858 if (symtab->dump_file)
859 {
860 fprintf (symtab->dump_file,
861 "Devirtualizing call: ");
862 print_gimple_stmt (symtab->dump_file,
863 edge->call_stmt, 0,
864 TDF_SLIM);
865 }
866 if (dump_enabled_p ())
867 {
868 location_t locus = gimple_location_safe (edge->call_stmt);
869 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
870 "devirtualizing call in %s to %s\n",
871 edge->caller->name (), target->name ());
872 }
873
874 edge->make_direct (target);
875 edge->redirect_call_stmt_to_callee ();
876 if (symtab->dump_file)
877 {
878 fprintf (symtab->dump_file,
879 "Devirtualized as: ");
880 print_gimple_stmt (symtab->dump_file,
881 edge->call_stmt, 0,
882 TDF_SLIM);
883 }
884 }
885 }
886 }
887
888
889 /* Discover all functions and variables that are trivially needed, analyze
890 them as well as all functions and variables referred by them. */
891
892 static void
893 analyze_functions (void)
894 {
895 /* Keep track of already processed nodes when called multiple times for
896 intermodule optimization. */
897 static cgraph_node *first_analyzed;
898 cgraph_node *first_handled = first_analyzed;
899 static varpool_node *first_analyzed_var;
900 varpool_node *first_handled_var = first_analyzed_var;
901 hash_set<void *> reachable_call_targets;
902
903 symtab_node *node;
904 symtab_node *next;
905 int i;
906 ipa_ref *ref;
907 bool changed = true;
908 location_t saved_loc = input_location;
909
910 bitmap_obstack_initialize (NULL);
911 symtab->state = CONSTRUCTION;
912 input_location = UNKNOWN_LOCATION;
913
914 /* Ugly, but the fixup cannot happen at the time the same-body alias is created;
915 the C++ FE is confused about the COMDAT groups being right. */
916 if (symtab->cpp_implicit_aliases_done)
917 FOR_EACH_SYMBOL (node)
918 if (node->cpp_implicit_alias)
919 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
920 if (optimize && flag_devirtualize)
921 build_type_inheritance_graph ();
922
923 /* Analysis adds static variables that in turn add references to new functions.
924 So we need to iterate the process until it stabilizes. */
925 while (changed)
926 {
927 changed = false;
928 process_function_and_variable_attributes (first_analyzed,
929 first_analyzed_var);
930
931 /* First identify the trivially needed symbols. */
932 for (node = symtab->first_symbol ();
933 node != first_analyzed
934 && node != first_analyzed_var; node = node->next)
935 {
936 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
937 node->get_comdat_group_id ();
938 if (node->needed_p ())
939 {
940 enqueue_node (node);
941 if (!changed && symtab->dump_file)
942 fprintf (symtab->dump_file, "Trivially needed symbols:");
943 changed = true;
944 if (symtab->dump_file)
945 fprintf (symtab->dump_file, " %s", node->asm_name ());
946 if (!changed && symtab->dump_file)
947 fprintf (symtab->dump_file, "\n");
948 }
949 if (node == first_analyzed
950 || node == first_analyzed_var)
951 break;
952 }
953 symtab->process_new_functions ();
954 first_analyzed_var = symtab->first_variable ();
955 first_analyzed = symtab->first_function ();
956
957 if (changed && symtab->dump_file)
958 fprintf (symtab->dump_file, "\n");
959
960 /* Lower representation, build callgraph edges and references for all trivially
961 needed symbols and all symbols referred by them. */
962 while (queued_nodes != &symtab_terminator)
963 {
964 changed = true;
965 node = queued_nodes;
966 queued_nodes = (symtab_node *)queued_nodes->aux;
967 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
968 if (cnode && cnode->definition)
969 {
970 cgraph_edge *edge;
971 tree decl = cnode->decl;
972
973 /* ??? It is possible to create an extern inline function
974 and later use the weak alias attribute to kill its body.
975 See gcc.c-torture/compile/20011119-1.c */
976 if (!DECL_STRUCT_FUNCTION (decl)
977 && !cnode->alias
978 && !cnode->thunk.thunk_p
979 && !cnode->dispatcher_function)
980 {
981 cnode->reset ();
982 cnode->local.redefined_extern_inline = true;
983 continue;
984 }
985
986 if (!cnode->analyzed)
987 cnode->analyze ();
988
989 for (edge = cnode->callees; edge; edge = edge->next_callee)
990 if (edge->callee->definition)
991 enqueue_node (edge->callee);
992 if (optimize && flag_devirtualize)
993 {
994 cgraph_edge *next;
995
996 for (edge = cnode->indirect_calls; edge; edge = next)
997 {
998 next = edge->next_callee;
999 if (edge->indirect_info->polymorphic)
1000 walk_polymorphic_call_targets (&reachable_call_targets,
1001 edge);
1002 }
1003 }
1004
1005 /* If decl is a clone of an abstract function,
1006 mark that abstract function so that we don't release its body.
1007 The DECL_INITIAL() of that abstract function declaration
1008 will be later needed to output debug info. */
1009 if (DECL_ABSTRACT_ORIGIN (decl))
1010 {
1011 cgraph_node *origin_node
1012 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1013 origin_node->used_as_abstract_origin = true;
1014 }
1015 }
1016 else
1017 {
1018 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1019 if (vnode && vnode->definition && !vnode->analyzed)
1020 vnode->analyze ();
1021 }
1022
1023 if (node->same_comdat_group)
1024 {
1025 symtab_node *next;
1026 for (next = node->same_comdat_group;
1027 next != node;
1028 next = next->same_comdat_group)
1029 enqueue_node (next);
1030 }
1031 for (i = 0; node->iterate_reference (i, ref); i++)
1032 if (ref->referred->definition)
1033 enqueue_node (ref->referred);
1034 symtab->process_new_functions ();
1035 }
1036 }
1037 if (optimize && flag_devirtualize)
1038 update_type_inheritance_graph ();
1039
1040 /* Collect entry points to the unit. */
1041 if (symtab->dump_file)
1042 {
1043 fprintf (symtab->dump_file, "\n\nInitial ");
1044 symtab_node::dump_table (symtab->dump_file);
1045 }
1046
1047 if (symtab->dump_file)
1048 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1049
1050 for (node = symtab->first_symbol ();
1051 node != first_handled
1052 && node != first_handled_var; node = next)
1053 {
1054 next = node->next;
1055 if (!node->aux && !node->referred_to_p ())
1056 {
1057 if (symtab->dump_file)
1058 fprintf (symtab->dump_file, " %s", node->name ());
1059 node->remove ();
1060 continue;
1061 }
1062 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1063 {
1064 tree decl = node->decl;
1065
1066 if (cnode->definition && !gimple_has_body_p (decl)
1067 && !cnode->alias
1068 && !cnode->thunk.thunk_p)
1069 cnode->reset ();
1070
1071 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1072 || cnode->alias
1073 || gimple_has_body_p (decl));
1074 gcc_assert (cnode->analyzed == cnode->definition);
1075 }
1076 node->aux = NULL;
1077 }
1078 for (;node; node = node->next)
1079 node->aux = NULL;
1080 first_analyzed = symtab->first_function ();
1081 first_analyzed_var = symtab->first_variable ();
1082 if (symtab->dump_file)
1083 {
1084 fprintf (symtab->dump_file, "\n\nReclaimed ");
1085 symtab_node::dump_table (symtab->dump_file);
1086 }
1087 bitmap_obstack_release (NULL);
1088 ggc_collect ();
1089 /* Initialize assembler name hash, in particular we want to trigger C++
1090 mangling and same body alias creation before we free DECL_ARGUMENTS
1091 used by it. */
1092 if (!seen_error ())
1093 symtab->symtab_initialize_asm_name_hash ();
1094
1095 input_location = saved_loc;
1096 }
1097
1098 /* Translate the ugly representation of aliases as alias pairs into nice
1099 representation in the callgraph. We don't handle all cases yet,
1100 unfortunately. */
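/* For instance (an illustrative declaration, not taken from this unit),

     int foo (void) { return 0; }
     int bar (void) __attribute__ ((alias ("foo")));

   arrives here as an alias pair whose decl is "bar" and whose target is the
   assembler name "foo".  */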
1101
1102 static void
1103 handle_alias_pairs (void)
1104 {
1105 alias_pair *p;
1106 unsigned i;
1107
1108 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1109 {
1110 symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1111
1112 /* Weakrefs with target not defined in current unit are easy to handle:
1113 they behave just as external variables except we need to note the
1114 alias flag to later output the weakref pseudo op into asm file. */
1115 if (!target_node
1116 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1117 {
1118 symtab_node *node = symtab_node::get (p->decl);
1119 if (node)
1120 {
1121 node->alias_target = p->target;
1122 node->weakref = true;
1123 node->alias = true;
1124 }
1125 alias_pairs->unordered_remove (i);
1126 continue;
1127 }
1128 else if (!target_node)
1129 {
1130 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1131 symtab_node *node = symtab_node::get (p->decl);
1132 if (node)
1133 node->alias = false;
1134 alias_pairs->unordered_remove (i);
1135 continue;
1136 }
1137
1138 if (DECL_EXTERNAL (target_node->decl)
1139 /* We use local aliases for C++ thunks to force the tailcall
1140 to bind locally. This is a hack - to keep it working do
1141 the following (which is not strictly correct). */
1142 && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1143 || ! DECL_VIRTUAL_P (target_node->decl))
1144 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1145 {
1146 error ("%q+D aliased to external symbol %qE",
1147 p->decl, p->target);
1148 }
1149
1150 if (TREE_CODE (p->decl) == FUNCTION_DECL
1151 && target_node && is_a <cgraph_node *> (target_node))
1152 {
1153 cgraph_node *src_node = cgraph_node::get (p->decl);
1154 if (src_node && src_node->definition)
1155 src_node->reset ();
1156 cgraph_node::create_alias (p->decl, target_node->decl);
1157 alias_pairs->unordered_remove (i);
1158 }
1159 else if (TREE_CODE (p->decl) == VAR_DECL
1160 && target_node && is_a <varpool_node *> (target_node))
1161 {
1162 varpool_node::create_alias (p->decl, target_node->decl);
1163 alias_pairs->unordered_remove (i);
1164 }
1165 else
1166 {
1167 error ("%q+D alias in between function and variable is not supported",
1168 p->decl);
1169 warning (0, "%q+D aliased declaration",
1170 target_node->decl);
1171 alias_pairs->unordered_remove (i);
1172 }
1173 }
1174 vec_free (alias_pairs);
1175 }
1176
1177
1178 /* Figure out what functions we want to assemble. */
1179
1180 static void
1181 mark_functions_to_output (void)
1182 {
1183 cgraph_node *node;
1184 #ifdef ENABLE_CHECKING
1185 bool check_same_comdat_groups = false;
1186
1187 FOR_EACH_FUNCTION (node)
1188 gcc_assert (!node->process);
1189 #endif
1190
1191 FOR_EACH_FUNCTION (node)
1192 {
1193 tree decl = node->decl;
1194
1195 gcc_assert (!node->process || node->same_comdat_group);
1196 if (node->process)
1197 continue;
1198
1199 /* We need to output all local functions that are used and not
1200 always inlined, as well as those that are reachable from
1201 outside the current compilation unit. */
1202 if (node->analyzed
1203 && !node->thunk.thunk_p
1204 && !node->alias
1205 && !node->global.inlined_to
1206 && !TREE_ASM_WRITTEN (decl)
1207 && !DECL_EXTERNAL (decl))
1208 {
1209 node->process = 1;
1210 if (node->same_comdat_group)
1211 {
1212 cgraph_node *next;
1213 for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1214 next != node;
1215 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1216 if (!next->thunk.thunk_p && !next->alias
1217 && !next->comdat_local_p ())
1218 next->process = 1;
1219 }
1220 }
1221 else if (node->same_comdat_group)
1222 {
1223 #ifdef ENABLE_CHECKING
1224 check_same_comdat_groups = true;
1225 #endif
1226 }
1227 else
1228 {
1229 /* We should've reclaimed all functions that are not needed. */
1230 #ifdef ENABLE_CHECKING
1231 if (!node->global.inlined_to
1232 && gimple_has_body_p (decl)
1233 /* FIXME: in an ltrans unit when the offline copy is outside a partition but inline copies
1234 are inside a partition, we can end up not removing the body since we no longer
1235 have an analyzed node pointing to it. */
1236 && !node->in_other_partition
1237 && !node->alias
1238 && !node->clones
1239 && !DECL_EXTERNAL (decl))
1240 {
1241 node->debug ();
1242 internal_error ("failed to reclaim unneeded function");
1243 }
1244 #endif
1245 gcc_assert (node->global.inlined_to
1246 || !gimple_has_body_p (decl)
1247 || node->in_other_partition
1248 || node->clones
1249 || DECL_ARTIFICIAL (decl)
1250 || DECL_EXTERNAL (decl));
1251
1252 }
1253
1254 }
1255 #ifdef ENABLE_CHECKING
1256 if (check_same_comdat_groups)
1257 FOR_EACH_FUNCTION (node)
1258 if (node->same_comdat_group && !node->process)
1259 {
1260 tree decl = node->decl;
1261 if (!node->global.inlined_to
1262 && gimple_has_body_p (decl)
1263 /* FIXME: in an ltrans unit when the offline copy is outside a
1264 partition but inline copies are inside a partition, we can
1265 end up not removing the body since we no longer have an
1266 analyzed node pointing to it. */
1267 && !node->in_other_partition
1268 && !node->clones
1269 && !DECL_EXTERNAL (decl))
1270 {
1271 node->debug ();
1272 internal_error ("failed to reclaim unneeded function in same "
1273 "comdat group");
1274 }
1275 }
1276 #endif
1277 }
1278
1279 /* DECL is a FUNCTION_DECL. Initialize datastructures so DECL is a function
1280 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1281
1282 Set current_function_decl and cfun to newly constructed empty function body.
1283 Return the basic block in the function body. */
1284
1285 basic_block
1286 init_lowered_empty_function (tree decl, bool in_ssa)
1287 {
1288 basic_block bb;
1289
1290 current_function_decl = decl;
1291 allocate_struct_function (decl, false);
1292 gimple_register_cfg_hooks ();
1293 init_empty_tree_cfg ();
1294
1295 if (in_ssa)
1296 {
1297 init_tree_ssa (cfun);
1298 init_ssa_operands (cfun);
1299 cfun->gimple_df->in_ssa_p = true;
1300 cfun->curr_properties |= PROP_ssa;
1301 }
1302
1303 DECL_INITIAL (decl) = make_node (BLOCK);
1304
1305 DECL_SAVED_TREE (decl) = error_mark_node;
1306 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1307 | PROP_cfg | PROP_loops);
1308
1309 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1310 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1311 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1312
1313 /* Create BB for body of the function and connect it properly. */
1314 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1315 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1316 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1317 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1318
1319 return bb;
1320 }
1321
1322 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1323 offset indicated by VIRTUAL_OFFSET, if that is
1324 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1325 zero for a result adjusting thunk. */
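/* For a this-adjusting thunk the emitted gimple amounts to (a sketch with
   assumed temporary names, mirroring the statements built below):

     ptr  = ptr + FIXED_OFFSET;                 constant adjustment
     vptr = *(void ***) ptr;                    the vptr is at offset zero
     ptr  = ptr + *(vptr + VIRTUAL_OFFSET);     vcall offset from the vtable

   For a result-adjusting thunk the constant adjustment is applied after the
   vtable lookup instead.  */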
1326
1327 static tree
1328 thunk_adjust (gimple_stmt_iterator * bsi,
1329 tree ptr, bool this_adjusting,
1330 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1331 {
1332 gimple stmt;
1333 tree ret;
1334
1335 if (this_adjusting
1336 && fixed_offset != 0)
1337 {
1338 stmt = gimple_build_assign
1339 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1340 ptr,
1341 fixed_offset));
1342 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1343 }
1344
1345 /* If there's a virtual offset, look up that value in the vtable and
1346 adjust the pointer again. */
1347 if (virtual_offset)
1348 {
1349 tree vtabletmp;
1350 tree vtabletmp2;
1351 tree vtabletmp3;
1352
1353 if (!vtable_entry_type)
1354 {
1355 tree vfunc_type = make_node (FUNCTION_TYPE);
1356 TREE_TYPE (vfunc_type) = integer_type_node;
1357 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1358 layout_type (vfunc_type);
1359
1360 vtable_entry_type = build_pointer_type (vfunc_type);
1361 }
1362
1363 vtabletmp =
1364 create_tmp_reg (build_pointer_type
1365 (build_pointer_type (vtable_entry_type)), "vptr");
1366
1367 /* The vptr is always at offset zero in the object. */
1368 stmt = gimple_build_assign (vtabletmp,
1369 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1370 ptr));
1371 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1372
1373 /* Form the vtable address. */
1374 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1375 "vtableaddr");
1376 stmt = gimple_build_assign (vtabletmp2,
1377 build_simple_mem_ref (vtabletmp));
1378 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1379
1380 /* Find the entry with the vcall offset. */
1381 stmt = gimple_build_assign (vtabletmp2,
1382 fold_build_pointer_plus_loc (input_location,
1383 vtabletmp2,
1384 virtual_offset));
1385 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1386
1387 /* Get the offset itself. */
1388 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1389 "vcalloffset");
1390 stmt = gimple_build_assign (vtabletmp3,
1391 build_simple_mem_ref (vtabletmp2));
1392 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1393
1394 /* Adjust the `this' pointer. */
1395 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1396 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1397 GSI_CONTINUE_LINKING);
1398 }
1399
1400 if (!this_adjusting
1401 && fixed_offset != 0)
1402 /* Adjust the pointer by the constant. */
1403 {
1404 tree ptrtmp;
1405
1406 if (TREE_CODE (ptr) == VAR_DECL)
1407 ptrtmp = ptr;
1408 else
1409 {
1410 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1411 stmt = gimple_build_assign (ptrtmp, ptr);
1412 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1413 }
1414 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1415 ptrtmp, fixed_offset);
1416 }
1417
1418 /* Emit the statement and gimplify the adjustment expression. */
1419 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1420 stmt = gimple_build_assign (ret, ptr);
1421 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1422
1423 return ret;
1424 }
1425
1426 /* Expand thunk NODE to gimple if possible.
1427 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1428 no assembler is produced.
1429 When OUTPUT_ASM_THUNK is true, also produce assembler for
1430 thunks that are not lowered. */
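/* Illustrative C++ source (an assumed example, not taken from this unit) that
   needs a this-adjusting thunk: the entry for C::f in the B-in-C vtable must
   shift `this' from the B subobject back to the enclosing C before
   tail-calling the real C::f.

     struct A { virtual void f (); int a; };
     struct B { virtual void f (); int b; };
     struct C : A, B { virtual void f (); };  */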
1431
1432 bool
1433 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1434 {
1435 bool this_adjusting = thunk.this_adjusting;
1436 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1437 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1438 tree virtual_offset = NULL;
1439 tree alias = callees->callee->decl;
1440 tree thunk_fndecl = decl;
1441 tree a;
1442
1443
1444 if (!force_gimple_thunk && this_adjusting
1445 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1446 virtual_value, alias))
1447 {
1448 const char *fnname;
1449 tree fn_block;
1450 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1451
1452 if (!output_asm_thunks)
1453 {
1454 analyzed = true;
1455 return false;
1456 }
1457
1458 if (in_lto_p)
1459 get_body ();
1460 a = DECL_ARGUMENTS (thunk_fndecl);
1461
1462 current_function_decl = thunk_fndecl;
1463
1464 /* Ensure thunks are emitted in their correct sections. */
1465 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1466
1467 DECL_RESULT (thunk_fndecl)
1468 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1469 RESULT_DECL, 0, restype);
1470 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1471 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1472
1473 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1474 create one. */
1475 fn_block = make_node (BLOCK);
1476 BLOCK_VARS (fn_block) = a;
1477 DECL_INITIAL (thunk_fndecl) = fn_block;
1478 init_function_start (thunk_fndecl);
1479 cfun->is_thunk = 1;
1480 insn_locations_init ();
1481 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1482 prologue_location = curr_insn_location ();
1483 assemble_start_function (thunk_fndecl, fnname);
1484
1485 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1486 fixed_offset, virtual_value, alias);
1487
1488 assemble_end_function (thunk_fndecl, fnname);
1489 insn_locations_finalize ();
1490 init_insn_lengths ();
1491 free_after_compilation (cfun);
1492 set_cfun (NULL);
1493 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1494 thunk.thunk_p = false;
1495 analyzed = false;
1496 }
1497 else
1498 {
1499 tree restype;
1500 basic_block bb, then_bb, else_bb, return_bb;
1501 gimple_stmt_iterator bsi;
1502 int nargs = 0;
1503 tree arg;
1504 int i;
1505 tree resdecl;
1506 tree restmp = NULL;
1507
1508 gimple call;
1509 gimple ret;
1510
1511 if (in_lto_p)
1512 get_body ();
1513 a = DECL_ARGUMENTS (thunk_fndecl);
1514
1515 current_function_decl = thunk_fndecl;
1516
1517 /* Ensure thunks are emitted in their correct sections. */
1518 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1519
1520 DECL_IGNORED_P (thunk_fndecl) = 1;
1521 bitmap_obstack_initialize (NULL);
1522
1523 if (thunk.virtual_offset_p)
1524 virtual_offset = size_int (virtual_value);
1525
1526 /* Build the return declaration for the function. */
1527 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1528 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1529 {
1530 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1531 DECL_ARTIFICIAL (resdecl) = 1;
1532 DECL_IGNORED_P (resdecl) = 1;
1533 DECL_RESULT (thunk_fndecl) = resdecl;
1534 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1535 }
1536 else
1537 resdecl = DECL_RESULT (thunk_fndecl);
1538
1539 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);
1540
1541 bsi = gsi_start_bb (bb);
1542
1543 /* Build call to the function being thunked. */
1544 if (!VOID_TYPE_P (restype))
1545 {
1546 if (DECL_BY_REFERENCE (resdecl))
1547 restmp = gimple_fold_indirect_ref (resdecl);
1548 else if (!is_gimple_reg_type (restype))
1549 {
1550 restmp = resdecl;
1551 add_local_decl (cfun, restmp);
1552 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1553 }
1554 else
1555 restmp = create_tmp_reg (restype, "retval");
1556 }
1557
1558 for (arg = a; arg; arg = DECL_CHAIN (arg))
1559 nargs++;
1560 auto_vec<tree> vargs (nargs);
1561 if (this_adjusting)
1562 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1563 virtual_offset));
1564 else if (nargs)
1565 vargs.quick_push (a);
1566
1567 if (nargs)
1568 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1569 {
1570 tree tmp = arg;
1571 if (!is_gimple_val (arg))
1572 {
1573 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1574 (TREE_TYPE (arg)), "arg");
1575 gimple stmt = gimple_build_assign (tmp, arg);
1576 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1577 }
1578 vargs.quick_push (tmp);
1579 }
1580 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1581 callees->call_stmt = call;
1582 gimple_call_set_from_thunk (call, true);
1583 if (restmp)
1584 {
1585 gimple_call_set_lhs (call, restmp);
1586 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1587 TREE_TYPE (TREE_TYPE (alias))));
1588 }
1589 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1590 if (!(gimple_call_flags (call) & ECF_NORETURN))
1591 {
1592 if (restmp && !this_adjusting
1593 && (fixed_offset || virtual_offset))
1594 {
1595 tree true_label = NULL_TREE;
1596
1597 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1598 {
1599 gimple stmt;
1600 /* If the return type is a pointer, we need to
1601 protect against NULL. We know there will be an
1602 adjustment, because that's why we're emitting a
1603 thunk. */
1604 then_bb = create_basic_block (NULL, (void *) 0, bb);
1605 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1606 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1607 add_bb_to_loop (then_bb, bb->loop_father);
1608 add_bb_to_loop (return_bb, bb->loop_father);
1609 add_bb_to_loop (else_bb, bb->loop_father);
1610 remove_edge (single_succ_edge (bb));
1611 true_label = gimple_block_label (then_bb);
1612 stmt = gimple_build_cond (NE_EXPR, restmp,
1613 build_zero_cst (TREE_TYPE (restmp)),
1614 NULL_TREE, NULL_TREE);
1615 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1616 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1617 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1618 make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1619 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1620 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1621 bsi = gsi_last_bb (then_bb);
1622 }
1623
1624 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1625 fixed_offset, virtual_offset);
1626 if (true_label)
1627 {
1628 gimple stmt;
1629 bsi = gsi_last_bb (else_bb);
1630 stmt = gimple_build_assign (restmp,
1631 build_zero_cst (TREE_TYPE (restmp)));
1632 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1633 bsi = gsi_last_bb (return_bb);
1634 }
1635 }
1636 else
1637 gimple_call_set_tail (call, true);
1638
1639 /* Build return value. */
1640 ret = gimple_build_return (restmp);
1641 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1642 }
1643 else
1644 {
1645 gimple_call_set_tail (call, true);
1646 remove_edge (single_succ_edge (bb));
1647 }
1648
1649 cfun->gimple_df->in_ssa_p = true;
1650 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
1651 TREE_ASM_WRITTEN (thunk_fndecl) = false;
1652 delete_unreachable_blocks ();
1653 update_ssa (TODO_update_ssa);
1654 #ifdef ENABLE_CHECKING
1655 verify_flow_info ();
1656 #endif
1657 free_dominance_info (CDI_DOMINATORS);
1658
1659 /* Since we want to emit the thunk, we explicitly mark its name as
1660 referenced. */
1661 thunk.thunk_p = false;
1662 lowered = true;
1663 bitmap_obstack_release (NULL);
1664 }
1665 current_function_decl = NULL;
1666 set_cfun (NULL);
1667 return true;
1668 }
1669
1670 /* Assemble thunks and aliases associated with the node. */
1671
1672 void
1673 cgraph_node::assemble_thunks_and_aliases (void)
1674 {
1675 cgraph_edge *e;
1676 ipa_ref *ref;
1677
1678 for (e = callers; e;)
1679 if (e->caller->thunk.thunk_p)
1680 {
1681 cgraph_node *thunk = e->caller;
1682
1683 e = e->next_caller;
1684 thunk->expand_thunk (true, false);
1685 thunk->assemble_thunks_and_aliases ();
1686 }
1687 else
1688 e = e->next_caller;
1689
1690 FOR_EACH_ALIAS (this, ref)
1691 {
1692 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1693 bool saved_written = TREE_ASM_WRITTEN (decl);
1694
1695 /* Force assemble_alias to really output the alias this time instead
1696 of buffering it in same alias pairs. */
1697 TREE_ASM_WRITTEN (decl) = 1;
1698 do_assemble_alias (alias->decl,
1699 DECL_ASSEMBLER_NAME (decl));
1700 alias->assemble_thunks_and_aliases ();
1701 TREE_ASM_WRITTEN (decl) = saved_written;
1702 }
1703 }
1704
1705 /* Expand function specified by node. */
1706
1707 void
1708 cgraph_node::expand (void)
1709 {
1710 location_t saved_loc;
1711
1712 /* We ought to not compile any inline clones. */
1713 gcc_assert (!global.inlined_to);
1714
1715 announce_function (decl);
1716 process = 0;
1717 gcc_assert (lowered);
1718 get_body ();
1719
1720 /* Generate RTL for the body of DECL. */
1721
1722 timevar_push (TV_REST_OF_COMPILATION);
1723
1724 gcc_assert (symtab->global_info_ready);
1725
1726 /* Initialize the default bitmap obstack. */
1727 bitmap_obstack_initialize (NULL);
1728
1729 /* Initialize the RTL code for the function. */
1730 current_function_decl = decl;
1731 saved_loc = input_location;
1732 input_location = DECL_SOURCE_LOCATION (decl);
1733 init_function_start (decl);
1734
1735 gimple_register_cfg_hooks ();
1736
1737 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
1738
1739 execute_all_ipa_transforms ();
1740
1741 /* Perform all tree transforms and optimizations. */
1742
1743 /* Signal the start of passes. */
1744 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1745
1746 execute_pass_list (cfun, g->get_passes ()->all_passes);
1747
1748 /* Signal the end of passes. */
1749 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1750
1751 bitmap_obstack_release (&reg_obstack);
1752
1753 /* Release the default bitmap obstack. */
1754 bitmap_obstack_release (NULL);
1755
1756 /* If requested, warn about function definitions where the function will
1757 return a value (usually of some struct or union type) which itself will
1758 take up a lot of stack space. */
1759 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1760 {
1761 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1762
1763 if (ret_type && TYPE_SIZE_UNIT (ret_type)
1764 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1765 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1766 larger_than_size))
1767 {
1768 unsigned int size_as_int
1769 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1770
1771 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1772 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1773 decl, size_as_int);
1774 else
1775 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1776 decl, larger_than_size);
1777 }
1778 }
1779
1780 gimple_set_body (decl, NULL);
1781 if (DECL_STRUCT_FUNCTION (decl) == 0
1782 && !cgraph_node::get (decl)->origin)
1783 {
1784 /* Stop pointing to the local nodes about to be freed.
1785 But DECL_INITIAL must remain nonzero so we know this
1786 was an actual function definition.
1787 For a nested function, this is done in c_pop_function_context.
1788 If rest_of_compilation set this to 0, leave it 0. */
1789 if (DECL_INITIAL (decl) != 0)
1790 DECL_INITIAL (decl) = error_mark_node;
1791 }
1792
1793 input_location = saved_loc;
1794
1795 ggc_collect ();
1796 timevar_pop (TV_REST_OF_COMPILATION);
1797
1798 /* Make sure that BE didn't give up on compiling. */
1799 gcc_assert (TREE_ASM_WRITTEN (decl));
1800 set_cfun (NULL);
1801 current_function_decl = NULL;
1802
1803 /* It would make a lot more sense to output thunks before the function body to
1804 get more forward and fewer backward jumps. This, however, would require
1805 solving a problem with comdats. See PR48668. Also, aliases must come after
1806 the function itself to keep one-pass assemblers, like the one on AIX, happy.
1807 See PR 50689. FIXME: Perhaps thunks should be moved before the function IFF
1808 they are not in comdat groups. */
1809 assemble_thunks_and_aliases ();
1810 release_body ();
1811 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1812 points to the dead function body. */
1813 remove_callees ();
1814 remove_all_references ();
1815 }
1816
1817 /* Node comparator used to order functions by the time they were
1818 first executed (their time profile). */
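/* An illustrative sketch (hypothetical values): given three nodes with
   tp_first_run values 3, 1 and 0, qsort with this comparator places the
   node without profile data (0) first and the profiled nodes in
   descending order (3, then 1).  expand_all_functions then walks the
   sorted array backwards, so the functions are expanded in the order
   1, 3, and finally the one without a time profile.  */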
1819
1820 static int
1821 node_cmp (const void *pa, const void *pb)
1822 {
1823 const cgraph_node *a = *(const cgraph_node * const *) pa;
1824 const cgraph_node *b = *(const cgraph_node * const *) pb;
1825
1826 /* Functions with time profile must come before those without profile. */
1827 if (!a->tp_first_run || !b->tp_first_run)
1828 return a->tp_first_run - b->tp_first_run;
1829
1830 return a->tp_first_run != b->tp_first_run
1831 ? b->tp_first_run - a->tp_first_run
1832 : b->order - a->order;
1833 }
1834
1835 /* Expand all functions that must be output.
1836
1837 Attempt to topologically sort the nodes so that a function is output
1838 only after all the functions it calls have been assembled, allowing
1839 data to be propagated across the callgraph. Use a stack to get a
1840 smaller distance between a function and its callees (later we may
1841 choose to use a more sophisticated algorithm for function reordering;
1842 we will likely want to use subsections to make the output functions
1843 appear in top-down order). */
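/* A minimal sketch (hypothetical call graph): if main calls foo and foo
   calls bar, the intent of the ordering below is that bar is expanded
   first, then foo, then main, so each function is assembled only after
   its callees.  */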
1844
1845 static void
1846 expand_all_functions (void)
1847 {
1848 cgraph_node *node;
1849 cgraph_node **order = XCNEWVEC (cgraph_node *,
1850 symtab->cgraph_count);
1851 unsigned int expanded_func_count = 0, profiled_func_count = 0;
1852 int order_pos, new_order_pos = 0;
1853 int i;
1854
1855 order_pos = ipa_reverse_postorder (order);
1856 gcc_assert (order_pos == symtab->cgraph_count);
1857
1858 /* The garbage collector may remove inline clones that we eliminate during
1859 optimization, so we must be sure not to reference them. */
1860 for (i = 0; i < order_pos; i++)
1861 if (order[i]->process)
1862 order[new_order_pos++] = order[i];
1863
1864 if (flag_profile_reorder_functions)
1865 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
1866
1867 for (i = new_order_pos - 1; i >= 0; i--)
1868 {
1869 node = order[i];
1870
1871 if (node->process)
1872 {
1873 expanded_func_count++;
1874 if (node->tp_first_run)
1875 profiled_func_count++;
1876
1877 if (symtab->dump_file)
1878 fprintf (symtab->dump_file,
1879 "Time profile order in expand_all_functions:%s:%d\n",
1880 node->asm_name (), node->tp_first_run);
1881 node->process = 0;
1882 node->expand ();
1883 }
1884 }
1885
1886 if (dump_file)
1887 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
1888 main_input_filename, profiled_func_count, expanded_func_count);
1889
1890 if (symtab->dump_file && flag_profile_reorder_functions)
1891 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
1892 profiled_func_count, expanded_func_count);
1893
1894 symtab->process_new_functions ();
1895 free_gimplify_stack ();
1896
1897 free (order);
1898 }
1899
1900 /* This is used to sort the node types by the cgraph order number. */
1901
1902 enum cgraph_order_sort_kind
1903 {
1904 ORDER_UNDEFINED = 0,
1905 ORDER_FUNCTION,
1906 ORDER_VAR,
1907 ORDER_ASM
1908 };
1909
1910 struct cgraph_order_sort
1911 {
1912 enum cgraph_order_sort_kind kind;
1913 union
1914 {
1915 cgraph_node *f;
1916 varpool_node *v;
1917 asm_node *a;
1918 } u;
1919 };
1920
1921 /* Output all functions, variables, and asm statements in the order
1922 according to their order fields, which is the order in which they
1923 appeared in the file. This implements -fno-toplevel-reorder. In
1924 this mode we may output functions and variables which don't really
1925 need to be output. */
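/* A small illustration (hypothetical translation unit): given

     int counter;
     asm ("# marker");
     void tick (void) { counter++; }

   and -fno-toplevel-reorder, the variable, the asm statement and the
   function are emitted below in exactly this source order, driven by
   their order fields.  */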
1926
1927 static void
1928 output_in_order (void)
1929 {
1930 int max;
1931 cgraph_order_sort *nodes;
1932 int i;
1933 cgraph_node *pf;
1934 varpool_node *pv;
1935 asm_node *pa;
1936 max = symtab->order;
1937 nodes = XCNEWVEC (cgraph_order_sort, max);
1938
1939 FOR_EACH_DEFINED_FUNCTION (pf)
1940 {
1941 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1942 {
1943 i = pf->order;
1944 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1945 nodes[i].kind = ORDER_FUNCTION;
1946 nodes[i].u.f = pf;
1947 }
1948 }
1949
1950 FOR_EACH_DEFINED_VARIABLE (pv)
1951 if (!DECL_EXTERNAL (pv->decl))
1952 {
1953 i = pv->order;
1954 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1955 nodes[i].kind = ORDER_VAR;
1956 nodes[i].u.v = pv;
1957 }
1958
1959 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
1960 {
1961 i = pa->order;
1962 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1963 nodes[i].kind = ORDER_ASM;
1964 nodes[i].u.a = pa;
1965 }
1966
1967 /* In no-toplevel-reorder mode we output all statics; mark them as needed. */
1968
1969 for (i = 0; i < max; ++i)
1970 if (nodes[i].kind == ORDER_VAR)
1971 nodes[i].u.v->finalize_named_section_flags ();
1972
1973 for (i = 0; i < max; ++i)
1974 {
1975 switch (nodes[i].kind)
1976 {
1977 case ORDER_FUNCTION:
1978 nodes[i].u.f->process = 0;
1979 nodes[i].u.f->expand ();
1980 break;
1981
1982 case ORDER_VAR:
1983 nodes[i].u.v->assemble_decl ();
1984 break;
1985
1986 case ORDER_ASM:
1987 assemble_asm (nodes[i].u.a->asm_str);
1988 break;
1989
1990 case ORDER_UNDEFINED:
1991 break;
1992
1993 default:
1994 gcc_unreachable ();
1995 }
1996 }
1997
1998 symtab->clear_asm_symbols ();
1999
2000 free (nodes);
2001 }
2002
2003 static void
2004 ipa_passes (void)
2005 {
2006 gcc::pass_manager *passes = g->get_passes ();
2007
2008 set_cfun (NULL);
2009 current_function_decl = NULL;
2010 gimple_register_cfg_hooks ();
2011 bitmap_obstack_initialize (NULL);
2012
2013 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2014
2015 if (!in_lto_p)
2016 {
2017 execute_ipa_pass_list (passes->all_small_ipa_passes);
2018 if (seen_error ())
2019 return;
2020 }
2021
2022 /* This extra symtab_remove_unreachable_nodes pass tends to catch code made
2023 unreachable by devirtualization and other changes, where removal can iterate. */
2024 symtab->remove_unreachable_nodes (true, symtab->dump_file);
2025
2026 /* If pass_all_early_optimizations was not scheduled, the state of
2027 the cgraph will not be properly updated. Update it now. */
2028 if (symtab->state < IPA_SSA)
2029 symtab->state = IPA_SSA;
2030
2031 if (!in_lto_p)
2032 {
2033 /* Generate coverage variables and constructors. */
2034 coverage_finish ();
2035
2036 /* Process new functions added. */
2037 set_cfun (NULL);
2038 current_function_decl = NULL;
2039 symtab->process_new_functions ();
2040
2041 execute_ipa_summary_passes
2042 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2043 }
2044
2045 /* Some targets need to handle LTO assembler output specially. */
2046 if (flag_generate_lto)
2047 targetm.asm_out.lto_start ();
2048
2049 if (!in_lto_p)
2050 ipa_write_summaries ();
2051
2052 if (flag_generate_lto)
2053 targetm.asm_out.lto_end ();
2054
2055 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2056 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2057 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2058
2059 bitmap_obstack_release (NULL);
2060 }
2061
2062
2063 /* Return, as an identifier, the alias target named by DECL's "alias" attribute. */
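/* For example (hypothetical declaration), given

     void f (void) __attribute__ ((alias ("real_f")));

   the "alias" attribute carries the string "real_f" and this function
   returns the identifier node built from it.  */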
2064
2065 static tree
2066 get_alias_symbol (tree decl)
2067 {
2068 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2069 return get_identifier (TREE_STRING_POINTER
2070 (TREE_VALUE (TREE_VALUE (alias))));
2071 }
2072
2073
2074 /* Weakrefs may be associated with external decls and thus not output
2075 at expansion time. Emit all necessary aliases. */
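/* An illustrative case (hypothetical declaration):

     static void f (void) __attribute__ ((weakref ("target")));

   where "target" is not defined in this unit; f itself is never expanded,
   so the weakref alias has to be emitted here.  */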
2076
2077 void
2078 symbol_table::output_weakrefs (void)
2079 {
2080 symtab_node *node;
2081 FOR_EACH_SYMBOL (node)
2082 if (node->alias
2083 && !TREE_ASM_WRITTEN (node->decl)
2084 && node->weakref)
2085 {
2086 tree target;
2087
2088 /* Weakrefs are special in not requiring a target definition in the current
2089 compilation unit. It is thus a bit hard to work out what we want to
2090 alias.
2091 When the alias target is defined, we need to fetch it from the symtab
2092 reference, otherwise it is pointed to by alias_target. */
2093 if (node->alias_target)
2094 target = (DECL_P (node->alias_target)
2095 ? DECL_ASSEMBLER_NAME (node->alias_target)
2096 : node->alias_target);
2097 else if (node->analyzed)
2098 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2099 else
2100 {
2101 gcc_unreachable ();
2102 target = get_alias_symbol (node->decl);
2103 }
2104 do_assemble_alias (node->decl, target);
2105 }
2106 }
2107
2108 /* Perform simple optimizations based on the callgraph and output the unit. */
2109
2110 void
2111 symbol_table::compile (void)
2112 {
2113 if (seen_error ())
2114 return;
2115
2116 #ifdef ENABLE_CHECKING
2117 symtab_node::verify_symtab_nodes ();
2118 #endif
2119
2120 timevar_push (TV_CGRAPHOPT);
2121 if (pre_ipa_mem_report)
2122 {
2123 fprintf (stderr, "Memory consumption before IPA\n");
2124 dump_memory_report (false);
2125 }
2126 if (!quiet_flag)
2127 fprintf (stderr, "Performing interprocedural optimizations\n");
2128 state = IPA;
2129
2130 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2131 if (flag_lto)
2132 lto_streamer_hooks_init ();
2133
2134 /* Don't run the IPA passes if there were any errors or sorry messages. */
2135 if (!seen_error ())
2136 ipa_passes ();
2137
2138 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2139 if (seen_error ()
2140 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2141 {
2142 timevar_pop (TV_CGRAPHOPT);
2143 return;
2144 }
2145
2146 /* This pass removes the bodies of extern inline functions we never inlined.
2147 Do this later so other IPA passes see what is really going on.
2148 FIXME: This should be run just after inlining by the pass manager. */
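/* For instance (hypothetical GNU89-style code):

     extern inline int sq (int x) { return x * x; }

   if sq was never inlined, its body is of no further use and is
   discarded by the call below.  */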
2149 remove_unreachable_nodes (false, dump_file);
2150 global_info_ready = true;
2151 if (dump_file)
2152 {
2153 fprintf (dump_file, "Optimized ");
2154 symtab_node::dump_table (dump_file);
2155 }
2156 if (post_ipa_mem_report)
2157 {
2158 fprintf (stderr, "Memory consumption after IPA\n");
2159 dump_memory_report (false);
2160 }
2161 timevar_pop (TV_CGRAPHOPT);
2162
2163 /* Output everything. */
2164 (*debug_hooks->assembly_start) ();
2165 if (!quiet_flag)
2166 fprintf (stderr, "Assembling functions:\n");
2167 #ifdef ENABLE_CHECKING
2168 symtab_node::verify_symtab_nodes ();
2169 #endif
2170
2171 materialize_all_clones ();
2172 bitmap_obstack_initialize (NULL);
2173 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2174 bitmap_obstack_release (NULL);
2175 mark_functions_to_output ();
2176
2177 /* When weakref support is missing, we automatically translate all
2178 references to NODE to references to its ultimate alias target.
2179 The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2180 TREE_CHAIN.
2181
2182 Set up this mapping before we output any assembler, but once we are sure
2183 that all symbol renaming is done.
2184
2185 FIXME: All this ugliness can go away if we just do renaming at the gimple
2186 level by physically rewriting the IL. At the moment we can only redirect
2187 calls, so we need infrastructure for renaming references as well. */
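/* Sketch of the mapping set up below (hypothetical symbols): for a weakref
   whose assembler name is "f_wr" and whose ultimate alias target is
   "f_target", IDENTIFIER_TRANSPARENT_ALIAS is set on "f_wr" and its
   TREE_CHAIN is pointed at "f_target", so any reference assembled through
   "f_wr" is transparently redirected to "f_target".  */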
2188 #ifndef ASM_OUTPUT_WEAKREF
2189 symtab_node *node;
2190
2191 FOR_EACH_SYMBOL (node)
2192 if (node->alias
2193 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2194 {
2195 IDENTIFIER_TRANSPARENT_ALIAS
2196 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2197 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2198 = (node->alias_target ? node->alias_target
2199 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2200 }
2201 #endif
2202
2203 state = EXPANSION;
2204
2205 if (!flag_toplevel_reorder)
2206 output_in_order ();
2207 else
2208 {
2209 output_asm_statements ();
2210
2211 expand_all_functions ();
2212 output_variables ();
2213 }
2214
2215 process_new_functions ();
2216 state = FINISHED;
2217 output_weakrefs ();
2218
2219 if (dump_file)
2220 {
2221 fprintf (dump_file, "\nFinal ");
2222 symtab_node::dump_table (dump_file);
2223 }
2224 #ifdef ENABLE_CHECKING
2225 symtab_node::verify_symtab_nodes ();
2226 /* Double check that all inline clones are gone and that all
2227 function bodies have been released from memory. */
2228 if (!seen_error ())
2229 {
2230 cgraph_node *node;
2231 bool error_found = false;
2232
2233 FOR_EACH_DEFINED_FUNCTION (node)
2234 if (node->global.inlined_to
2235 || gimple_has_body_p (node->decl))
2236 {
2237 error_found = true;
2238 node->debug ();
2239 }
2240 if (error_found)
2241 internal_error ("nodes with unreleased memory found");
2242 }
2243 #endif
2244 }
2245
2246
2247 /* Analyze the whole compilation unit once it is parsed completely. */
2248
2249 void
2250 symbol_table::finalize_compilation_unit (void)
2251 {
2252 timevar_push (TV_CGRAPH);
2253
2254 /* If we're here, there is no current function anymore. Some frontends
2255 are lazy in clearing these. */
2256 current_function_decl = NULL;
2257 set_cfun (NULL);
2258
2259 /* Do not skip analyzing the functions if there were errors; otherwise
2260 we would miss diagnostics for the following functions. */
2261
2262 /* Emit size functions we didn't inline. */
2263 finalize_size_functions ();
2264
2265 /* Mark alias targets necessary and emit diagnostics. */
2266 handle_alias_pairs ();
2267
2268 if (!quiet_flag)
2269 {
2270 fprintf (stderr, "\nAnalyzing compilation unit\n");
2271 fflush (stderr);
2272 }
2273
2274 if (flag_dump_passes)
2275 dump_passes ();
2276
2277 /* Gimplify and lower all functions, compute reachability and
2278 remove unreachable nodes. */
2279 analyze_functions ();
2280
2281 /* Mark alias targets necessary and emit diagnostics. */
2282 handle_alias_pairs ();
2283
2284 /* Gimplify and lower thunks. */
2285 analyze_functions ();
2286
2287 /* Finally drive the pass manager. */
2288 compile ();
2289
2290 timevar_pop (TV_CGRAPH);
2291 }
2292
2293 /* Create a wrapper around TARGET node; a thunk is used to implement this
2294 kind of wrapper method. */
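/* Conceptually (an illustrative sketch, not actual source), the resulting
   wrapper behaves like

     ret_type this_function (args) { return target (args); }

   and is represented as a non-this-adjusting thunk to TARGET rather than
   as a regular function body.  */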
2295
2296 void
2297 cgraph_node::create_wrapper (cgraph_node *target)
2298 {
2299 /* Preserve DECL_RESULT so we get the right by-reference flag. */
2300 tree decl_result = DECL_RESULT (decl);
2301
2302 /* Remove the function's body. */
2303 release_body ();
2304 reset ();
2305
2306 DECL_RESULT (decl) = decl_result;
2307 DECL_INITIAL (decl) = NULL;
2308 allocate_struct_function (decl, false);
2309 set_cfun (NULL);
2310
2311 /* Turn the alias into a thunk and expand it into GIMPLE representation. */
2312 definition = true;
2313 thunk.thunk_p = true;
2314 thunk.this_adjusting = false;
2315
2316 cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);
2317
2318 expand_thunk (false, true);
2319 e->call_stmt_cannot_inline_p = true;
2320
2321 /* Inline summary set-up. */
2322 analyze ();
2323 inline_analyze_function (this);
2324 }
2325
2326 #include "gt-cgraphunit.h"