1 /* Driver of optimization process
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements the main driver of the compilation process.
22
23    The main scope of this file is to act as an interface between
24    tree-based frontends and the backend.
25
26    The front-end is supposed to use the following functionality:
27
28 - finalize_function
29
30      This function is called once the front end has parsed the whole body of a function
31      and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38      This function has the same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43      Inserts a new toplevel ASM statement.
44
45 - finalize_compilation_unit
46
47      This function is called once the (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66      indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78	 The purpose of early optimizations is to optimize away simple
79	 things that may otherwise confuse IP analysis.  Very simple
80	 propagation across the callgraph is done, i.e. to discover
81	 functions without side effects, and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86	 time.  These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93	 passes by their ability to operate across the whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96	 not having to represent the whole program in memory.
97
98       d) LTO streaming.  When doing LTO, everything important gets
99 streamed into the object file.
100
101    Compile time and/or linktime analysis stage (WPA):
102
103	  At linktime the units get streamed back and the symbol table is
104	  merged.  Function bodies are not streamed in and are not
105	  available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110	  is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115	 separately.  Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121      IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125      turned into real functions.
126 3) IP transformation
127
128      All IP passes transform function bodies based on the earlier
129      decisions of the IP propagation.
130
131 4) late small IP passes
132
133      Simple IP passes working within a single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138      At this stage functions that need to be output into
139      assembler are identified and compiled in topological order.
140    6) Output of variables and aliases
141      Now it is known which variable references were not optimized
142      out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148    the backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151      The functions are enqueued for later processing and inserted
152      into the callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156      produces a copy of a function into a new one (a version)
157      and applies simple transformations.
158 */
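
/* As a rough sketch of how a front end drives the interface described
   above (schematic only, not literal GCC code: the parse_* and is_*
   helpers are placeholders for whatever the front end actually does),
   the sequence is roughly:

       while (parse_next_toplevel_declaration (&decl))
         {
           if (is_function_definition (decl))
             finalize_function (decl);
           else if (is_variable_definition (decl))
             varpool_finalize_decl (decl);
           else if (is_toplevel_asm (decl))
             add_asm_node (asm_string);
         }
       finalize_compilation_unit ();
       compile ();

   i.e. symbols are handed over one at a time as they are parsed and the
   whole-unit work only starts once the unit is complete.  */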
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "varasm.h"
166 #include "stor-layout.h"
167 #include "stringpool.h"
168 #include "output.h"
169 #include "rtl.h"
170 #include "predict.h"
171 #include "vec.h"
172 #include "hashtab.h"
173 #include "hash-set.h"
174 #include "machmode.h"
175 #include "hard-reg-set.h"
176 #include "input.h"
177 #include "function.h"
178 #include "basic-block.h"
179 #include "tree-ssa-alias.h"
180 #include "internal-fn.h"
181 #include "gimple-fold.h"
182 #include "gimple-expr.h"
183 #include "is-a.h"
184 #include "gimple.h"
185 #include "gimplify.h"
186 #include "gimple-iterator.h"
187 #include "gimplify-me.h"
188 #include "gimple-ssa.h"
189 #include "tree-cfg.h"
190 #include "tree-into-ssa.h"
191 #include "tree-ssa.h"
192 #include "tree-inline.h"
193 #include "langhooks.h"
194 #include "toplev.h"
195 #include "flags.h"
196 #include "debug.h"
197 #include "target.h"
198 #include "diagnostic.h"
199 #include "params.h"
200 #include "fibheap.h"
201 #include "intl.h"
202 #include "hash-map.h"
203 #include "plugin-api.h"
204 #include "ipa-ref.h"
205 #include "cgraph.h"
206 #include "alloc-pool.h"
207 #include "ipa-prop.h"
208 #include "tree-iterator.h"
209 #include "tree-pass.h"
210 #include "tree-dump.h"
211 #include "gimple-pretty-print.h"
212 #include "output.h"
213 #include "coverage.h"
214 #include "plugin.h"
215 #include "ipa-inline.h"
216 #include "ipa-utils.h"
217 #include "lto-streamer.h"
218 #include "except.h"
219 #include "cfgloop.h"
220 #include "regset.h" /* FIXME: For reg_obstack. */
221 #include "context.h"
222 #include "pass_manager.h"
223 #include "tree-nested.h"
224 #include "gimplify.h"
225 #include "dbgcnt.h"
226
227 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
228 secondary queue used during optimization to accommodate passes that
229 may generate new functions that need to be optimized and expanded. */
230 vec<cgraph_node *> cgraph_new_nodes;
231
232 static void expand_all_functions (void);
233 static void mark_functions_to_output (void);
234 static void handle_alias_pairs (void);
235
236 /* Used for vtable lookup in thunk adjusting. */
237 static GTY (()) tree vtable_entry_type;
238
239 /* Determine if the symbol declaration is needed.  That is, visible to something
240    either outside this translation unit or to something magic in the system
241    configury.  */
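/* For instance (an illustrative example, not taken from the sources), in a
   translation unit containing

       static int helper (void) { return 0; }                     // not needed by itself
       __attribute__ ((constructor)) static void init (void) {}   // needed: static ctor
       int exported_counter;                                      // needed: public, non-COMDAT

   only the last two symbols are trivially needed; HELPER becomes needed
   only once something reachable refers to it.  */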
242 bool
243 symtab_node::needed_p (void)
244 {
245 /* Double check that no one output the function into assembly file
246 early. */
247 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
248 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
249
250 if (!definition)
251 return false;
252
253 if (DECL_EXTERNAL (decl))
254 return false;
255
256 /* If the user told us it is used, then it must be so. */
257 if (force_output)
258 return true;
259
260 /* ABI forced symbols are needed when they are external. */
261 if (forced_by_abi && TREE_PUBLIC (decl))
262 return true;
263
264 /* Keep constructors, destructors and virtual functions. */
265 if (TREE_CODE (decl) == FUNCTION_DECL
266 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
267 return true;
268
269 /* Externally visible variables must be output. The exception is
270 COMDAT variables that must be output only when they are needed. */
271 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
272 return true;
273
274 return false;
275 }
276
277 /* Head and terminator of the queue of nodes to be processed while building
278 callgraph. */
279
280 static symtab_node symtab_terminator;
281 static symtab_node *queued_nodes = &symtab_terminator;
282
283 /* Add NODE to queue starting at QUEUED_NODES.
284    The queue is linked via AUX pointers and terminated by a pointer to SYMTAB_TERMINATOR.  */
285
286 static void
287 enqueue_node (symtab_node *node)
288 {
289 if (node->aux)
290 return;
291 gcc_checking_assert (queued_nodes);
292 node->aux = queued_nodes;
293 queued_nodes = node;
294 }
295
296 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
297    functions into the callgraph in a way so that they look like ordinary reachable
298    functions inserted into the callgraph already at construction time.  */
299
300 void
301 symbol_table::process_new_functions (void)
302 {
303 tree fndecl;
304
305 if (!cgraph_new_nodes.exists ())
306 return;
307
308 handle_alias_pairs ();
309   /* Note that this queue may grow as it is being processed, as the new
310 functions may generate new ones. */
311 for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
312 {
313 cgraph_node *node = cgraph_new_nodes[i];
314 fndecl = node->decl;
315 switch (state)
316 {
317 case CONSTRUCTION:
318 /* At construction time we just need to finalize function and move
319 it into reachable functions list. */
320
321 cgraph_node::finalize_function (fndecl, false);
322 call_cgraph_insertion_hooks (node);
323 enqueue_node (node);
324 break;
325
326 case IPA:
327 case IPA_SSA:
328	  /* When IPA optimization has already started, do all essential
329	     transformations that have already been performed on the whole
330	     cgraph but not on this function.  */
331
332 gimple_register_cfg_hooks ();
333 if (!node->analyzed)
334 node->analyze ();
335 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
336 if (state == IPA_SSA
337 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
338 g->get_passes ()->execute_early_local_passes ();
339 else if (inline_summary_vec != NULL)
340 compute_inline_parameters (node, true);
341 free_dominance_info (CDI_POST_DOMINATORS);
342 free_dominance_info (CDI_DOMINATORS);
343 pop_cfun ();
344 call_cgraph_insertion_hooks (node);
345 break;
346
347 case EXPANSION:
348 /* Functions created during expansion shall be compiled
349 directly. */
350 node->process = 0;
351 call_cgraph_insertion_hooks (node);
352 node->expand ();
353 break;
354
355 default:
356 gcc_unreachable ();
357 break;
358 }
359 }
360
361 cgraph_new_nodes.release ();
362 }
363
364 /* As a GCC extension we allow redefinition of the function.  The
365    semantics when the two bodies differ are not well defined.
366    We replace the old body with the new body, so in unit-at-a-time mode
367    we always use the new body, while in normal mode we may end up with
368    the old body inlined into some functions and the new body expanded and
369    inlined in others.
370
371    ??? It may make more sense to use one body for inlining and the other
372 body for expanding the function but this is difficult to do. */
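
/* For example (GNU89 extern inline, the extension mentioned above):

       extern inline int f (void) { return 1; }   // first body
       int f (void) { return 2; }                 // redefinition

   When the second definition is finalized, the node created for the first
   body is reset and rebuilt from the new body, so in unit-at-a-time mode
   every call ends up using the second body.  */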
373
374 void
375 cgraph_node::reset (void)
376 {
377 /* If process is set, then we have already begun whole-unit analysis.
378 This is *not* testing for whether we've already emitted the function.
379 That case can be sort-of legitimately seen with real function redefinition
380 errors. I would argue that the front end should never present us with
381 such a case, but don't enforce that for now. */
382 gcc_assert (!process);
383
384 /* Reset our data structures so we can analyze the function again. */
385 memset (&local, 0, sizeof (local));
386 memset (&global, 0, sizeof (global));
387 memset (&rtl, 0, sizeof (rtl));
388 analyzed = false;
389 definition = false;
390 alias = false;
391 weakref = false;
392 cpp_implicit_alias = false;
393
394 remove_callees ();
395 remove_all_references ();
396 }
397
398 /* Return true when there are references to the node. */
399
400 bool
401 symtab_node::referred_to_p (void)
402 {
403 ipa_ref *ref = NULL;
404
405 /* See if there are any references at all. */
406 if (iterate_referring (0, ref))
407 return true;
408 /* For functions check also calls. */
409 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
410 if (cn && cn->callers)
411 return true;
412 return false;
413 }
414
415 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
416 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
417 the garbage collector run at the moment. We would need to either create
418 a new GC context, or just not compile right now. */
419
420 void
421 cgraph_node::finalize_function (tree decl, bool no_collect)
422 {
423 cgraph_node *node = cgraph_node::get_create (decl);
424
425 if (node->definition)
426 {
427 /* Nested functions should only be defined once. */
428 gcc_assert (!DECL_CONTEXT (decl)
429 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
430 node->reset ();
431 node->local.redefined_extern_inline = true;
432 }
433
434 notice_global_symbol (decl);
435 node->definition = true;
436 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
437
438 /* With -fkeep-inline-functions we are keeping all inline functions except
439 for extern inline ones. */
440 if (flag_keep_inline_functions
441 && DECL_DECLARED_INLINE_P (decl)
442 && !DECL_EXTERNAL (decl)
443 && !DECL_DISREGARD_INLINE_LIMITS (decl))
444 node->force_output = 1;
445
446   /* When not optimizing, also output the static functions (see
447 PR24561), but don't do so for always_inline functions, functions
448 declared inline and nested functions. These were optimized out
449 in the original implementation and it is unclear whether we want
450 to change the behavior here. */
451 if ((!optimize
452 && !node->cpp_implicit_alias
453 && !DECL_DISREGARD_INLINE_LIMITS (decl)
454 && !DECL_DECLARED_INLINE_P (decl)
455 && !(DECL_CONTEXT (decl)
456 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
457 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
458 node->force_output = 1;
459
460 /* If we've not yet emitted decl, tell the debug info about it. */
461 if (!TREE_ASM_WRITTEN (decl))
462 (*debug_hooks->deferred_inline_function) (decl);
463
464 /* Possibly warn about unused parameters. */
465 if (warn_unused_parameter)
466 do_warn_unused_parameter (decl);
467
468 if (!no_collect)
469 ggc_collect ();
470
471 if (symtab->state == CONSTRUCTION
472 && (node->needed_p () || node->referred_to_p ()))
473 enqueue_node (node);
474 }
475
476 /* Add the function FNDECL to the call graph.
477 Unlike finalize_function, this function is intended to be used
478    by the middle end and allows insertion of a new function at an arbitrary point
479 of compilation. The function can be either in high, low or SSA form
480 GIMPLE.
481
482    The function is assumed to be reachable and to have its address taken (so no
483 API breaking optimizations are performed on it).
484
485    The main work done by this function is to enqueue the function for later
486    processing, to avoid the need for the passes to be re-entrant.  */
487
488 void
489 cgraph_node::add_new_function (tree fndecl, bool lowered)
490 {
491 gcc::pass_manager *passes = g->get_passes ();
492 cgraph_node *node;
493 switch (symtab->state)
494 {
495 case PARSING:
496 cgraph_node::finalize_function (fndecl, false);
497 break;
498 case CONSTRUCTION:
499       /* Just enqueue the function to be processed at the nearest occurrence.  */
500 node = cgraph_node::get_create (fndecl);
501 if (lowered)
502 node->lowered = true;
503 cgraph_new_nodes.safe_push (node);
504 break;
505
506 case IPA:
507 case IPA_SSA:
508 case EXPANSION:
509       /* Bring the function into finalized state and enqueue it for later
510	 analysis and compilation.  */
511 node = cgraph_node::get_create (fndecl);
512 node->local.local = false;
513 node->definition = true;
514 node->force_output = true;
515 if (!lowered && symtab->state == EXPANSION)
516 {
517 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
518 gimple_register_cfg_hooks ();
519 bitmap_obstack_initialize (NULL);
520 execute_pass_list (cfun, passes->all_lowering_passes);
521 passes->execute_early_local_passes ();
522 bitmap_obstack_release (NULL);
523 pop_cfun ();
524
525 lowered = true;
526 }
527 if (lowered)
528 node->lowered = true;
529 cgraph_new_nodes.safe_push (node);
530 break;
531
532 case FINISHED:
533 /* At the very end of compilation we have to do all the work up
534 to expansion. */
535 node = cgraph_node::create (fndecl);
536 if (lowered)
537 node->lowered = true;
538 node->definition = true;
539 node->analyze ();
540 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
541 gimple_register_cfg_hooks ();
542 bitmap_obstack_initialize (NULL);
543 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
544 g->get_passes ()->execute_early_local_passes ();
545 bitmap_obstack_release (NULL);
546 pop_cfun ();
547 node->expand ();
548 break;
549
550 default:
551 gcc_unreachable ();
552 }
553
554 /* Set a personality if required and we already passed EH lowering. */
555 if (lowered
556 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
557 == eh_personality_lang))
558 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
559 }
560
561 /* Analyze the function scheduled to be output. */
562 void
563 cgraph_node::analyze (void)
564 {
565 tree decl = this->decl;
566 location_t saved_loc = input_location;
567 input_location = DECL_SOURCE_LOCATION (decl);
568
569 if (thunk.thunk_p)
570 {
571 create_edge (cgraph_node::get (thunk.alias),
572 NULL, 0, CGRAPH_FREQ_BASE);
573 if (!expand_thunk (false, false))
574 {
575 thunk.alias = NULL;
576 return;
577 }
578 thunk.alias = NULL;
579 }
580 if (alias)
581 resolve_alias (cgraph_node::get (alias_target));
582 else if (dispatcher_function)
583 {
584 /* Generate the dispatcher body of multi-versioned functions. */
585 cgraph_function_version_info *dispatcher_version_info
586 = function_version ();
587 if (dispatcher_version_info != NULL
588 && (dispatcher_version_info->dispatcher_resolver
589 == NULL_TREE))
590 {
591 tree resolver = NULL_TREE;
592 gcc_assert (targetm.generate_version_dispatcher_body);
593 resolver = targetm.generate_version_dispatcher_body (this);
594 gcc_assert (resolver != NULL_TREE);
595 }
596 }
597 else
598 {
599 push_cfun (DECL_STRUCT_FUNCTION (decl));
600
601 assign_assembler_name_if_neeeded (decl);
602
603 /* Make sure to gimplify bodies only once. During analyzing a
604 function we lower it, which will require gimplified nested
605 functions, so we can end up here with an already gimplified
606 body. */
607 if (!gimple_has_body_p (decl))
608 gimplify_function_tree (decl);
609 dump_function (TDI_generic, decl);
610
611 /* Lower the function. */
612 if (!lowered)
613 {
614 if (nested)
615 lower_nested_functions (decl);
616 gcc_assert (!nested);
617
618 gimple_register_cfg_hooks ();
619 bitmap_obstack_initialize (NULL);
620 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
621 free_dominance_info (CDI_POST_DOMINATORS);
622 free_dominance_info (CDI_DOMINATORS);
623 compact_blocks ();
624 bitmap_obstack_release (NULL);
625 lowered = true;
626 }
627
628 pop_cfun ();
629 }
630 analyzed = true;
631
632 input_location = saved_loc;
633 }
634
635 /* The C++ frontend produces same body aliases all over the place, even before PCH
636    gets streamed out.  It relies on us linking the aliases with their function
637    in order to do the fixups, but ipa-ref is not PCH safe.  Consequently we
638    first produce aliases without links, but once the C++ FE is sure it won't stream
639    PCH we build the links via this function.  */
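/* A typical source of such aliases (illustrative C++):

       struct S { S () {} };

   Here the complete-object constructor of S is emitted as a same body
   alias of the base-object constructor when the two are identical (e.g.
   no virtual bases), and the link between the two symbols is what gets
   (re)built by this function once PCH streaming is no longer a concern.  */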
640
641 void
642 symbol_table::process_same_body_aliases (void)
643 {
644 symtab_node *node;
645 FOR_EACH_SYMBOL (node)
646 if (node->cpp_implicit_alias && !node->analyzed)
647 node->resolve_alias
648 (TREE_CODE (node->alias_target) == VAR_DECL
649 ? (symtab_node *)varpool_node::get_create (node->alias_target)
650 : (symtab_node *)cgraph_node::get_create (node->alias_target));
651 cpp_implicit_aliases_done = true;
652 }
653
654 /* Process attributes common for vars and functions. */
655
656 static void
657 process_common_attributes (symtab_node *node, tree decl)
658 {
659 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
660
661 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
662 {
663 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
664 "%<weakref%> attribute should be accompanied with"
665 " an %<alias%> attribute");
666 DECL_WEAK (decl) = 0;
667 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
668 DECL_ATTRIBUTES (decl));
669 }
670
671 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
672 node->no_reorder = 1;
673 }
674
675 /* Look for externally_visible and used attributes and mark cgraph nodes
676 accordingly.
677
678 We cannot mark the nodes at the point the attributes are processed (in
679 handle_*_attribute) because the copy of the declarations available at that
680 point may not be canonical. For example, in:
681
682 void f();
683 void f() __attribute__((used));
684
685 the declaration we see in handle_used_attribute will be the second
686 declaration -- but the front end will subsequently merge that declaration
687 with the original declaration and discard the second declaration.
688
689 Furthermore, we can't mark these nodes in finalize_function because:
690
691 void f() {}
692 void f() __attribute__((externally_visible));
693
694 is valid.
695
696 So, we walk the nodes at the end of the translation unit, applying the
697 attributes at that point. */
698
699 static void
700 process_function_and_variable_attributes (cgraph_node *first,
701 varpool_node *first_var)
702 {
703 cgraph_node *node;
704 varpool_node *vnode;
705
706 for (node = symtab->first_function (); node != first;
707 node = symtab->next_function (node))
708 {
709 tree decl = node->decl;
710 if (DECL_PRESERVE_P (decl))
711 node->mark_force_output ();
712 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
713 {
714 if (! TREE_PUBLIC (node->decl))
715 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
716 "%<externally_visible%>"
717 " attribute have effect only on public objects");
718 }
719 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
720 && (node->definition && !node->alias))
721 {
722 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
723 "%<weakref%> attribute ignored"
724 " because function is defined");
725 DECL_WEAK (decl) = 0;
726 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
727 DECL_ATTRIBUTES (decl));
728 }
729
730 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
731 && !DECL_DECLARED_INLINE_P (decl)
732 /* redefining extern inline function makes it DECL_UNINLINABLE. */
733 && !DECL_UNINLINABLE (decl))
734 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
735 "always_inline function might not be inlinable");
736
737 process_common_attributes (node, decl);
738 }
739 for (vnode = symtab->first_variable (); vnode != first_var;
740 vnode = symtab->next_variable (vnode))
741 {
742 tree decl = vnode->decl;
743 if (DECL_EXTERNAL (decl)
744 && DECL_INITIAL (decl))
745 varpool_node::finalize_decl (decl);
746 if (DECL_PRESERVE_P (decl))
747 vnode->force_output = true;
748 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
749 {
750 if (! TREE_PUBLIC (vnode->decl))
751 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
752 "%<externally_visible%>"
753 " attribute have effect only on public objects");
754 }
755 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
756 && vnode->definition
757 && DECL_INITIAL (decl))
758 {
759 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
760 "%<weakref%> attribute ignored"
761 " because variable is initialized");
762 DECL_WEAK (decl) = 0;
763 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
764 DECL_ATTRIBUTES (decl));
765 }
766 process_common_attributes (vnode, decl);
767 }
768 }
769
770 /* Mark DECL as finalized.  By finalizing the declaration, the frontend instructs the
771    middle end to output the variable to the asm file, if it is needed or externally
772    visible.  */
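/* A few illustrative cases of the force_output logic below:

       volatile int hw_flag;                      // volatile: always output
       static int keep __attribute__ ((used));    // "used": always output
       static int scratch;                        // may be optimized away if
                                                  // nothing refers to it

   Only the last declaration is a candidate for elimination.  */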
773
774 void
775 varpool_node::finalize_decl (tree decl)
776 {
777 varpool_node *node = varpool_node::get_create (decl);
778
779 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
780
781 if (node->definition)
782 return;
783 notice_global_symbol (decl);
784 node->definition = true;
785 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
786 /* Traditionally we do not eliminate static variables when not
787	 optimizing and when not doing toplevel reorder.  */
788 || node->no_reorder
789 || ((!flag_toplevel_reorder
790 && !DECL_COMDAT (node->decl)
791 && !DECL_ARTIFICIAL (node->decl))))
792 node->force_output = true;
793
794 if (symtab->state == CONSTRUCTION
795 && (node->needed_p () || node->referred_to_p ()))
796 enqueue_node (node);
797 if (symtab->state >= IPA_SSA)
798 node->analyze ();
799 /* Some frontends produce various interface variables after compilation
800      has finished.  */
801 if (symtab->state == FINISHED
802 || (!flag_toplevel_reorder
803 && symtab->state == EXPANSION))
804 node->assemble_decl ();
805 }
806
807 /* EDGE is a polymorphic call.  Mark all possible targets as reachable
808    and, if there is only one target, perform trivial devirtualization.
809    REACHABLE_CALL_TARGETS collects target lists we already walked to
810    avoid duplicate work.  */
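/* As an illustration (C++):

       struct S final { virtual int f () { return 1; } };
       int g (S *p) { return p->f (); }

   Because S is final, the call in g has a single possible target, S::f,
   so the indirect edge can be turned into a direct call here.  */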
811
812 static void
813 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
814 cgraph_edge *edge)
815 {
816 unsigned int i;
817 void *cache_token;
818 bool final;
819 vec <cgraph_node *>targets
820 = possible_polymorphic_call_targets
821 (edge, &final, &cache_token);
822
823 if (!reachable_call_targets->add (cache_token))
824 {
825 if (symtab->dump_file)
826 dump_possible_polymorphic_call_targets
827 (symtab->dump_file, edge);
828
829 for (i = 0; i < targets.length (); i++)
830 {
831	  /* Do not bother to mark virtual methods in an anonymous namespace;
832	     either we will find a use of the virtual table defining it, or it is
833	     unused.  */
834 if (targets[i]->definition
835 && TREE_CODE
836 (TREE_TYPE (targets[i]->decl))
837 == METHOD_TYPE
838 && !type_in_anonymous_namespace_p
839 (method_class_type
840 (TREE_TYPE (targets[i]->decl))))
841 enqueue_node (targets[i]);
842 }
843 }
844
845 /* Very trivial devirtualization; when the type is
846      final or anonymous (so we know all its derived types)
847 and there is only one possible virtual call target,
848 make the edge direct. */
849 if (final)
850 {
851 if (targets.length () <= 1 && dbg_cnt (devirt))
852 {
853 cgraph_node *target;
854 if (targets.length () == 1)
855 target = targets[0];
856 else
857 target = cgraph_node::create
858 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
859
860 if (symtab->dump_file)
861 {
862 fprintf (symtab->dump_file,
863 "Devirtualizing call: ");
864 print_gimple_stmt (symtab->dump_file,
865 edge->call_stmt, 0,
866 TDF_SLIM);
867 }
868 if (dump_enabled_p ())
869 {
870 location_t locus = gimple_location_safe (edge->call_stmt);
871 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
872 "devirtualizing call in %s to %s\n",
873 edge->caller->name (), target->name ());
874 }
875
876 edge->make_direct (target);
877 edge->redirect_call_stmt_to_callee ();
878 if (symtab->dump_file)
879 {
880 fprintf (symtab->dump_file,
881 "Devirtualized as: ");
882 print_gimple_stmt (symtab->dump_file,
883 edge->call_stmt, 0,
884 TDF_SLIM);
885 }
886 }
887 }
888 }
889
890
891 /* Discover all functions and variables that are trivially needed, analyze
892    them as well as all functions and variables referred to by them.  */
893 static cgraph_node *first_analyzed;
894 static varpool_node *first_analyzed_var;
895
896 static void
897 analyze_functions (void)
898 {
899 /* Keep track of already processed nodes when called multiple times for
900 intermodule optimization. */
901 cgraph_node *first_handled = first_analyzed;
902 varpool_node *first_handled_var = first_analyzed_var;
903 hash_set<void *> reachable_call_targets;
904
905 symtab_node *node;
906 symtab_node *next;
907 int i;
908 ipa_ref *ref;
909 bool changed = true;
910 location_t saved_loc = input_location;
911
912 bitmap_obstack_initialize (NULL);
913 symtab->state = CONSTRUCTION;
914 input_location = UNKNOWN_LOCATION;
915
916   /* Ugly, but the fixup cannot happen at the time the same body alias is created;
917      the C++ FE is confused about the COMDAT groups being right.  */
918 if (symtab->cpp_implicit_aliases_done)
919 FOR_EACH_SYMBOL (node)
920 if (node->cpp_implicit_alias)
921 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
922 if (optimize && flag_devirtualize)
923 build_type_inheritance_graph ();
924
925   /* Analysis adds static variables that in turn add references to new functions.
926      So we need to iterate the process until it stabilizes.  */
927 while (changed)
928 {
929 changed = false;
930 process_function_and_variable_attributes (first_analyzed,
931 first_analyzed_var);
932
933 /* First identify the trivially needed symbols. */
934 for (node = symtab->first_symbol ();
935 node != first_analyzed
936 && node != first_analyzed_var; node = node->next)
937 {
938 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
939 node->get_comdat_group_id ();
940 if (node->needed_p ())
941 {
942 enqueue_node (node);
943 if (!changed && symtab->dump_file)
944 fprintf (symtab->dump_file, "Trivially needed symbols:");
945 changed = true;
946 if (symtab->dump_file)
947 fprintf (symtab->dump_file, " %s", node->asm_name ());
948 if (!changed && symtab->dump_file)
949 fprintf (symtab->dump_file, "\n");
950 }
951 if (node == first_analyzed
952 || node == first_analyzed_var)
953 break;
954 }
955 symtab->process_new_functions ();
956 first_analyzed_var = symtab->first_variable ();
957 first_analyzed = symtab->first_function ();
958
959 if (changed && symtab->dump_file)
960 fprintf (symtab->dump_file, "\n");
961
962 /* Lower representation, build callgraph edges and references for all trivially
963	 needed symbols and all symbols referred to by them.  */
964 while (queued_nodes != &symtab_terminator)
965 {
966 changed = true;
967 node = queued_nodes;
968 queued_nodes = (symtab_node *)queued_nodes->aux;
969 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
970 if (cnode && cnode->definition)
971 {
972 cgraph_edge *edge;
973 tree decl = cnode->decl;
974
975	      /* ??? It is possible to create an extern inline function
976	      and later use the weak alias attribute to kill its body.
977	      See gcc.c-torture/compile/20011119-1.c  */
978 if (!DECL_STRUCT_FUNCTION (decl)
979 && !cnode->alias
980 && !cnode->thunk.thunk_p
981 && !cnode->dispatcher_function)
982 {
983 cnode->reset ();
984 cnode->local.redefined_extern_inline = true;
985 continue;
986 }
987
988 if (!cnode->analyzed)
989 cnode->analyze ();
990
991 for (edge = cnode->callees; edge; edge = edge->next_callee)
992 if (edge->callee->definition)
993 enqueue_node (edge->callee);
994 if (optimize && flag_devirtualize)
995 {
996 cgraph_edge *next;
997
998 for (edge = cnode->indirect_calls; edge; edge = next)
999 {
1000 next = edge->next_callee;
1001 if (edge->indirect_info->polymorphic)
1002 walk_polymorphic_call_targets (&reachable_call_targets,
1003 edge);
1004 }
1005 }
1006
1007 /* If decl is a clone of an abstract function,
1008 mark that abstract function so that we don't release its body.
1009 The DECL_INITIAL() of that abstract function declaration
1010 will be later needed to output debug info. */
1011 if (DECL_ABSTRACT_ORIGIN (decl))
1012 {
1013 cgraph_node *origin_node
1014 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1015 origin_node->used_as_abstract_origin = true;
1016 }
1017 }
1018 else
1019 {
1020 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1021 if (vnode && vnode->definition && !vnode->analyzed)
1022 vnode->analyze ();
1023 }
1024
1025 if (node->same_comdat_group)
1026 {
1027 symtab_node *next;
1028 for (next = node->same_comdat_group;
1029 next != node;
1030 next = next->same_comdat_group)
1031 enqueue_node (next);
1032 }
1033 for (i = 0; node->iterate_reference (i, ref); i++)
1034 if (ref->referred->definition)
1035 enqueue_node (ref->referred);
1036 symtab->process_new_functions ();
1037 }
1038 }
1039 if (optimize && flag_devirtualize)
1040 update_type_inheritance_graph ();
1041
1042 /* Collect entry points to the unit. */
1043 if (symtab->dump_file)
1044 {
1045 fprintf (symtab->dump_file, "\n\nInitial ");
1046 symtab_node::dump_table (symtab->dump_file);
1047 }
1048
1049 if (symtab->dump_file)
1050 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1051
1052 for (node = symtab->first_symbol ();
1053 node != first_handled
1054 && node != first_handled_var; node = next)
1055 {
1056 next = node->next;
1057 if (!node->aux && !node->referred_to_p ())
1058 {
1059 if (symtab->dump_file)
1060 fprintf (symtab->dump_file, " %s", node->name ());
1061 node->remove ();
1062 continue;
1063 }
1064 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1065 {
1066 tree decl = node->decl;
1067
1068 if (cnode->definition && !gimple_has_body_p (decl)
1069 && !cnode->alias
1070 && !cnode->thunk.thunk_p)
1071 cnode->reset ();
1072
1073 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1074 || cnode->alias
1075 || gimple_has_body_p (decl));
1076 gcc_assert (cnode->analyzed == cnode->definition);
1077 }
1078 node->aux = NULL;
1079 }
1080 for (;node; node = node->next)
1081 node->aux = NULL;
1082 first_analyzed = symtab->first_function ();
1083 first_analyzed_var = symtab->first_variable ();
1084 if (symtab->dump_file)
1085 {
1086 fprintf (symtab->dump_file, "\n\nReclaimed ");
1087 symtab_node::dump_table (symtab->dump_file);
1088 }
1089 bitmap_obstack_release (NULL);
1090 ggc_collect ();
1091   /* Initialize the assembler name hash; in particular we want to trigger C++
1092      mangling and same body alias creation before we free DECL_ARGUMENTS,
1093      which is used by it.  */
1094 if (!seen_error ())
1095 symtab->symtab_initialize_asm_name_hash ();
1096
1097 input_location = saved_loc;
1098 }
1099
1100 /* Translate the ugly representation of aliases as alias pairs into the nice
1101    representation in the callgraph.  We don't handle all cases yet,
1102 unfortunately. */
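/* The pairs handled here come from declarations such as (illustrative):

       int real_fn (void) { return 0; }
       int alias_fn (void) __attribute__ ((alias ("real_fn")));
       static int weak_fn (void) __attribute__ ((weakref ("real_fn")));

   where each attribute records a (decl, target assembler name) pair that
   is resolved against the symbol table below.  */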
1103
1104 static void
1105 handle_alias_pairs (void)
1106 {
1107 alias_pair *p;
1108 unsigned i;
1109
1110 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1111 {
1112 symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1113
1114       /* Weakrefs with a target not defined in the current unit are easy to handle:
1115	  they behave just like external variables except that we need to note the
1116	  alias flag to later output the weakref pseudo-op into the asm file.  */
1117 if (!target_node
1118 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1119 {
1120 symtab_node *node = symtab_node::get (p->decl);
1121 if (node)
1122 {
1123 node->alias_target = p->target;
1124 node->weakref = true;
1125 node->alias = true;
1126 }
1127 alias_pairs->unordered_remove (i);
1128 continue;
1129 }
1130 else if (!target_node)
1131 {
1132 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1133 symtab_node *node = symtab_node::get (p->decl);
1134 if (node)
1135 node->alias = false;
1136 alias_pairs->unordered_remove (i);
1137 continue;
1138 }
1139
1140 if (DECL_EXTERNAL (target_node->decl)
1141 /* We use local aliases for C++ thunks to force the tailcall
1142 to bind locally. This is a hack - to keep it working do
1143 the following (which is not strictly correct). */
1144 && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1145 || ! DECL_VIRTUAL_P (target_node->decl))
1146 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1147 {
1148 error ("%q+D aliased to external symbol %qE",
1149 p->decl, p->target);
1150 }
1151
1152 if (TREE_CODE (p->decl) == FUNCTION_DECL
1153 && target_node && is_a <cgraph_node *> (target_node))
1154 {
1155 cgraph_node *src_node = cgraph_node::get (p->decl);
1156 if (src_node && src_node->definition)
1157 src_node->reset ();
1158 cgraph_node::create_alias (p->decl, target_node->decl);
1159 alias_pairs->unordered_remove (i);
1160 }
1161 else if (TREE_CODE (p->decl) == VAR_DECL
1162 && target_node && is_a <varpool_node *> (target_node))
1163 {
1164 varpool_node::create_alias (p->decl, target_node->decl);
1165 alias_pairs->unordered_remove (i);
1166 }
1167 else
1168 {
1169 error ("%q+D alias in between function and variable is not supported",
1170 p->decl);
1171 warning (0, "%q+D aliased declaration",
1172 target_node->decl);
1173 alias_pairs->unordered_remove (i);
1174 }
1175 }
1176 vec_free (alias_pairs);
1177 }
1178
1179
1180 /* Figure out what functions we want to assemble. */
1181
1182 static void
1183 mark_functions_to_output (void)
1184 {
1185 cgraph_node *node;
1186 #ifdef ENABLE_CHECKING
1187 bool check_same_comdat_groups = false;
1188
1189 FOR_EACH_FUNCTION (node)
1190 gcc_assert (!node->process);
1191 #endif
1192
1193 FOR_EACH_FUNCTION (node)
1194 {
1195 tree decl = node->decl;
1196
1197 gcc_assert (!node->process || node->same_comdat_group);
1198 if (node->process)
1199 continue;
1200
1201 /* We need to output all local functions that are used and not
1202 always inlined, as well as those that are reachable from
1203 outside the current compilation unit. */
1204 if (node->analyzed
1205 && !node->thunk.thunk_p
1206 && !node->alias
1207 && !node->global.inlined_to
1208 && !TREE_ASM_WRITTEN (decl)
1209 && !DECL_EXTERNAL (decl))
1210 {
1211 node->process = 1;
1212 if (node->same_comdat_group)
1213 {
1214 cgraph_node *next;
1215 for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1216 next != node;
1217 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1218 if (!next->thunk.thunk_p && !next->alias
1219 && !next->comdat_local_p ())
1220 next->process = 1;
1221 }
1222 }
1223 else if (node->same_comdat_group)
1224 {
1225 #ifdef ENABLE_CHECKING
1226 check_same_comdat_groups = true;
1227 #endif
1228 }
1229 else
1230 {
1231 /* We should've reclaimed all functions that are not needed. */
1232 #ifdef ENABLE_CHECKING
1233 if (!node->global.inlined_to
1234 && gimple_has_body_p (decl)
1235	      /* FIXME: in an ltrans unit when the offline copy is outside a partition but inline
1236		 copies are inside a partition, we can end up not removing the body since we no
1237		 longer have an analyzed node pointing to it.  */
1238 && !node->in_other_partition
1239 && !node->alias
1240 && !node->clones
1241 && !DECL_EXTERNAL (decl))
1242 {
1243 node->debug ();
1244 internal_error ("failed to reclaim unneeded function");
1245 }
1246 #endif
1247 gcc_assert (node->global.inlined_to
1248 || !gimple_has_body_p (decl)
1249 || node->in_other_partition
1250 || node->clones
1251 || DECL_ARTIFICIAL (decl)
1252 || DECL_EXTERNAL (decl));
1253
1254 }
1255
1256 }
1257 #ifdef ENABLE_CHECKING
1258 if (check_same_comdat_groups)
1259 FOR_EACH_FUNCTION (node)
1260 if (node->same_comdat_group && !node->process)
1261 {
1262 tree decl = node->decl;
1263 if (!node->global.inlined_to
1264 && gimple_has_body_p (decl)
1265 /* FIXME: in an ltrans unit when the offline copy is outside a
1266 partition but inline copies are inside a partition, we can
1267 end up not removing the body since we no longer have an
1268 analyzed node pointing to it. */
1269 && !node->in_other_partition
1270 && !node->clones
1271 && !DECL_EXTERNAL (decl))
1272 {
1273 node->debug ();
1274 internal_error ("failed to reclaim unneeded function in same "
1275 "comdat group");
1276 }
1277 }
1278 #endif
1279 }
1280
1281 /* DECL is a FUNCTION_DECL.  Initialize data structures so DECL is a function
1282    in lowered GIMPLE form.  IN_SSA is true if the GIMPLE is in SSA form.
1283
1284    Set current_function_decl and cfun to the newly constructed empty function body.
1285    Return the basic block in the function body.  */
1286
1287 basic_block
1288 init_lowered_empty_function (tree decl, bool in_ssa)
1289 {
1290 basic_block bb;
1291
1292 current_function_decl = decl;
1293 allocate_struct_function (decl, false);
1294 gimple_register_cfg_hooks ();
1295 init_empty_tree_cfg ();
1296
1297 if (in_ssa)
1298 {
1299 init_tree_ssa (cfun);
1300 init_ssa_operands (cfun);
1301 cfun->gimple_df->in_ssa_p = true;
1302 cfun->curr_properties |= PROP_ssa;
1303 }
1304
1305 DECL_INITIAL (decl) = make_node (BLOCK);
1306
1307 DECL_SAVED_TREE (decl) = error_mark_node;
1308 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1309 | PROP_cfg | PROP_loops);
1310
1311 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1312 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1313 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1314
1315 /* Create BB for body of the function and connect it properly. */
1316 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1317 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1318 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1319 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1320
1321 return bb;
1322 }
1323
1324 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1325 offset indicated by VIRTUAL_OFFSET, if that is
1326 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1327 zero for a result adjusting thunk. */
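/* For instance (C++), with

       struct A { virtual int f (); int a; };
       struct B { virtual int g (); int b; };
       struct C : A, B { virtual int g (); };

   the entry for C::g in B's vtable is a this-adjusting thunk: before
   jumping to the real C::g it must subtract the offset of the B subobject
   within C from the incoming pointer, which is the kind of adjustment
   built by this function.  */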
1328
1329 static tree
1330 thunk_adjust (gimple_stmt_iterator * bsi,
1331 tree ptr, bool this_adjusting,
1332 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1333 {
1334 gimple stmt;
1335 tree ret;
1336
1337 if (this_adjusting
1338 && fixed_offset != 0)
1339 {
1340 stmt = gimple_build_assign
1341 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1342 ptr,
1343 fixed_offset));
1344 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1345 }
1346
1347 /* If there's a virtual offset, look up that value in the vtable and
1348 adjust the pointer again. */
1349 if (virtual_offset)
1350 {
1351 tree vtabletmp;
1352 tree vtabletmp2;
1353 tree vtabletmp3;
1354
1355 if (!vtable_entry_type)
1356 {
1357 tree vfunc_type = make_node (FUNCTION_TYPE);
1358 TREE_TYPE (vfunc_type) = integer_type_node;
1359 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1360 layout_type (vfunc_type);
1361
1362 vtable_entry_type = build_pointer_type (vfunc_type);
1363 }
1364
1365 vtabletmp =
1366 create_tmp_reg (build_pointer_type
1367 (build_pointer_type (vtable_entry_type)), "vptr");
1368
1369 /* The vptr is always at offset zero in the object. */
1370 stmt = gimple_build_assign (vtabletmp,
1371 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1372 ptr));
1373 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1374
1375 /* Form the vtable address. */
1376 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1377 "vtableaddr");
1378 stmt = gimple_build_assign (vtabletmp2,
1379 build_simple_mem_ref (vtabletmp));
1380 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1381
1382 /* Find the entry with the vcall offset. */
1383 stmt = gimple_build_assign (vtabletmp2,
1384 fold_build_pointer_plus_loc (input_location,
1385 vtabletmp2,
1386 virtual_offset));
1387 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1388
1389 /* Get the offset itself. */
1390 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1391 "vcalloffset");
1392 stmt = gimple_build_assign (vtabletmp3,
1393 build_simple_mem_ref (vtabletmp2));
1394 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1395
1396 /* Adjust the `this' pointer. */
1397 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1398 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1399 GSI_CONTINUE_LINKING);
1400 }
1401
1402 if (!this_adjusting
1403 && fixed_offset != 0)
1404 /* Adjust the pointer by the constant. */
1405 {
1406 tree ptrtmp;
1407
1408 if (TREE_CODE (ptr) == VAR_DECL)
1409 ptrtmp = ptr;
1410 else
1411 {
1412 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1413 stmt = gimple_build_assign (ptrtmp, ptr);
1414 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1415 }
1416 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1417 ptrtmp, fixed_offset);
1418 }
1419
1420 /* Emit the statement and gimplify the adjustment expression. */
1421 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1422 stmt = gimple_build_assign (ret, ptr);
1423 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1424
1425 return ret;
1426 }
1427
1428 /* Expand the thunk into gimple if possible.
1429    When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
1430    no assembler is produced.
1431    When OUTPUT_ASM_THUNKS is true, also produce assembler for
1432    thunks that are not lowered.  */
1433
1434 bool
1435 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1436 {
1437 bool this_adjusting = thunk.this_adjusting;
1438 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1439 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1440 tree virtual_offset = NULL;
1441 tree alias = callees->callee->decl;
1442 tree thunk_fndecl = decl;
1443 tree a;
1444
1445
1446 if (!force_gimple_thunk && this_adjusting
1447 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1448 virtual_value, alias))
1449 {
1450 const char *fnname;
1451 tree fn_block;
1452 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1453
1454 if (!output_asm_thunks)
1455 {
1456 analyzed = true;
1457 return false;
1458 }
1459
1460 if (in_lto_p)
1461 get_body ();
1462 a = DECL_ARGUMENTS (thunk_fndecl);
1463
1464 current_function_decl = thunk_fndecl;
1465
1466 /* Ensure thunks are emitted in their correct sections. */
1467 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1468
1469 DECL_RESULT (thunk_fndecl)
1470 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1471 RESULT_DECL, 0, restype);
1472 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1473 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1474
1475 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1476 create one. */
1477 fn_block = make_node (BLOCK);
1478 BLOCK_VARS (fn_block) = a;
1479 DECL_INITIAL (thunk_fndecl) = fn_block;
1480 init_function_start (thunk_fndecl);
1481 cfun->is_thunk = 1;
1482 insn_locations_init ();
1483 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1484 prologue_location = curr_insn_location ();
1485 assemble_start_function (thunk_fndecl, fnname);
1486
1487 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1488 fixed_offset, virtual_value, alias);
1489
1490 assemble_end_function (thunk_fndecl, fnname);
1491 insn_locations_finalize ();
1492 init_insn_lengths ();
1493 free_after_compilation (cfun);
1494 set_cfun (NULL);
1495 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1496 thunk.thunk_p = false;
1497 analyzed = false;
1498 }
1499 else
1500 {
1501 tree restype;
1502 basic_block bb, then_bb, else_bb, return_bb;
1503 gimple_stmt_iterator bsi;
1504 int nargs = 0;
1505 tree arg;
1506 int i;
1507 tree resdecl;
1508 tree restmp = NULL;
1509
1510 gimple call;
1511 gimple ret;
1512
1513 if (in_lto_p)
1514 get_body ();
1515 a = DECL_ARGUMENTS (thunk_fndecl);
1516
1517 current_function_decl = thunk_fndecl;
1518
1519 /* Ensure thunks are emitted in their correct sections. */
1520 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1521
1522 DECL_IGNORED_P (thunk_fndecl) = 1;
1523 bitmap_obstack_initialize (NULL);
1524
1525 if (thunk.virtual_offset_p)
1526 virtual_offset = size_int (virtual_value);
1527
1528 /* Build the return declaration for the function. */
1529 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1530 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1531 {
1532 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1533 DECL_ARTIFICIAL (resdecl) = 1;
1534 DECL_IGNORED_P (resdecl) = 1;
1535 DECL_RESULT (thunk_fndecl) = resdecl;
1536 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1537 }
1538 else
1539 resdecl = DECL_RESULT (thunk_fndecl);
1540
1541 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);
1542
1543 bsi = gsi_start_bb (bb);
1544
1545 /* Build call to the function being thunked. */
1546 if (!VOID_TYPE_P (restype))
1547 {
1548 if (DECL_BY_REFERENCE (resdecl))
1549 restmp = gimple_fold_indirect_ref (resdecl);
1550 else if (!is_gimple_reg_type (restype))
1551 {
1552 restmp = resdecl;
1553
1554 if (TREE_CODE (restmp) == VAR_DECL)
1555 add_local_decl (cfun, restmp);
1556 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1557 }
1558 else
1559 restmp = create_tmp_reg (restype, "retval");
1560 }
1561
1562 for (arg = a; arg; arg = DECL_CHAIN (arg))
1563 nargs++;
1564 auto_vec<tree> vargs (nargs);
1565 if (this_adjusting)
1566 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1567 virtual_offset));
1568 else if (nargs)
1569 vargs.quick_push (a);
1570
1571 if (nargs)
1572 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1573 {
1574 tree tmp = arg;
1575 if (!is_gimple_val (arg))
1576 {
1577 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1578 (TREE_TYPE (arg)), "arg");
1579 gimple stmt = gimple_build_assign (tmp, arg);
1580 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1581 }
1582 vargs.quick_push (tmp);
1583 }
1584 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1585 callees->call_stmt = call;
1586 gimple_call_set_from_thunk (call, true);
1587 if (restmp)
1588 {
1589 gimple_call_set_lhs (call, restmp);
1590 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1591 TREE_TYPE (TREE_TYPE (alias))));
1592 }
1593 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1594 if (!(gimple_call_flags (call) & ECF_NORETURN))
1595 {
1596 if (restmp && !this_adjusting
1597 && (fixed_offset || virtual_offset))
1598 {
1599 tree true_label = NULL_TREE;
1600
1601 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1602 {
1603 gimple stmt;
1604 /* If the return type is a pointer, we need to
1605 protect against NULL. We know there will be an
1606 adjustment, because that's why we're emitting a
1607 thunk. */
1608 then_bb = create_basic_block (NULL, (void *) 0, bb);
1609 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1610 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1611 add_bb_to_loop (then_bb, bb->loop_father);
1612 add_bb_to_loop (return_bb, bb->loop_father);
1613 add_bb_to_loop (else_bb, bb->loop_father);
1614 remove_edge (single_succ_edge (bb));
1615 true_label = gimple_block_label (then_bb);
1616 stmt = gimple_build_cond (NE_EXPR, restmp,
1617 build_zero_cst (TREE_TYPE (restmp)),
1618 NULL_TREE, NULL_TREE);
1619 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1620 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1621 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1622 make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1623 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1624 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1625 bsi = gsi_last_bb (then_bb);
1626 }
1627
1628 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1629 fixed_offset, virtual_offset);
1630 if (true_label)
1631 {
1632 gimple stmt;
1633 bsi = gsi_last_bb (else_bb);
1634 stmt = gimple_build_assign (restmp,
1635 build_zero_cst (TREE_TYPE (restmp)));
1636 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1637 bsi = gsi_last_bb (return_bb);
1638 }
1639 }
1640 else
1641 gimple_call_set_tail (call, true);
1642
1643 /* Build return value. */
1644 ret = gimple_build_return (restmp);
1645 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1646 }
1647 else
1648 {
1649 gimple_call_set_tail (call, true);
1650 remove_edge (single_succ_edge (bb));
1651 }
1652
1653 cfun->gimple_df->in_ssa_p = true;
1654 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
1655 TREE_ASM_WRITTEN (thunk_fndecl) = false;
1656 delete_unreachable_blocks ();
1657 update_ssa (TODO_update_ssa);
1658 #ifdef ENABLE_CHECKING
1659 verify_flow_info ();
1660 #endif
1661 free_dominance_info (CDI_DOMINATORS);
1662
1663 /* Since we want to emit the thunk, we explicitly mark its name as
1664 referenced. */
1665 thunk.thunk_p = false;
1666 lowered = true;
1667 bitmap_obstack_release (NULL);
1668 }
1669 current_function_decl = NULL;
1670 set_cfun (NULL);
1671 return true;
1672 }
1673
1674 /* Assemble thunks and aliases associated with the node.  */
1675
1676 void
1677 cgraph_node::assemble_thunks_and_aliases (void)
1678 {
1679 cgraph_edge *e;
1680 ipa_ref *ref;
1681
1682 for (e = callers; e;)
1683 if (e->caller->thunk.thunk_p)
1684 {
1685 cgraph_node *thunk = e->caller;
1686
1687 e = e->next_caller;
1688 thunk->expand_thunk (true, false);
1689 thunk->assemble_thunks_and_aliases ();
1690 }
1691 else
1692 e = e->next_caller;
1693
1694 FOR_EACH_ALIAS (this, ref)
1695 {
1696 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1697 bool saved_written = TREE_ASM_WRITTEN (decl);
1698
1699 /* Force assemble_alias to really output the alias this time instead
1700 of buffering it in same alias pairs. */
1701 TREE_ASM_WRITTEN (decl) = 1;
1702 do_assemble_alias (alias->decl,
1703 DECL_ASSEMBLER_NAME (decl));
1704 alias->assemble_thunks_and_aliases ();
1705 TREE_ASM_WRITTEN (decl) = saved_written;
1706 }
1707 }
1708
1709 /* Expand function specified by node. */
1710
1711 void
1712 cgraph_node::expand (void)
1713 {
1714 location_t saved_loc;
1715
1716 /* We ought to not compile any inline clones. */
1717 gcc_assert (!global.inlined_to);
1718
1719 announce_function (decl);
1720 process = 0;
1721 gcc_assert (lowered);
1722 get_body ();
1723
1724 /* Generate RTL for the body of DECL. */
1725
1726 timevar_push (TV_REST_OF_COMPILATION);
1727
1728 gcc_assert (symtab->global_info_ready);
1729
1730 /* Initialize the default bitmap obstack. */
1731 bitmap_obstack_initialize (NULL);
1732
1733 /* Initialize the RTL code for the function. */
1734 current_function_decl = decl;
1735 saved_loc = input_location;
1736 input_location = DECL_SOURCE_LOCATION (decl);
1737 init_function_start (decl);
1738
1739 gimple_register_cfg_hooks ();
1740
1741 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
1742
1743 execute_all_ipa_transforms ();
1744
1745 /* Perform all tree transforms and optimizations. */
1746
1747 /* Signal the start of passes. */
1748 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1749
1750 execute_pass_list (cfun, g->get_passes ()->all_passes);
1751
1752 /* Signal the end of passes. */
1753 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1754
1755 bitmap_obstack_release (&reg_obstack);
1756
1757 /* Release the default bitmap obstack. */
1758 bitmap_obstack_release (NULL);
1759
1760 /* If requested, warn about function definitions where the function will
1761 return a value (usually of some struct or union type) which itself will
1762 take up a lot of stack space. */
1763 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1764 {
1765 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1766
1767 if (ret_type && TYPE_SIZE_UNIT (ret_type)
1768 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1769 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1770 larger_than_size))
1771 {
1772 unsigned int size_as_int
1773 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1774
1775 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1776 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1777 decl, size_as_int);
1778 else
1779 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1780 decl, larger_than_size);
1781 }
1782 }
1783
1784 gimple_set_body (decl, NULL);
1785 if (DECL_STRUCT_FUNCTION (decl) == 0
1786 && !cgraph_node::get (decl)->origin)
1787 {
1788 /* Stop pointing to the local nodes about to be freed.
1789 But DECL_INITIAL must remain nonzero so we know this
1790 was an actual function definition.
1791 For a nested function, this is done in c_pop_function_context.
1792 If rest_of_compilation set this to 0, leave it 0. */
1793 if (DECL_INITIAL (decl) != 0)
1794 DECL_INITIAL (decl) = error_mark_node;
1795 }
1796
1797 input_location = saved_loc;
1798
1799 ggc_collect ();
1800 timevar_pop (TV_REST_OF_COMPILATION);
1801
1802 /* Make sure that the back end didn't give up on compiling. */
1803 gcc_assert (TREE_ASM_WRITTEN (decl));
1804 set_cfun (NULL);
1805 current_function_decl = NULL;
1806
1807 /* It would make a lot more sense to output thunks before the function body to get
1808 more forward and fewer backward jumps. That, however, would require solving a
1809 problem with comdats; see PR48668. Also, aliases must come after the function
1810 itself to keep one-pass assemblers, like the one on AIX, happy. See PR 50689.
1811 FIXME: Perhaps thunks should be moved before the function iff they are not in
1812 comdat groups. */
1813 assemble_thunks_and_aliases ();
1814 release_body ();
1815 /* Eliminate all call edges. This is important so that GIMPLE_CALL statements
1816 no longer point to the dead function body. */
1817 remove_callees ();
1818 remove_all_references ();
1819 }
1820
1821 /* Node comparator producing an order that corresponds to the time when
1822 a function was first executed (its time profile). */
1823
1824 static int
1825 node_cmp (const void *pa, const void *pb)
1826 {
1827 const cgraph_node *a = *(const cgraph_node * const *) pa;
1828 const cgraph_node *b = *(const cgraph_node * const *) pb;
1829
1830 /* Functions with a time profile must be output before those without one. */
1831 if (!a->tp_first_run || !b->tp_first_run)
1832 return a->tp_first_run - b->tp_first_run;
1833
1834 return a->tp_first_run != b->tp_first_run
1835 ? b->tp_first_run - a->tp_first_run
1836 : b->order - a->order;
1837 }
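/* Note: when -fprofile-reorder-functions is in effect, expand_all_functions
   below sorts with this comparator and then walks the array backwards, so
   the descending tp_first_run order produced here results in functions
   being expanded in ascending first-run order, with unprofiled functions
   (tp_first_run == 0) expanded last.  A standalone sketch illustrating this
   follows expand_all_functions. */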
1838
1839 /* Expand all functions that must be output.
1840
1841 Attempt to topologically sort the nodes so that a function is output
1842 only after all functions it calls have been assembled, allowing data to
1843 be propagated across the callgraph. Use a stack to get a smaller distance
1844 between a function and its callees (later we may choose to use a more
1845 sophisticated algorithm for function reordering; we will likely want
1846 to use subsections to make the output functions appear in top-down
1847 order). */
1848
1849 static void
1850 expand_all_functions (void)
1851 {
1852 cgraph_node *node;
1853 cgraph_node **order = XCNEWVEC (cgraph_node *,
1854 symtab->cgraph_count);
1855 unsigned int expanded_func_count = 0, profiled_func_count = 0;
1856 int order_pos, new_order_pos = 0;
1857 int i;
1858
1859 order_pos = ipa_reverse_postorder (order);
1860 gcc_assert (order_pos == symtab->cgraph_count);
1861
1862 /* The garbage collector may remove inline clones that we eliminate during
1863 optimization, so we must be sure not to reference them. */
1864 for (i = 0; i < order_pos; i++)
1865 if (order[i]->process)
1866 order[new_order_pos++] = order[i];
1867
1868 if (flag_profile_reorder_functions)
1869 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
1870
1871 for (i = new_order_pos - 1; i >= 0; i--)
1872 {
1873 node = order[i];
1874
1875 if (node->process)
1876 {
1877 expanded_func_count++;
1878 if (node->tp_first_run)
1879 profiled_func_count++;
1880
1881 if (symtab->dump_file)
1882 fprintf (symtab->dump_file,
1883 "Time profile order in expand_all_functions:%s:%d\n",
1884 node->asm_name (), node->tp_first_run);
1885 node->process = 0;
1886 node->expand ();
1887 }
1888 }
1889
1890 if (dump_file)
1891 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
1892 main_input_filename, profiled_func_count, expanded_func_count);
1893
1894 if (symtab->dump_file && flag_profile_reorder_functions)
1895 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
1896 profiled_func_count, expanded_func_count);
1897
1898 symtab->process_new_functions ();
1899 free_gimplify_stack ();
1900
1901 free (order);
1902 }
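/* Illustrative sketch (standalone program with simplified, hypothetical
   types; not part of GCC): it demonstrates how the node_cmp ordering above,
   combined with the backward walk in expand_all_functions, yields ascending
   first-run order with unprofiled entries last.

     #include <stdio.h>
     #include <stdlib.h>

     struct n { int tp_first_run; };

     static int
     cmp (const void *pa, const void *pb)
     {
       const struct n *a = (const struct n *) pa;
       const struct n *b = (const struct n *) pb;
       if (!a->tp_first_run || !b->tp_first_run)
         return a->tp_first_run - b->tp_first_run;
       return b->tp_first_run - a->tp_first_run;
     }

     int
     main (void)
     {
       struct n v[] = { { 7 }, { 0 }, { 3 }, { 5 } };
       qsort (v, 4, sizeof (struct n), cmp);
       // Sorted array: 0 7 5 3.  Walking it backwards, as the expansion
       // loop above does, visits 3 5 7 0.
       for (int i = 3; i >= 0; i--)
         printf ("%d ", v[i].tp_first_run);
       return 0;
     }
*/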
1903
1904 /* This is used to sort symbol table nodes by their cgraph order number. */
1905
1906 enum cgraph_order_sort_kind
1907 {
1908 ORDER_UNDEFINED = 0,
1909 ORDER_FUNCTION,
1910 ORDER_VAR,
1911 ORDER_ASM
1912 };
1913
1914 struct cgraph_order_sort
1915 {
1916 enum cgraph_order_sort_kind kind;
1917 union
1918 {
1919 cgraph_node *f;
1920 varpool_node *v;
1921 asm_node *a;
1922 } u;
1923 };
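/* Each symbol's order field is used directly as an index into an array of
   these entries (sized by symtab->order), so at most one entry may exist
   per order number; the kind tag records which member of the union is
   valid. */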
1924
1925 /* Output all functions, variables, and asm statements in the order
1926 given by their order fields, which is the order in which they
1927 appeared in the source file. This implements -fno-toplevel-reorder. In
1928 this mode we may output functions and variables which don't really
1929 need to be output.
1930 When NO_REORDER is true, only do this for symbols marked no_reorder. */
1931
1932 static void
1933 output_in_order (bool no_reorder)
1934 {
1935 int max;
1936 cgraph_order_sort *nodes;
1937 int i;
1938 cgraph_node *pf;
1939 varpool_node *pv;
1940 asm_node *pa;
1941 max = symtab->order;
1942 nodes = XCNEWVEC (cgraph_order_sort, max);
1943
1944 FOR_EACH_DEFINED_FUNCTION (pf)
1945 {
1946 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1947 {
1948 if (no_reorder && !pf->no_reorder)
1949 continue;
1950 i = pf->order;
1951 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1952 nodes[i].kind = ORDER_FUNCTION;
1953 nodes[i].u.f = pf;
1954 }
1955 }
1956
1957 FOR_EACH_DEFINED_VARIABLE (pv)
1958 if (!DECL_EXTERNAL (pv->decl))
1959 {
1960 if (no_reorder && !pv->no_reorder)
1961 continue;
1962 i = pv->order;
1963 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1964 nodes[i].kind = ORDER_VAR;
1965 nodes[i].u.v = pv;
1966 }
1967
1968 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
1969 {
1970 i = pa->order;
1971 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1972 nodes[i].kind = ORDER_ASM;
1973 nodes[i].u.a = pa;
1974 }
1975
1976 /* In toplevel reorder mode we output all statics; mark them as needed. */
1977
1978 for (i = 0; i < max; ++i)
1979 if (nodes[i].kind == ORDER_VAR)
1980 nodes[i].u.v->finalize_named_section_flags ();
1981
1982 for (i = 0; i < max; ++i)
1983 {
1984 switch (nodes[i].kind)
1985 {
1986 case ORDER_FUNCTION:
1987 nodes[i].u.f->process = 0;
1988 nodes[i].u.f->expand ();
1989 break;
1990
1991 case ORDER_VAR:
1992 nodes[i].u.v->assemble_decl ();
1993 break;
1994
1995 case ORDER_ASM:
1996 assemble_asm (nodes[i].u.a->asm_str);
1997 break;
1998
1999 case ORDER_UNDEFINED:
2000 break;
2001
2002 default:
2003 gcc_unreachable ();
2004 }
2005 }
2006
2007 symtab->clear_asm_symbols ();
2008
2009 free (nodes);
2010 }
2011
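/* Run the inter-procedural optimization passes: the small IPA passes, the
   analysis stage of the regular IPA passes and, depending on the LTO mode
   (see the condition near the end of this function), their execution stage
   as well.  IPA summaries are streamed out when LTO output is requested. */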
2012 static void
2013 ipa_passes (void)
2014 {
2015 gcc::pass_manager *passes = g->get_passes ();
2016
2017 set_cfun (NULL);
2018 current_function_decl = NULL;
2019 gimple_register_cfg_hooks ();
2020 bitmap_obstack_initialize (NULL);
2021
2022 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2023
2024 if (!in_lto_p)
2025 {
2026 execute_ipa_pass_list (passes->all_small_ipa_passes);
2027 if (seen_error ())
2028 return;
2029 }
2030
2031 /* This extra symtab_remove_unreachable_nodes pass tends to catch some
2032 devirtualization and other changes for which the removal needs to iterate. */
2033 symtab->remove_unreachable_nodes (true, symtab->dump_file);
2034
2035 /* If pass_all_early_optimizations was not scheduled, the state of
2036 the cgraph will not be properly updated. Update it now. */
2037 if (symtab->state < IPA_SSA)
2038 symtab->state = IPA_SSA;
2039
2040 if (!in_lto_p)
2041 {
2042 /* Generate coverage variables and constructors. */
2043 coverage_finish ();
2044
2045 /* Process new functions added. */
2046 set_cfun (NULL);
2047 current_function_decl = NULL;
2048 symtab->process_new_functions ();
2049
2050 execute_ipa_summary_passes
2051 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2052 }
2053
2054 /* Some targets need to handle LTO assembler output specially. */
2055 if (flag_generate_lto)
2056 targetm.asm_out.lto_start ();
2057
2058 if (!in_lto_p)
2059 ipa_write_summaries ();
2060
2061 if (flag_generate_lto)
2062 targetm.asm_out.lto_end ();
2063
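  /* The execution stage of the regular IPA passes runs here only when this
     is not an LTRANS unit and we are either reading LTO input, not using
     LTO at all, or producing fat LTO objects.  For a slim LTO compile the
     summaries streamed above are all that is needed; symbol_table::compile
     returns early after ipa_passes in that case. */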
2064 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2065 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2066 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2067
2068 bitmap_obstack_release (NULL);
2069 }
2070
2071
2072 /* Return the identifier naming the symbol that DECL's "alias" attribute refers to. */
2073
2074 static tree
2075 get_alias_symbol (tree decl)
2076 {
2077 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2078 return get_identifier (TREE_STRING_POINTER
2079 (TREE_VALUE (TREE_VALUE (alias))));
2080 }
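/* For example, with a hypothetical declaration such as

     int foo (void) __attribute__ ((alias ("bar")));

   the lookup above finds the "alias" attribute, extracts the string "bar"
   and returns the corresponding identifier node. */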
2081
2082
2083 /* Weakrefs may be associated with external decls and thus not output
2084 at expansion time. Emit all necessary aliases. */
2085
2086 void
2087 symbol_table::output_weakrefs (void)
2088 {
2089 symtab_node *node;
2090 FOR_EACH_SYMBOL (node)
2091 if (node->alias
2092 && !TREE_ASM_WRITTEN (node->decl)
2093 && node->weakref)
2094 {
2095 tree target;
2096
2097 /* Weakrefs are special in that they do not require the target to be defined
2098 in the current compilation unit, so it is a bit harder to work out what
2099 we want to alias.
2100 When the alias target is defined, we need to fetch it from the symtab
2101 reference; otherwise it is pointed to by alias_target. */
2102 if (node->alias_target)
2103 target = (DECL_P (node->alias_target)
2104 ? DECL_ASSEMBLER_NAME (node->alias_target)
2105 : node->alias_target);
2106 else if (node->analyzed)
2107 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2108 else
2109 {
2110 gcc_unreachable ();
2111 target = get_alias_symbol (node->decl);
2112 }
2113 do_assemble_alias (node->decl, target);
2114 }
2115 }
2116
2117 /* Optimize the whole compilation unit using the callgraph and then expand and assemble it. */
2118
2119 void
2120 symbol_table::compile (void)
2121 {
2122 if (seen_error ())
2123 return;
2124
2125 #ifdef ENABLE_CHECKING
2126 symtab_node::verify_symtab_nodes ();
2127 #endif
2128
2129 timevar_push (TV_CGRAPHOPT);
2130 if (pre_ipa_mem_report)
2131 {
2132 fprintf (stderr, "Memory consumption before IPA\n");
2133 dump_memory_report (false);
2134 }
2135 if (!quiet_flag)
2136 fprintf (stderr, "Performing interprocedural optimizations\n");
2137 state = IPA;
2138
2139 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2140 if (flag_lto)
2141 lto_streamer_hooks_init ();
2142
2143 /* Don't run the IPA passes if there were any errors or sorry messages. */
2144 if (!seen_error ())
2145 ipa_passes ();
2146
2147 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2148 if (seen_error ()
2149 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2150 {
2151 timevar_pop (TV_CGRAPHOPT);
2152 return;
2153 }
2154
2155 /* This pass removes the bodies of extern inline functions we never inlined.
2156 Do this late so other IPA passes see what is really going on.
2157 FIXME: This should be run just after inlining by the pass manager. */
2158 remove_unreachable_nodes (false, dump_file);
2159 global_info_ready = true;
2160 if (dump_file)
2161 {
2162 fprintf (dump_file, "Optimized ");
2163 symtab_node::dump_table (dump_file);
2164 }
2165 if (post_ipa_mem_report)
2166 {
2167 fprintf (stderr, "Memory consumption after IPA\n");
2168 dump_memory_report (false);
2169 }
2170 timevar_pop (TV_CGRAPHOPT);
2171
2172 /* Output everything. */
2173 (*debug_hooks->assembly_start) ();
2174 if (!quiet_flag)
2175 fprintf (stderr, "Assembling functions:\n");
2176 #ifdef ENABLE_CHECKING
2177 symtab_node::verify_symtab_nodes ();
2178 #endif
2179
2180 materialize_all_clones ();
2181 bitmap_obstack_initialize (NULL);
2182 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2183 bitmap_obstack_release (NULL);
2184 mark_functions_to_output ();
2185
2186 /* When weakref support is missing, we automatically translate all
2187 references to NODE to references to its ultimate alias target.
2188 The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
2189 TREE_CHAIN.
2190
2191 Set up this mapping before we output any assembler but once we are sure
2192 that all symbol renaming is done.
2193
2194 FIXME: All this ugliness can go away if we just do renaming at the GIMPLE
2195 level by physically rewriting the IL. At the moment we can only redirect
2196 calls, so we need infrastructure for renaming references as well. */
2197 #ifndef ASM_OUTPUT_WEAKREF
2198 symtab_node *node;
2199
2200 FOR_EACH_SYMBOL (node)
2201 if (node->alias
2202 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2203 {
2204 IDENTIFIER_TRANSPARENT_ALIAS
2205 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2206 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2207 = (node->alias_target ? node->alias_target
2208 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2209 }
2210 #endif
2211
2212 state = EXPANSION;
2213
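  /* With -fno-toplevel-reorder everything is emitted strictly in the order
     it appeared in the source.  Otherwise only symbols explicitly marked
     no_reorder are pinned to their original position; the remaining
     functions are emitted by expand_all_functions (possibly in a
     profile-driven order) and the remaining variables by output_variables. */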
2214 if (!flag_toplevel_reorder)
2215 output_in_order (false);
2216 else
2217 {
2218 /* First output asm statements and anything else marked no_reorder. The
2219 process flag is cleared for these nodes, so we skip them later. */
2220 output_in_order (true);
2221 expand_all_functions ();
2222 output_variables ();
2223 }
2224
2225 process_new_functions ();
2226 state = FINISHED;
2227 output_weakrefs ();
2228
2229 if (dump_file)
2230 {
2231 fprintf (dump_file, "\nFinal ");
2232 symtab_node::dump_table (dump_file);
2233 }
2234 #ifdef ENABLE_CHECKING
2235 symtab_node::verify_symtab_nodes ();
2236 /* Double check that all inline clones are gone and that all
2237 function bodies have been released from memory. */
2238 if (!seen_error ())
2239 {
2240 cgraph_node *node;
2241 bool error_found = false;
2242
2243 FOR_EACH_DEFINED_FUNCTION (node)
2244 if (node->global.inlined_to
2245 || gimple_has_body_p (node->decl))
2246 {
2247 error_found = true;
2248 node->debug ();
2249 }
2250 if (error_found)
2251 internal_error ("nodes with unreleased memory found");
2252 }
2253 #endif
2254 }
2255
2256
2257 /* Analyze the whole compilation unit once it is parsed completely. */
2258
2259 void
2260 symbol_table::finalize_compilation_unit (void)
2261 {
2262 timevar_push (TV_CGRAPH);
2263
2264 /* If we're here, there is no current function anymore. Some frontends
2265 are lazy about clearing these. */
2266 current_function_decl = NULL;
2267 set_cfun (NULL);
2268
2269 /* Do not skip analyzing the functions if there were errors; we would
2270 otherwise miss diagnostics for the following functions. */
2271
2272 /* Emit size functions we didn't inline. */
2273 finalize_size_functions ();
2274
2275 /* Mark alias targets necessary and emit diagnostics. */
2276 handle_alias_pairs ();
2277
2278 if (!quiet_flag)
2279 {
2280 fprintf (stderr, "\nAnalyzing compilation unit\n");
2281 fflush (stderr);
2282 }
2283
2284 if (flag_dump_passes)
2285 dump_passes ();
2286
2287 /* Gimplify and lower all functions, compute reachability and
2288 remove unreachable nodes. */
2289 analyze_functions ();
2290
2291 /* Mark alias targets necessary and emit diagnostics. */
2292 handle_alias_pairs ();
2293
2294 /* Gimplify and lower thunks. */
2295 analyze_functions ();
2296
2297 /* Finally drive the pass manager. */
2298 compile ();
2299
2300 timevar_pop (TV_CGRAPH);
2301 }
2302
2303 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2304 within the same process. For use by toplev::finalize. */
2305
2306 void
2307 cgraphunit_c_finalize (void)
2308 {
2309 gcc_assert (cgraph_new_nodes.length () == 0);
2310 cgraph_new_nodes.truncate (0);
2311
2312 vtable_entry_type = NULL;
2313 queued_nodes = &symtab_terminator;
2314
2315 first_analyzed = NULL;
2316 first_analyzed_var = NULL;
2317 }
2318
2319 /* Create a wrapper from this cgraph_node to the TARGET node. A thunk is used
2320 to implement this kind of wrapper method. */
2321
2322 void
2323 cgraph_node::create_wrapper (cgraph_node *target)
2324 {
2325 /* Preserve DECL_RESULT so we get the right by-reference flag. */
2326 tree decl_result = DECL_RESULT (decl);
2327
2328 /* Remove the function's body but keep its arguments so they can be
2329 reused for the thunk. */
2330 release_body (true);
2331 reset ();
2332
2333 DECL_RESULT (decl) = decl_result;
2334 DECL_INITIAL (decl) = NULL;
2335 allocate_struct_function (decl, false);
2336 set_cfun (NULL);
2337
2338 /* Turn the alias into a thunk and expand it into its GIMPLE representation. */
2339 definition = true;
2340 thunk.thunk_p = true;
2341 thunk.this_adjusting = false;
2342
2343 cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);
2344
2345 expand_thunk (false, true);
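  /* Mark the call to TARGET as not inlinable so the wrapper remains a real
     call. */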
2346 e->call_stmt_cannot_inline_p = true;
2347
2348 /* Inline summary set-up. */
2349 analyze ();
2350 inline_analyze_function (this);
2351 }
2352
2353 #include "gt-cgraphunit.h"