cgraphunit.c (analyze_functions): Do not analyze extern inline functions when not...
[gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - finalize_function
29
30 This function is called once front-end has parsed whole body of function
31 and it is certain that the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66 indicated bellow).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78          The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86             time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
98       d) LTO streaming.  When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110          is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies becomes available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121        IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that needs to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known what variable references was not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151        The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158 */
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "varasm.h"
166 #include "stor-layout.h"
167 #include "stringpool.h"
168 #include "output.h"
169 #include "rtl.h"
170 #include "predict.h"
171 #include "vec.h"
172 #include "hashtab.h"
173 #include "hash-set.h"
174 #include "machmode.h"
175 #include "hard-reg-set.h"
176 #include "input.h"
177 #include "function.h"
178 #include "basic-block.h"
179 #include "tree-ssa-alias.h"
180 #include "internal-fn.h"
181 #include "gimple-fold.h"
182 #include "gimple-expr.h"
183 #include "is-a.h"
184 #include "gimple.h"
185 #include "gimplify.h"
186 #include "gimple-iterator.h"
187 #include "gimplify-me.h"
188 #include "gimple-ssa.h"
189 #include "tree-cfg.h"
190 #include "tree-into-ssa.h"
191 #include "tree-ssa.h"
192 #include "tree-inline.h"
193 #include "langhooks.h"
194 #include "toplev.h"
195 #include "flags.h"
196 #include "debug.h"
197 #include "target.h"
198 #include "diagnostic.h"
199 #include "params.h"
200 #include "intl.h"
201 #include "hash-map.h"
202 #include "plugin-api.h"
203 #include "ipa-ref.h"
204 #include "cgraph.h"
205 #include "alloc-pool.h"
206 #include "ipa-prop.h"
207 #include "tree-iterator.h"
208 #include "tree-pass.h"
209 #include "tree-dump.h"
210 #include "gimple-pretty-print.h"
211 #include "output.h"
212 #include "coverage.h"
213 #include "plugin.h"
214 #include "ipa-inline.h"
215 #include "ipa-utils.h"
216 #include "lto-streamer.h"
217 #include "except.h"
218 #include "cfgloop.h"
219 #include "regset.h" /* FIXME: For reg_obstack. */
220 #include "context.h"
221 #include "pass_manager.h"
222 #include "tree-nested.h"
223 #include "gimplify.h"
224 #include "dbgcnt.h"
225 #include "tree-chkp.h"
226 #include "lto-section-names.h"
227 #include "omp-low.h"
228 #include "print-tree.h"
229
/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
vec<cgraph_node *> cgraph_new_nodes;

/* Forward declarations for the driver routines defined later in this
   file.  */
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void handle_alias_pairs (void);

/* Used for vtable lookup in thunk adjusting.  Lazily initialized and
   preserved across garbage collections (GTY).  */
static GTY (()) tree vtable_entry_type;
241
242 /* Determine if symbol declaration is needed. That is, visible to something
243 either outside this translation unit, something magic in the system
244 configury */
245 bool
246 symtab_node::needed_p (void)
247 {
248 /* Double check that no one output the function into assembly file
249 early. */
250 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
251 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
252
253 if (!definition)
254 return false;
255
256 if (DECL_EXTERNAL (decl))
257 return false;
258
259 /* If the user told us it is used, then it must be so. */
260 if (force_output)
261 return true;
262
263 /* ABI forced symbols are needed when they are external. */
264 if (forced_by_abi && TREE_PUBLIC (decl))
265 return true;
266
267 /* Keep constructors, destructors and virtual functions. */
268 if (TREE_CODE (decl) == FUNCTION_DECL
269 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
270 return true;
271
272 /* Externally visible variables must be output. The exception is
273 COMDAT variables that must be output only when they are needed. */
274 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
275 return true;
276
277 return false;
278 }
279
/* Head and terminator of the queue of nodes to be processed while building
   callgraph.  The queue is threaded through symtab_node::aux; an empty
   queue is represented by the head pointing at the terminator node.  */

static symtab_node symtab_terminator;
static symtab_node *queued_nodes = &symtab_terminator;
285
286 /* Add NODE to queue starting at QUEUED_NODES.
287 The queue is linked via AUX pointers and terminated by pointer to 1. */
288
289 static void
290 enqueue_node (symtab_node *node)
291 {
292 if (node->aux)
293 return;
294 gcc_checking_assert (queued_nodes);
295 node->aux = queued_nodes;
296 queued_nodes = node;
297 }
298
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  Hence iterate by index, re-reading
     the length each time.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      /* How much work is needed depends on how far compilation has
	 progressed when the function was created.  */
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization already started, do all essential
	     transformations that have been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* Bring the body into SSA if the rest of the unit already is;
	     otherwise just (re)compute the inline summary.  */
	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summary_vec != NULL)
	    compute_inline_parameters (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  cgraph_new_nodes.release ();
}
367
368 /* As an GCC extension we allow redefinition of the function. The
369 semantics when both copies of bodies differ is not well defined.
370 We replace the old body with new body so in unit at a time mode
371 we always use new body, while in normal mode we may end up with
372 old body inlined into some functions and new body expanded and
373 inlined in others.
374
375 ??? It may make more sense to use one body for inlining and other
376 body for expanding the function but this is difficult to do. */
377
378 void
379 cgraph_node::reset (void)
380 {
381 /* If process is set, then we have already begun whole-unit analysis.
382 This is *not* testing for whether we've already emitted the function.
383 That case can be sort-of legitimately seen with real function redefinition
384 errors. I would argue that the front end should never present us with
385 such a case, but don't enforce that for now. */
386 gcc_assert (!process);
387
388 /* Reset our data structures so we can analyze the function again. */
389 memset (&local, 0, sizeof (local));
390 memset (&global, 0, sizeof (global));
391 memset (&rtl, 0, sizeof (rtl));
392 analyzed = false;
393 definition = false;
394 alias = false;
395 weakref = false;
396 cpp_implicit_alias = false;
397
398 remove_callees ();
399 remove_all_references ();
400 }
401
402 /* Return true when there are references to the node. */
403
404 bool
405 symtab_node::referred_to_p (void)
406 {
407 ipa_ref *ref = NULL;
408
409 /* See if there are any references at all. */
410 if (iterate_referring (0, ref))
411 return true;
412 /* For functions check also calls. */
413 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
414 if (cn && cn->callers)
415 return true;
416 return false;
417 }
418
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to
   have the garbage collector run at the moment.  We would need to either
   create a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      /* A second definition is the GCC extern inline extension: drop the
	 old body and remember that the extern inline body was replaced.  */
      node->reset ();
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->definition = true;
  /* The front end may hand us a body already lowered to CFG form.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if ((!opt_for_fn (decl, optimize)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!no_collect)
    ggc_collect ();

  /* While the symbol table is under construction, immediately pick up
     symbols that are already known to be needed or referenced.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}
479
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.  LOWERED says whether the body is already lowered.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;
  /* How much needs to be done here depends on how far compilation has
     progressed; later states require progressively more catching up.  */
  switch (symtab->state)
    {
    case PARSING:
      /* Still parsing: treat it like any freshly parsed function.  */
      cgraph_node::finalize_function (fndecl, false);
      break;
    case CONSTRUCTION:
      /* Just enqueue function to be processed at nearest occurrence.  */
      node = cgraph_node::get_create (fndecl);
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case IPA:
    case IPA_SSA:
    case IPA_SSA_AFTER_INLINING:
    case EXPANSION:
      /* Bring the function into finalized state and enqueue for later
	 analyzing and compilation.  */
      node = cgraph_node::get_create (fndecl);
      node->local.local = false;
      node->definition = true;
      /* Assume the address is taken; see the function comment above.  */
      node->force_output = true;
      if (!lowered && symtab->state == EXPANSION)
	{
	  /* No further lowering will happen during expansion, so lower
	     the body right away and run the early local passes.  */
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, passes->all_lowering_passes);
	  passes->execute_early_local_passes ();
	  bitmap_obstack_release (NULL);
	  pop_cfun ();

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_node::create (fndecl);
      if (lowered)
	node->lowered = true;
      node->definition = true;
      node->analyze ();
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	g->get_passes ()->execute_early_local_passes ();
      bitmap_obstack_release (NULL);
      pop_cfun ();
      node->expand ();
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
565
566 /* Analyze the function scheduled to be output. */
567 void
568 cgraph_node::analyze (void)
569 {
570 tree decl = this->decl;
571 location_t saved_loc = input_location;
572 input_location = DECL_SOURCE_LOCATION (decl);
573
574 if (thunk.thunk_p)
575 {
576 create_edge (cgraph_node::get (thunk.alias),
577 NULL, 0, CGRAPH_FREQ_BASE);
578 if (!expand_thunk (false, false))
579 {
580 thunk.alias = NULL;
581 return;
582 }
583 thunk.alias = NULL;
584 }
585 if (alias)
586 resolve_alias (cgraph_node::get (alias_target));
587 else if (dispatcher_function)
588 {
589 /* Generate the dispatcher body of multi-versioned functions. */
590 cgraph_function_version_info *dispatcher_version_info
591 = function_version ();
592 if (dispatcher_version_info != NULL
593 && (dispatcher_version_info->dispatcher_resolver
594 == NULL_TREE))
595 {
596 tree resolver = NULL_TREE;
597 gcc_assert (targetm.generate_version_dispatcher_body);
598 resolver = targetm.generate_version_dispatcher_body (this);
599 gcc_assert (resolver != NULL_TREE);
600 }
601 }
602 else
603 {
604 push_cfun (DECL_STRUCT_FUNCTION (decl));
605
606 assign_assembler_name_if_neeeded (decl);
607
608 /* Make sure to gimplify bodies only once. During analyzing a
609 function we lower it, which will require gimplified nested
610 functions, so we can end up here with an already gimplified
611 body. */
612 if (!gimple_has_body_p (decl))
613 gimplify_function_tree (decl);
614 dump_function (TDI_generic, decl);
615
616 /* Lower the function. */
617 if (!lowered)
618 {
619 if (nested)
620 lower_nested_functions (decl);
621 gcc_assert (!nested);
622
623 gimple_register_cfg_hooks ();
624 bitmap_obstack_initialize (NULL);
625 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
626 free_dominance_info (CDI_POST_DOMINATORS);
627 free_dominance_info (CDI_DOMINATORS);
628 compact_blocks ();
629 bitmap_obstack_release (NULL);
630 lowered = true;
631 }
632
633 pop_cfun ();
634 }
635 analyzed = true;
636
637 input_location = saved_loc;
638 }
639
640 /* C++ frontend produces same body aliases all over the place, even before PCH
641    gets streamed out.  It relies on us linking the aliases with their function
642    in order to do the fixups, but ipa-ref is not PCH safe.  Consequently we
643    first produce aliases without links, but once the C++ FE is sure it won't
644    stream PCH we build the links via this function.  */
645
646 void
647 symbol_table::process_same_body_aliases (void)
648 {
649 symtab_node *node;
650 FOR_EACH_SYMBOL (node)
651 if (node->cpp_implicit_alias && !node->analyzed)
652 node->resolve_alias
653 (TREE_CODE (node->alias_target) == VAR_DECL
654 ? (symtab_node *)varpool_node::get_create (node->alias_target)
655 : (symtab_node *)cgraph_node::get_create (node->alias_target));
656 cpp_implicit_aliases_done = true;
657 }
658
659 /* Process attributes common for vars and functions. */
660
661 static void
662 process_common_attributes (symtab_node *node, tree decl)
663 {
664 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
665
666 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
667 {
668 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
669 "%<weakref%> attribute should be accompanied with"
670 " an %<alias%> attribute");
671 DECL_WEAK (decl) = 0;
672 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
673 DECL_ATTRIBUTES (decl));
674 }
675
676 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
677 node->no_reorder = 1;
678 }
679
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

     void f();
     void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

     void f() {}
     void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  FIRST and FIRST_VAR delimit the nodes already
   handled by an earlier invocation, so only newer nodes are walked.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
					  varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  /* Walk all functions added since the previous invocation.  */
  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* "weakref" on a defined (non-alias) function is meaningless.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (node, decl);
    }
  /* Likewise for variables added since the previous invocation.  */
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* "weakref" on an initialized variable is likewise meaningless.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}
774
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct
   the middle end to output the variable to asm file, if needed or externally
   visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Finalizing twice is a no-op.  */
  if (node->definition)
    return;
  notice_global_symbol (decl);
  node->definition = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reoder.  */
      || node->no_reorder
      || ((!flag_toplevel_reorder
	   && !DECL_COMDAT (node->decl)
	   && !DECL_ARTIFICIAL (node->decl))))
    node->force_output = true;

  /* While the symbol table is under construction, pick up variables that
     are already known to be needed or referenced.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (!flag_toplevel_reorder
	  && symtab->state == EXPANSION))
    node->assemble_decl ();

  /* Register the initializer with the pointer-bounds checker (chkp).  */
  if (DECL_INITIAL (decl))
    chkp_register_var_initializer (decl);
}
814
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; skip lists walked before.  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		   (TREE_TYPE (targets[i]->decl))
		   == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		    (method_class_type
		       (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  /* Zero possible targets means the call is unreachable; direct
	     it at __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  edge->make_direct (target);
	  edge->redirect_call_stmt_to_callee ();

	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
	  if (!targets.length ())
	    gimple_call_set_with_bounds (edge->call_stmt, false);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
902
903
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred by them.  */
/* First function and variable handled by the previous invocation of
   analyze_functions; used to keep track of already processed nodes when
   called multiple times for intermodule optimization.  */
static cgraph_node *first_analyzed;
static varpool_node *first_analyzed_var;
908
909 static void
910 analyze_functions (void)
911 {
912 /* Keep track of already processed nodes when called multiple times for
913 intermodule optimization. */
914 cgraph_node *first_handled = first_analyzed;
915 varpool_node *first_handled_var = first_analyzed_var;
916 hash_set<void *> reachable_call_targets;
917
918 symtab_node *node;
919 symtab_node *next;
920 int i;
921 ipa_ref *ref;
922 bool changed = true;
923 location_t saved_loc = input_location;
924
925 bitmap_obstack_initialize (NULL);
926 symtab->state = CONSTRUCTION;
927 input_location = UNKNOWN_LOCATION;
928
929 /* Ugly, but the fixup can not happen at a time same body alias is created;
930 C++ FE is confused about the COMDAT groups being right. */
931 if (symtab->cpp_implicit_aliases_done)
932 FOR_EACH_SYMBOL (node)
933 if (node->cpp_implicit_alias)
934 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
935 build_type_inheritance_graph ();
936
937 /* Analysis adds static variables that in turn adds references to new functions.
938 So we need to iterate the process until it stabilize. */
939 while (changed)
940 {
941 changed = false;
942 process_function_and_variable_attributes (first_analyzed,
943 first_analyzed_var);
944
945 /* First identify the trivially needed symbols. */
946 for (node = symtab->first_symbol ();
947 node != first_analyzed
948 && node != first_analyzed_var; node = node->next)
949 {
950 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
951 node->get_comdat_group_id ();
952 if (node->needed_p ())
953 {
954 enqueue_node (node);
955 if (!changed && symtab->dump_file)
956 fprintf (symtab->dump_file, "Trivially needed symbols:");
957 changed = true;
958 if (symtab->dump_file)
959 fprintf (symtab->dump_file, " %s", node->asm_name ());
960 if (!changed && symtab->dump_file)
961 fprintf (symtab->dump_file, "\n");
962 }
963 if (node == first_analyzed
964 || node == first_analyzed_var)
965 break;
966 }
967 symtab->process_new_functions ();
968 first_analyzed_var = symtab->first_variable ();
969 first_analyzed = symtab->first_function ();
970
971 if (changed && symtab->dump_file)
972 fprintf (symtab->dump_file, "\n");
973
974 /* Lower representation, build callgraph edges and references for all trivially
975 needed symbols and all symbols referred by them. */
976 while (queued_nodes != &symtab_terminator)
977 {
978 changed = true;
979 node = queued_nodes;
980 queued_nodes = (symtab_node *)queued_nodes->aux;
981 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
982 if (cnode && cnode->definition)
983 {
984 cgraph_edge *edge;
985 tree decl = cnode->decl;
986
987 /* ??? It is possible to create extern inline function
988 and later using weak alias attribute to kill its body.
989 See gcc.c-torture/compile/20011119-1.c */
990 if (!DECL_STRUCT_FUNCTION (decl)
991 && !cnode->alias
992 && !cnode->thunk.thunk_p
993 && !cnode->dispatcher_function)
994 {
995 cnode->reset ();
996 cnode->local.redefined_extern_inline = true;
997 continue;
998 }
999
1000 if (!cnode->analyzed)
1001 cnode->analyze ();
1002
1003 for (edge = cnode->callees; edge; edge = edge->next_callee)
1004 if (edge->callee->definition
1005 && (!DECL_EXTERNAL (edge->callee->decl)
1006 /* When not optimizing, do not try to analyze extern
1007 inline functions. Doing so is pointless. */
1008 || opt_for_fn (edge->callee->decl, optimize)
1009 /* Weakrefs needs to be preserved. */
1010 || edge->callee->alias
1011 /* always_inline functions are inlined aven at -O0. */
1012 || lookup_attribute
1013 ("always_inline",
1014 DECL_ATTRIBUTES (edge->callee->decl))
1015 /* Multiversioned functions needs the dispatcher to
1016 be produced locally even for extern functions. */
1017 || edge->callee->function_version ()))
1018 enqueue_node (edge->callee);
1019 if (opt_for_fn (cnode->decl, optimize)
1020 && opt_for_fn (cnode->decl, flag_devirtualize))
1021 {
1022 cgraph_edge *next;
1023
1024 for (edge = cnode->indirect_calls; edge; edge = next)
1025 {
1026 next = edge->next_callee;
1027 if (edge->indirect_info->polymorphic)
1028 walk_polymorphic_call_targets (&reachable_call_targets,
1029 edge);
1030 }
1031 }
1032
1033 /* If decl is a clone of an abstract function,
1034 mark that abstract function so that we don't release its body.
1035 The DECL_INITIAL() of that abstract function declaration
1036 will be later needed to output debug info. */
1037 if (DECL_ABSTRACT_ORIGIN (decl))
1038 {
1039 cgraph_node *origin_node
1040 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1041 origin_node->used_as_abstract_origin = true;
1042 }
1043 }
1044 else
1045 {
1046 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1047 if (vnode && vnode->definition && !vnode->analyzed)
1048 vnode->analyze ();
1049 }
1050
1051 if (node->same_comdat_group)
1052 {
1053 symtab_node *next;
1054 for (next = node->same_comdat_group;
1055 next != node;
1056 next = next->same_comdat_group)
1057 if (!next->comdat_local_p ())
1058 enqueue_node (next);
1059 }
1060 for (i = 0; node->iterate_reference (i, ref); i++)
1061 if (ref->referred->definition
1062 && (!DECL_EXTERNAL (ref->referred->decl)
1063 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1064 && optimize)
1065 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1066 && opt_for_fn (ref->referred->decl, optimize))
1067 || ref->referred->alias)))
1068 enqueue_node (ref->referred);
1069 symtab->process_new_functions ();
1070 }
1071 }
1072 update_type_inheritance_graph ();
1073
1074 /* Collect entry points to the unit. */
1075 if (symtab->dump_file)
1076 {
1077 fprintf (symtab->dump_file, "\n\nInitial ");
1078 symtab_node::dump_table (symtab->dump_file);
1079 }
1080
1081 if (symtab->dump_file)
1082 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1083
1084 for (node = symtab->first_symbol ();
1085 node != first_handled
1086 && node != first_handled_var; node = next)
1087 {
1088 next = node->next;
1089 if (!node->aux && !node->referred_to_p ())
1090 {
1091 if (symtab->dump_file)
1092 fprintf (symtab->dump_file, " %s", node->name ());
1093 node->remove ();
1094 continue;
1095 }
1096 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1097 {
1098 tree decl = node->decl;
1099
1100 if (cnode->definition && !gimple_has_body_p (decl)
1101 && !cnode->alias
1102 && !cnode->thunk.thunk_p)
1103 cnode->reset ();
1104
1105 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1106 || cnode->alias
1107 || gimple_has_body_p (decl));
1108 gcc_assert (cnode->analyzed == cnode->definition);
1109 }
1110 node->aux = NULL;
1111 }
1112 for (;node; node = node->next)
1113 node->aux = NULL;
1114 first_analyzed = symtab->first_function ();
1115 first_analyzed_var = symtab->first_variable ();
1116 if (symtab->dump_file)
1117 {
1118 fprintf (symtab->dump_file, "\n\nReclaimed ");
1119 symtab_node::dump_table (symtab->dump_file);
1120 }
1121 bitmap_obstack_release (NULL);
1122 ggc_collect ();
1123 /* Initialize assembler name hash, in particular we want to trigger C++
1124 mangling and same body alias creation before we free DECL_ARGUMENTS
1125 used by it. */
1126 if (!seen_error ())
1127 symtab->symtab_initialize_asm_name_hash ();
1128
1129 input_location = saved_loc;
1130 }
1131
1132 /* Translate the ugly representation of aliases as alias pairs into nice
1133 representation in callgraph. We don't handle all cases yet,
1134 unfortunately. */
1135
static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  /* NOTE: I is never incremented.  Every path through the loop body ends by
     calling alias_pairs->unordered_remove (i), which swaps the last element
     into slot I, so the next unprocessed pair is always found at index 0.  */
  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      /* Resolve the alias target by its assembler name; NULL when the
	 target symbol is not defined in the current unit.  */
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      /* Record the target name so the weakref directive can be
		 emitted later; no symtab edge is created.  */
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  /* Non-weakref alias to an undefined symbol: diagnose and drop
	     the alias flag so the decl is treated as a plain symbol.  */
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      /* Function aliased to a function: turn the pair into a real
	 callgraph alias node.  */
      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  /* A pre-existing definition of the alias decl is discarded;
	     the alias replaces it.  */
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      /* Variable aliased to a variable: create a varpool alias.  */
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  /* Mixed function/variable pair is unsupported.  */
	  error ("%q+D alias in between function and variable is not supported",
		 p->decl);
	  warning (0, "%q+D aliased declaration",
		   target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
    }
  /* All pairs have been consumed (or diagnosed); release the vector.  */
  vec_free (alias_pairs);
}
1210
1211
1212 /* Figure out what functions we want to assemble. */
1213
static void
mark_functions_to_output (void)
{
  cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* Sanity check: no node may be pre-marked for output.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* A node can only already be marked via the same_comdat_group
	 propagation below.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* All members of a comdat group must be emitted together, so
	     mark the siblings too (except thunks/aliases, which are
	     emitted alongside their function, and comdat-local symbols).  */
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
	  /* Defer verification of unmarked comdat members until all
	     groups have been walked; a later member may mark this one.  */
#ifdef ENABLE_CHECKING
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Second pass: any comdat member still unmarked must have no
     reclaimable body left behind.  */
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1312
1313 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1314 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1315
1316 Set current_function_decl and cfun to newly constructed empty function body.
1317 return basic block in the function body. */
1318
1319 basic_block
1320 init_lowered_empty_function (tree decl, bool in_ssa)
1321 {
1322 basic_block bb;
1323
1324 current_function_decl = decl;
1325 allocate_struct_function (decl, false);
1326 gimple_register_cfg_hooks ();
1327 init_empty_tree_cfg ();
1328
1329 if (in_ssa)
1330 {
1331 init_tree_ssa (cfun);
1332 init_ssa_operands (cfun);
1333 cfun->gimple_df->in_ssa_p = true;
1334 cfun->curr_properties |= PROP_ssa;
1335 }
1336
1337 DECL_INITIAL (decl) = make_node (BLOCK);
1338
1339 DECL_SAVED_TREE (decl) = error_mark_node;
1340 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1341 | PROP_cfg | PROP_loops);
1342
1343 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1344 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1345 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1346
1347 /* Create BB for body of the function and connect it properly. */
1348 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1349 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1350 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1351 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1352
1353 return bb;
1354 }
1355
1356 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1357 offset indicated by VIRTUAL_OFFSET, if that is
1358 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1359 zero for a result adjusting thunk. */
1360
static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gassign *stmt;
  tree ret;

  /* For a this-adjusting thunk the fixed offset is applied first,
     before any vtable lookup.  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
	       (ptr, fold_build_pointer_plus_hwi_loc (input_location,
						      ptr,
						      fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily create the type used for vtable slots: pointer to a
	 function returning int with unspecified arguments.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* For a result-adjusting thunk the fixed offset is applied last,
     after the vtable adjustment.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1459
1460 /* Expand thunk NODE to gimple if possible.
1461 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1462 no assembler is produced.
1463 When OUTPUT_ASM_THUNK is true, also produce assembler for
1464 thunks that are not lowered. */
1465
bool
cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
{
  bool this_adjusting = thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = thunk.virtual_value;
  tree virtual_offset = NULL;
  /* A thunk node has exactly one outgoing edge: to the thunked function.  */
  tree alias = callees->callee->decl;
  tree thunk_fndecl = decl;
  tree a;


  /* Fast path: emit the thunk directly as target assembler when the
     target supports it and the caller did not insist on GIMPLE.  */
  if (!force_gimple_thunk && this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      /* Assembler output was not requested; just note the node as
	 analyzed and report that nothing was lowered.  */
      if (!output_asm_thunks)
	{
	  analyzed = true;
	  return false;
	}

      if (in_lto_p)
	get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();
      assemble_start_function (thunk_fndecl, fnname);

      /* Let the target emit the actual jump-with-adjustment sequence.  */
      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      /* The node is now an ordinary assembled function, not a thunk.  */
      thunk.thunk_p = false;
      analyzed = false;
    }
  else
    {
      /* Slow path: build a GIMPLE body that adjusts the pointer(s) and
	 calls the thunked function.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;

      gcall *call;
      greturn *ret;

      if (in_lto_p)
	get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (thunk.virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
          DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      /* Start from a single-block SSA body; the then/else/return blocks
	 are split out below only for the NULL-pointer guard case.  */
      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (DECL_BY_REFERENCE (resdecl))
	    {
	      /* Result returned by invisible reference: store through
		 the result pointer.  */
	      restmp = gimple_fold_indirect_ref (resdecl);
	      if (!restmp)
		restmp = build2 (MEM_REF,
				 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
				 resdecl,
				 build_int_cst (TREE_TYPE
						  (DECL_RESULT (alias)), 0));
	    }
	  else if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate result: reuse the RESULT_DECL directly.  */
	      restmp = resdecl;

	      if (TREE_CODE (restmp) == VAR_DECL)
		add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      auto_vec<tree> vargs (nargs);
      /* The first argument (`this') is either adjusted in place or
	 forwarded unchanged; the rest are always forwarded.  */
      if (this_adjusting)
        vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					virtual_offset));
      else if (nargs)
        vargs.quick_push (a);

      if (nargs)
        for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
	  {
	    tree tmp = arg;
	    /* Arguments that are not valid GIMPLE values must be copied
	       into temporaries first.  */
	    if (!is_gimple_val (arg))
	      {
		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
				      (TREE_TYPE (arg)), "arg");
		gimple stmt = gimple_build_assign (tmp, arg);
		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      }
	    vargs.quick_push (tmp);
	  }
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      callees->call_stmt = call;
      gimple_call_set_from_thunk (call, true);
      gimple_call_set_with_bounds (call, instrumentation_clone);
      if (restmp)
	{
          gimple_call_set_lhs (call, restmp);
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
						 TREE_TYPE (TREE_TYPE (alias))));
	}
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!(gimple_call_flags (call) & ECF_NORETURN))
	{
	  /* A result-adjusting thunk must fix up the returned pointer
	     before returning it, so the call cannot be a tail call.  */
	  if (restmp && !this_adjusting
	      && (fixed_offset || virtual_offset))
	    {
	      tree true_label = NULL_TREE;

	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
		{
		  gimple stmt;
		  /* If the return type is a pointer, we need to
		     protect against NULL.  We know there will be an
		     adjustment, because that's why we're emitting a
		     thunk.  */
		  then_bb = create_basic_block (NULL, (void *) 0, bb);
		  return_bb = create_basic_block (NULL, (void *) 0, then_bb);
		  else_bb = create_basic_block (NULL, (void *) 0, else_bb);
		  add_bb_to_loop (then_bb, bb->loop_father);
		  add_bb_to_loop (return_bb, bb->loop_father);
		  add_bb_to_loop (else_bb, bb->loop_father);
		  remove_edge (single_succ_edge (bb));
		  true_label = gimple_block_label (then_bb);
		  stmt = gimple_build_cond (NE_EXPR, restmp,
					    build_zero_cst (TREE_TYPE (restmp)),
					    NULL_TREE, NULL_TREE);
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
		  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
		  make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  make_edge (then_bb, return_bb, EDGE_FALLTHRU);
		  make_edge (else_bb, return_bb, EDGE_FALLTHRU);
		  bsi = gsi_last_bb (then_bb);
		}

	      /* Non-NULL result: apply the adjustment.  */
	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				     fixed_offset, virtual_offset);
	      if (true_label)
		{
		  gimple stmt;
		  /* NULL result: return NULL unchanged.  */
		  bsi = gsi_last_bb (else_bb);
		  stmt = gimple_build_assign (restmp,
					      build_zero_cst (TREE_TYPE (restmp)));
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  bsi = gsi_last_bb (return_bb);
		}
	    }
	  else
	    gimple_call_set_tail (call, true);

	  /* Build return value.  */
	  if (!DECL_BY_REFERENCE (resdecl))
	    ret = gimple_build_return (restmp);
	  else
	    ret = gimple_build_return (resdecl);

	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
	}
      else
	{
	  /* The callee never returns; drop the fallthrough edge.  */
	  gimple_call_set_tail (call, true);
	  remove_edge (single_succ_edge (bb));
	}

      cfun->gimple_df->in_ssa_p = true;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
      verify_flow_info ();
#endif
      free_dominance_info (CDI_DOMINATORS);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      thunk.thunk_p = false;
      lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}
1718
1719 /* Assemble thunks and aliases associated to node. */
1720
1721 void
1722 cgraph_node::assemble_thunks_and_aliases (void)
1723 {
1724 cgraph_edge *e;
1725 ipa_ref *ref;
1726
1727 for (e = callers; e;)
1728 if (e->caller->thunk.thunk_p
1729 && !e->caller->thunk.add_pointer_bounds_args)
1730 {
1731 cgraph_node *thunk = e->caller;
1732
1733 e = e->next_caller;
1734 thunk->expand_thunk (true, false);
1735 thunk->assemble_thunks_and_aliases ();
1736 }
1737 else
1738 e = e->next_caller;
1739
1740 FOR_EACH_ALIAS (this, ref)
1741 {
1742 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1743 bool saved_written = TREE_ASM_WRITTEN (decl);
1744
1745 /* Force assemble_alias to really output the alias this time instead
1746 of buffering it in same alias pairs. */
1747 TREE_ASM_WRITTEN (decl) = 1;
1748 do_assemble_alias (alias->decl,
1749 DECL_ASSEMBLER_NAME (decl));
1750 alias->assemble_thunks_and_aliases ();
1751 TREE_ASM_WRITTEN (decl) = saved_written;
1752 }
1753 }
1754
1755 /* Expand function specified by node. */
1756
void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!global.inlined_to);

  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply any pending summary-driven IPA transformations (inlining,
     cloning adjustments, ...) to this body first.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Print the exact size when it fits in unsigned int, otherwise
	     just report that it exceeds the threshold.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  set_cfun (NULL);
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before function body
     to get more forward and fewer backward jumps.  This however would need
     solving problem with comdats.  See PR48668.  Also aliases must come after
     function itself to make one pass assemblers, like one on AIX, happy.
     See PR 50689.
     FIXME: Perhaps thunks should be moved before function IFF they are not in
     comdat groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
1866
1867 /* Node comparer that is responsible for the order that corresponds
1868 to time when a function was launched for the first time. */
1869
1870 static int
1871 node_cmp (const void *pa, const void *pb)
1872 {
1873 const cgraph_node *a = *(const cgraph_node * const *) pa;
1874 const cgraph_node *b = *(const cgraph_node * const *) pb;
1875
1876 /* Functions with time profile must be before these without profile. */
1877 if (!a->tp_first_run || !b->tp_first_run)
1878 return a->tp_first_run - b->tp_first_run;
1879
1880 return a->tp_first_run != b->tp_first_run
1881 ? b->tp_first_run - a->tp_first_run
1882 : b->order - a->order;
1883 }
1884
1885 /* Expand all functions that must be output.
1886
1887 Attempt to topologically sort the nodes so function is output when
1888 all called functions are already assembled to allow data to be
1889 propagated across the callgraph. Use a stack to get smaller distance
1890 between a function and its callees (later we may choose to use a more
1891 sophisticated algorithm for function reordering; we will likely want
1892 to use subsections to make the output functions appear in top-down
1893 order). */
1894
static void
expand_all_functions (void)
{
  cgraph_node *node;
  cgraph_node **order = XCNEWVEC (cgraph_node *,
					 symtab->cgraph_count);
  /* Counters used only for the dump-file statistics below.  */
  unsigned int expanded_func_count = 0, profiled_func_count = 0;
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = ipa_reverse_postorder (order);
  gcc_assert (order_pos == symtab->cgraph_count);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  /* Optionally reorder by time profile (see node_cmp); the array is
     consumed back-to-front below.  */
  if (flag_profile_reorder_functions)
    qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);

  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];

      if (node->process)
	{
	  expanded_func_count++;
	  if(node->tp_first_run)
	    profiled_func_count++;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file,
		     "Time profile order in expand_all_functions:%s:%d\n",
		     node->asm_name (), node->tp_first_run);
	  node->process = 0;
	  node->expand ();
	}
    }

    if (dump_file)
      fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
               main_input_filename, profiled_func_count, expanded_func_count);

  if (symtab->dump_file && flag_profile_reorder_functions)
    fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
             profiled_func_count, expanded_func_count);

  /* Expansion may have created new functions (e.g. thunks); process them.  */
  symtab->process_new_functions ();
  free_gimplify_stack ();

  free (order);
}
1949
1950 /* This is used to sort the node types by the cgraph order number. */
1951
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not occupied by any symbol.  */
  ORDER_FUNCTION,	/* Slot holds a cgraph_node (u.f).  */
  ORDER_VAR,		/* Slot holds a varpool_node (u.v).  */
  ORDER_ASM		/* Slot holds a toplevel asm node (u.a).  */
};
1959
/* One slot of the order-indexed table built by output_in_order: KIND
   selects which union member is valid.  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
1970
1971 /* Output all functions, variables, and asm statements in the order
1972 according to their order fields, which is the order in which they
1973 appeared in the file. This implements -fno-toplevel-reorder. In
1974 this mode we may output functions and variables which don't really
1975 need to be output.
1976 When NO_REORDER is true only do this for symbols marked no reorder. */
1977
static void
output_in_order (bool no_reorder)
{
  int max;
  cgraph_order_sort *nodes;
  int i;
  cgraph_node *pf;
  varpool_node *pv;
  asm_node *pa;
  /* Symbol `order' numbers are unique and bounded by symtab->order, so a
     flat table indexed by order reconstructs source order.  */
  max = symtab->order;
  nodes = XCNEWVEC (cgraph_order_sort, max);

  /* Slot in every function marked for output (thunks and aliases are
     emitted together with their function, so skip them here).  */
  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  if (no_reorder && !pf->no_reorder)
	    continue;
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Slot in every defined non-external variable.  */
  FOR_EACH_DEFINED_VARIABLE (pv)
    if (!DECL_EXTERNAL (pv->decl))
      {
	if (no_reorder && !pv->no_reorder)
	  continue;
	i = pv->order;
	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	nodes[i].kind = ORDER_VAR;
	nodes[i].u.v = pv;
      }

  /* Slot in every toplevel asm statement.  */
  for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      nodes[i].u.v->finalize_named_section_flags ();

  /* Emit everything in original declaration order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  nodes[i].u.f->expand ();
	  break;

	case ORDER_VAR:
	  nodes[i].u.v->assemble_decl ();
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  symtab->clear_asm_symbols ();

  free (nodes);
}
2057
/* Run the inter-procedural (IPA) pass pipeline: the small early IPA
   passes, unreachable-node removal, summary generation, LTO/offload
   section streaming, and finally the regular IPA passes.  The order of
   the steps below is significant.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  /* IPA passes work on the whole program; make sure no per-function
     context is active.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* Early small-IPA passes only run when compiling from source; in an
     LTO link they already ran at compile time.  */
  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      if (seen_error ())
	/* NOTE(review): this early return skips bitmap_obstack_release
	   below; presumably harmless since compilation stops on error —
	   confirm.  */
	return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal iterates.  */
  symtab->remove_unreachable_nodes (symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      /* Compute the per-function summaries used by the regular IPA
	 passes (and streamed out for LTO below).  */
      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_start ();

  /* Stream out offload and/or LTO sections, each under its own
     section-name prefix.  */
  if (!in_lto_p)
    {
      if (g->have_offload)
	{
	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
	  ipa_write_summaries (true);
	}
      if (flag_lto)
	{
	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
	  ipa_write_summaries (false);
	}
    }

  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes now unless this is a slim-LTO compile
     (then they run at link time) or an ltrans unit (they already ran
     at WPA time).  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2127
2128
2129 /* Return string alias is alias of. */
2130
2131 static tree
2132 get_alias_symbol (tree decl)
2133 {
2134 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2135 return get_identifier (TREE_STRING_POINTER
2136 (TREE_VALUE (TREE_VALUE (alias))));
2137 }
2138
2139
/* Weakrefs may be associated to external decls and thus not output
   at expansion time.  Emit all necessary aliases.  */

void
symbol_table::output_weakrefs (void)
{
  symtab_node *node;
  cgraph_node *cnode;
  /* Walk every symbol and assemble each weakref alias that was not
     already written — neither for the node itself nor for its
     instrumented version, if one exists.  (The dyn_cast assignment
     inside the condition sets CNODE for the instrumented-version
     checks.)  */
  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& !TREE_ASM_WRITTEN (node->decl)
	&& (!(cnode = dyn_cast <cgraph_node *> (node))
	    || !cnode->instrumented_version
	    || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
	&& node->weakref)
      {
	tree target;

	/* Weakrefs are special by not requiring target definition in
	   current compilation unit.  It is thus a bit hard to work out
	   what we want to alias.
	   When alias target is defined, we need to fetch it from symtab
	   reference, otherwise it is pointed to by alias_target.  */
	if (node->alias_target)
	  target = (DECL_P (node->alias_target)
		    ? DECL_ASSEMBLER_NAME (node->alias_target)
		    : node->alias_target);
	else if (node->analyzed)
	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
	else
	  {
	    /* An unanalyzed alias without alias_target should not reach
	       this point; the attribute lookup below is dead code kept
	       behind the assert.  */
	    gcc_unreachable ();
	    target = get_alias_symbol (node->decl);
	  }
	do_assemble_alias (node->decl, target);
      }
}
2177
/* Perform simple optimizations based on callgraph.  This is the main
   driver: it runs the IPA passes, then expands and assembles all
   functions and variables, and finally verifies the symbol table.  */

void
symbol_table::compile (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  state = IPA;

  /* Offloading requires LTO infrastructure.  */
  if (!in_lto_p && g->have_offload)
    flag_generate_offload = 1;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_generate_lto || flag_generate_offload)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just
     streaming LTO (slim-LTO object: code generation happens at link
     time).  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  global_info_ready = true;
  if (dump_file)
    {
      fprintf (dump_file, "Optimized ");
      symtab_node:: dump_table (dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	  (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	  = (node->alias_target ? node->alias_target
	     : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
      }
#endif

  state = EXPANSION;

  if (!flag_toplevel_reorder)
    output_in_order (false);
  else
    {
      /* Output first asm statements and anything ordered.  The process
	 flag is cleared for these nodes, so we skip them later.  */
      output_in_order (true);
      expand_all_functions ();
      output_variables ();
    }

  process_new_functions ();
  state = FINISHED;
  output_weakrefs ();

  if (dump_file)
    {
      fprintf (dump_file, "\nFinal ");
      symtab_node::dump_table (dump_file);
    }
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    node->debug ();
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2316
2317
/* Analyze the whole compilation unit once it is parsed completely.
   Entry point called by the front ends; lowers everything to GIMPLE
   and then drives the pass manager via compile ().  */

void
symbol_table::finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  NOTE(review):
     called a second time here — presumably analyze_functions can
     introduce new alias pairs; confirm.  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  analyze_functions ();

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2363
2364 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2365 within the same process. For use by toplev::finalize. */
2366
2367 void
2368 cgraphunit_c_finalize (void)
2369 {
2370 gcc_assert (cgraph_new_nodes.length () == 0);
2371 cgraph_new_nodes.truncate (0);
2372
2373 vtable_entry_type = NULL;
2374 queued_nodes = &symtab_terminator;
2375
2376 first_analyzed = NULL;
2377 first_analyzed_var = NULL;
2378 }
2379
/* Creates a wrapper from cgraph_node to TARGET node.  Thunk is used for this
   kind of wrapper method.  The statement order below matters: the body
   must be released before the node is reset and re-expanded as a thunk.  */

void
cgraph_node::create_wrapper (cgraph_node *target)
{
  /* Preserve DECL_RESULT so we get right by reference flag.  */
  tree decl_result = DECL_RESULT (decl);

  /* Remove the function's body but keep arguments to be reused
     for thunk.  */
  release_body (true);
  reset ();

  DECL_RESULT (decl) = decl_result;
  DECL_INITIAL (decl) = NULL;
  allocate_struct_function (decl, false);
  set_cfun (NULL);

  /* Turn alias into thunk and expand it into GIMPLE representation.  */
  definition = true;
  thunk.thunk_p = true;
  /* Arguments pass through verbatim; no this-pointer adjustment.  */
  thunk.this_adjusting = false;

  /* The call edge from the wrapper to TARGET.  */
  cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);

  tree arguments = DECL_ARGUMENTS (decl);

  /* Clear TREE_ADDRESSABLE on every argument since they are only
     forwarded to TARGET.  */
  while (arguments)
    {
      TREE_ADDRESSABLE (arguments) = false;
      arguments = TREE_CHAIN (arguments);
    }

  expand_thunk (false, true);
  /* Keep the forwarding call as a real call; the inliner must not
     collapse it.  */
  e->call_stmt_cannot_inline_p = true;

  /* Inline summary set-up.  */
  analyze ();
  inline_analyze_function (this);
}
2421
2422 #include "gt-cgraphunit.h"