re PR ipa/64896 (ICE in get_address_mode, at rtlanal.c:5442)
[gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - finalize_function
29
30 This function is called once front-end has parsed whole body of function
31 and it is certain that the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies becomes available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that needs to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known what variable references were not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158 */
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "hash-set.h"
165 #include "machmode.h"
166 #include "vec.h"
167 #include "double-int.h"
168 #include "input.h"
169 #include "alias.h"
170 #include "symtab.h"
171 #include "wide-int.h"
172 #include "inchash.h"
173 #include "tree.h"
174 #include "fold-const.h"
175 #include "varasm.h"
176 #include "stor-layout.h"
177 #include "stringpool.h"
178 #include "output.h"
179 #include "rtl.h"
180 #include "predict.h"
181 #include "hard-reg-set.h"
182 #include "input.h"
183 #include "function.h"
184 #include "basic-block.h"
185 #include "tree-ssa-alias.h"
186 #include "internal-fn.h"
187 #include "gimple-fold.h"
188 #include "gimple-expr.h"
189 #include "is-a.h"
190 #include "gimple.h"
191 #include "gimplify.h"
192 #include "gimple-iterator.h"
193 #include "gimplify-me.h"
194 #include "gimple-ssa.h"
195 #include "tree-cfg.h"
196 #include "tree-into-ssa.h"
197 #include "tree-ssa.h"
198 #include "tree-inline.h"
199 #include "langhooks.h"
200 #include "toplev.h"
201 #include "flags.h"
202 #include "debug.h"
203 #include "target.h"
204 #include "diagnostic.h"
205 #include "params.h"
206 #include "intl.h"
207 #include "hash-map.h"
208 #include "plugin-api.h"
209 #include "ipa-ref.h"
210 #include "cgraph.h"
211 #include "alloc-pool.h"
212 #include "symbol-summary.h"
213 #include "ipa-prop.h"
214 #include "tree-iterator.h"
215 #include "tree-pass.h"
216 #include "tree-dump.h"
217 #include "gimple-pretty-print.h"
218 #include "output.h"
219 #include "coverage.h"
220 #include "plugin.h"
221 #include "ipa-inline.h"
222 #include "ipa-utils.h"
223 #include "lto-streamer.h"
224 #include "except.h"
225 #include "cfgloop.h"
226 #include "regset.h" /* FIXME: For reg_obstack. */
227 #include "context.h"
228 #include "pass_manager.h"
229 #include "tree-nested.h"
230 #include "gimplify.h"
231 #include "dbgcnt.h"
232 #include "tree-chkp.h"
233 #include "lto-section-names.h"
234 #include "omp-low.h"
235 #include "print-tree.h"
236
237 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
238 secondary queue used during optimization to accommodate passes that
239 may generate new functions that need to be optimized and expanded. */
240 vec<cgraph_node *> cgraph_new_nodes;
241
242 static void expand_all_functions (void);
243 static void mark_functions_to_output (void);
244 static void handle_alias_pairs (void);
245
246 /* Used for vtable lookup in thunk adjusting. */
247 static GTY (()) tree vtable_entry_type;
248
249 /* Determine if symbol declaration is needed. That is, visible to something
250 either outside this translation unit, something magic in the system
251 configury */
252 bool
253 symtab_node::needed_p (void)
254 {
255 /* Double check that no one output the function into assembly file
256 early. */
257 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
258 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
259
260 if (!definition)
261 return false;
262
263 if (DECL_EXTERNAL (decl))
264 return false;
265
266 /* If the user told us it is used, then it must be so. */
267 if (force_output)
268 return true;
269
270 /* ABI forced symbols are needed when they are external. */
271 if (forced_by_abi && TREE_PUBLIC (decl))
272 return true;
273
274 /* Keep constructors, destructors and virtual functions. */
275 if (TREE_CODE (decl) == FUNCTION_DECL
276 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
277 return true;
278
279 /* Externally visible variables must be output. The exception is
280 COMDAT variables that must be output only when they are needed. */
281 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
282 return true;
283
284 return false;
285 }
286
287 /* Head and terminator of the queue of nodes to be processed while building
288 callgraph. */
289
290 static symtab_node symtab_terminator;
291 static symtab_node *queued_nodes = &symtab_terminator;
292
293 /* Add NODE to queue starting at QUEUED_NODES.
294 The queue is linked via AUX pointers and terminated by pointer to 1. */
295
296 static void
297 enqueue_node (symtab_node *node)
298 {
299 if (node->aux)
300 return;
301 gcc_checking_assert (queued_nodes);
302 node->aux = queued_nodes;
303 queued_nodes = node;
304 }
305
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.
   How much work is done per function depends on how far compilation has
   progressed (the symbol table STATE).  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  /* Nothing queued; nothing to do.  */
  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  Hence re-read length () each
     iteration instead of caching it.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* If IPA passes expect SSA form, bring the new body into SSA by
	     running the early local passes; otherwise just (re)compute the
	     inline summary when summaries are in use.  */
	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summaries != NULL)
	    compute_inline_parameters (node, true);
	  /* Dominance info computed above is not needed past this point.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  /* All queued nodes were handled; drop the vector.  */
  cgraph_new_nodes.release ();
}
374
375 /* As an GCC extension we allow redefinition of the function. The
376 semantics when both copies of bodies differ is not well defined.
377 We replace the old body with new body so in unit at a time mode
378 we always use new body, while in normal mode we may end up with
379 old body inlined into some functions and new body expanded and
380 inlined in others.
381
382 ??? It may make more sense to use one body for inlining and other
383 body for expanding the function but this is difficult to do. */
384
385 void
386 cgraph_node::reset (void)
387 {
388 /* If process is set, then we have already begun whole-unit analysis.
389 This is *not* testing for whether we've already emitted the function.
390 That case can be sort-of legitimately seen with real function redefinition
391 errors. I would argue that the front end should never present us with
392 such a case, but don't enforce that for now. */
393 gcc_assert (!process);
394
395 /* Reset our data structures so we can analyze the function again. */
396 memset (&local, 0, sizeof (local));
397 memset (&global, 0, sizeof (global));
398 memset (&rtl, 0, sizeof (rtl));
399 analyzed = false;
400 definition = false;
401 alias = false;
402 weakref = false;
403 cpp_implicit_alias = false;
404
405 remove_callees ();
406 remove_all_references ();
407 }
408
409 /* Return true when there are references to the node. */
410
411 bool
412 symtab_node::referred_to_p (void)
413 {
414 ipa_ref *ref = NULL;
415
416 /* See if there are any references at all. */
417 if (iterate_referring (0, ref))
418 return true;
419 /* For functions check also calls. */
420 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
421 if (cn && cn->callers)
422 return true;
423 return false;
424 }
425
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      /* Redefinition (the GNU extern inline extension): drop the old
	 analysis so the new body is analyzed from scratch.  */
      node->reset ();
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->definition = true;
  /* If the front end already built a CFG for the body, lowering is done.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if ((!opt_for_fn (decl, optimize)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  /* GC point: allowed unless the caller asked us not to collect.  */
  if (!no_collect)
    ggc_collect ();

  /* While the symbol table is still being constructed, queue symbols that
     are clearly needed or already referenced for analysis.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}
486
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;
  /* How much work must be done now depends on how far compilation
     has already progressed.  */
  switch (symtab->state)
    {
      case PARSING:
	/* Still parsing: behave exactly like a front-end finalization.  */
	cgraph_node::finalize_function (fndecl, false);
	break;
      case CONSTRUCTION:
	/* Just enqueue function to be processed at nearest occurrence.  */
	node = cgraph_node::get_create (fndecl);
	if (lowered)
	  node->lowered = true;
	cgraph_new_nodes.safe_push (node);
        break;

      case IPA:
      case IPA_SSA:
      case IPA_SSA_AFTER_INLINING:
      case EXPANSION:
	/* Bring the function into finalized state and enqueue for later
	   analyzing and compilation.  */
	node = cgraph_node::get_create (fndecl);
	node->local.local = false;
	node->definition = true;
	/* Assumed reachable with address taken; force it to be output.  */
	node->force_output = true;
	if (!lowered && symtab->state == EXPANSION)
	  {
	    /* During expansion there is no later lowering stage, so run the
	       lowering and early local passes on the new body right away.  */
	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	    gimple_register_cfg_hooks ();
	    bitmap_obstack_initialize (NULL);
	    execute_pass_list (cfun, passes->all_lowering_passes);
	    passes->execute_early_local_passes ();
	    bitmap_obstack_release (NULL);
	    pop_cfun ();

	    lowered = true;
	  }
	if (lowered)
	  node->lowered = true;
	cgraph_new_nodes.safe_push (node);
        break;

      case FINISHED:
	/* At the very end of compilation we have to do all the work up
	   to expansion.  */
	node = cgraph_node::create (fndecl);
	if (lowered)
	  node->lowered = true;
	node->definition = true;
	node->analyze ();
	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	gimple_register_cfg_hooks ();
	bitmap_obstack_initialize (NULL);
	/* Bring the body into SSA form if it is not there yet.  */
	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	  g->get_passes ()->execute_early_local_passes ();
	bitmap_obstack_release (NULL);
	pop_cfun ();
	node->expand ();
	break;

      default:
	gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
572
/* Analyze the function scheduled to be output.  Depending on the node kind
   this expands a thunk, resolves an alias, generates a dispatcher body, or
   gimplifies and lowers an ordinary function body.  Sets ANALYZED on
   success (except when thunk expansion is deferred).  */
void
cgraph_node::analyze (void)
{
  tree decl = this->decl;
  /* Point diagnostics at the function being analyzed.  */
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (thunk.thunk_p)
    {
      create_edge (cgraph_node::get (thunk.alias),
		   NULL, 0, CGRAPH_FREQ_BASE);
      /* If the thunk body cannot be produced now, keep ANALYZED unset so
	 it is retried later.  */
      if (!expand_thunk (false, false))
	{
	  thunk.alias = NULL;
	  return;
	}
      thunk.alias = NULL;
    }
  if (alias)
    resolve_alias (cgraph_node::get (alias_target));
  else if (dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      cgraph_function_version_info *dispatcher_version_info
	= function_version ();
      if (dispatcher_version_info != NULL
          && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (this);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      /* Ordinary function: gimplify and lower the body.  */
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      /* Lower the function.  */
      if (!lowered)
	{
	  /* Nested functions must be lowered (split out) first.  */
	  if (nested)
	    lower_nested_functions (decl);
	  gcc_assert (!nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
	  /* Dominance info from the lowering passes is stale now.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  lowered = true;
	}

      pop_cfun ();
    }
  analyzed = true;

  input_location = saved_loc;
}
646
647 /* C++ frontend produce same body aliases all over the place, even before PCH
648 gets streamed out. It relies on us linking the aliases with their function
649 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
650 first produce aliases without links, but once C++ FE is sure he won't sream
651 PCH we build the links via this function. */
652
653 void
654 symbol_table::process_same_body_aliases (void)
655 {
656 symtab_node *node;
657 FOR_EACH_SYMBOL (node)
658 if (node->cpp_implicit_alias && !node->analyzed)
659 node->resolve_alias
660 (TREE_CODE (node->alias_target) == VAR_DECL
661 ? (symtab_node *)varpool_node::get_create (node->alias_target)
662 : (symtab_node *)cgraph_node::get_create (node->alias_target));
663 cpp_implicit_aliases_done = true;
664 }
665
666 /* Process attributes common for vars and functions. */
667
668 static void
669 process_common_attributes (symtab_node *node, tree decl)
670 {
671 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
672
673 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
674 {
675 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
676 "%<weakref%> attribute should be accompanied with"
677 " an %<alias%> attribute");
678 DECL_WEAK (decl) = 0;
679 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
680 DECL_ATTRIBUTES (decl));
681 }
682
683 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
684 node->no_reorder = 1;
685 }
686
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  Only nodes added since FIRST / FIRST_VAR are
   visited, so repeated calls process each node once.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
					  varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  /* Functions added since FIRST.  */
  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* weakref on a defined function contradicts the definition.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (node, decl);
    }
  /* Variables added since FIRST_VAR.  */
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      /* An external variable with an initializer is really a definition.  */
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* weakref on an initialized variable contradicts the definition.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						      DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}
781
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct the
   middle end to output the variable to asm file, if needed or externally
   visible.  Safe to call more than once: a second call on an already
   finalized node is a no-op.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Already finalized; nothing to do.  */
  if (node->definition)
    return;
  notice_global_symbol (decl);
  node->definition = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reoder.  */
      || node->no_reorder
      || ((!flag_toplevel_reorder
          && !DECL_COMDAT (node->decl)
	   && !DECL_ARTIFICIAL (node->decl))))
    node->force_output = true;

  /* During construction, queue needed or referenced variables so their
     references get discovered.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (!flag_toplevel_reorder
	&& symtab->state == EXPANSION))
    node->assemble_decl ();

  /* Let pointer-bounds checking know about the initializer.  */
  if (DECL_INITIAL (decl))
    chkp_register_var_initializer (decl);
}
821
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the target list; skip lists walked before.  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		  (TREE_TYPE (targets[i]->decl))
		  == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		   (method_class_type
		     (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  /* Zero targets means the call is provably unreachable; direct it
	     to __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  edge->make_direct (target);
	  edge->redirect_call_stmt_to_callee ();

	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
	  if (!targets.length ())
	    gimple_call_set_with_bounds (edge->call_stmt, false);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
909
910
911 /* Discover all functions and variables that are trivially needed, analyze
912 them as well as all functions and variables referred by them */
913 static cgraph_node *first_analyzed;
914 static varpool_node *first_analyzed_var;
915
916 static void
917 analyze_functions (void)
918 {
919 /* Keep track of already processed nodes when called multiple times for
920 intermodule optimization. */
921 cgraph_node *first_handled = first_analyzed;
922 varpool_node *first_handled_var = first_analyzed_var;
923 hash_set<void *> reachable_call_targets;
924
925 symtab_node *node;
926 symtab_node *next;
927 int i;
928 ipa_ref *ref;
929 bool changed = true;
930 location_t saved_loc = input_location;
931
932 bitmap_obstack_initialize (NULL);
933 symtab->state = CONSTRUCTION;
934 input_location = UNKNOWN_LOCATION;
935
936 /* Ugly, but the fixup can not happen at a time same body alias is created;
937 C++ FE is confused about the COMDAT groups being right. */
938 if (symtab->cpp_implicit_aliases_done)
939 FOR_EACH_SYMBOL (node)
940 if (node->cpp_implicit_alias)
941 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
942 build_type_inheritance_graph ();
943
944 /* Analysis adds static variables that in turn adds references to new functions.
945 So we need to iterate the process until it stabilize. */
946 while (changed)
947 {
948 changed = false;
949 process_function_and_variable_attributes (first_analyzed,
950 first_analyzed_var);
951
952 /* First identify the trivially needed symbols. */
953 for (node = symtab->first_symbol ();
954 node != first_analyzed
955 && node != first_analyzed_var; node = node->next)
956 {
957 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
958 node->get_comdat_group_id ();
959 if (node->needed_p ())
960 {
961 enqueue_node (node);
962 if (!changed && symtab->dump_file)
963 fprintf (symtab->dump_file, "Trivially needed symbols:");
964 changed = true;
965 if (symtab->dump_file)
966 fprintf (symtab->dump_file, " %s", node->asm_name ());
967 if (!changed && symtab->dump_file)
968 fprintf (symtab->dump_file, "\n");
969 }
970 if (node == first_analyzed
971 || node == first_analyzed_var)
972 break;
973 }
974 symtab->process_new_functions ();
975 first_analyzed_var = symtab->first_variable ();
976 first_analyzed = symtab->first_function ();
977
978 if (changed && symtab->dump_file)
979 fprintf (symtab->dump_file, "\n");
980
981 /* Lower representation, build callgraph edges and references for all trivially
982 needed symbols and all symbols referred by them. */
983 while (queued_nodes != &symtab_terminator)
984 {
985 changed = true;
986 node = queued_nodes;
987 queued_nodes = (symtab_node *)queued_nodes->aux;
988 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
989 if (cnode && cnode->definition)
990 {
991 cgraph_edge *edge;
992 tree decl = cnode->decl;
993
994 /* ??? It is possible to create extern inline function
995 and later using weak alias attribute to kill its body.
996 See gcc.c-torture/compile/20011119-1.c */
997 if (!DECL_STRUCT_FUNCTION (decl)
998 && !cnode->alias
999 && !cnode->thunk.thunk_p
1000 && !cnode->dispatcher_function)
1001 {
1002 cnode->reset ();
1003 cnode->local.redefined_extern_inline = true;
1004 continue;
1005 }
1006
1007 if (!cnode->analyzed)
1008 cnode->analyze ();
1009
1010 for (edge = cnode->callees; edge; edge = edge->next_callee)
1011 if (edge->callee->definition
1012 && (!DECL_EXTERNAL (edge->callee->decl)
1013 /* When not optimizing, do not try to analyze extern
1014 inline functions. Doing so is pointless. */
1015 || opt_for_fn (edge->callee->decl, optimize)
1016 /* Weakrefs needs to be preserved. */
1017 || edge->callee->alias
1018 /* always_inline functions are inlined aven at -O0. */
1019 || lookup_attribute
1020 ("always_inline",
1021 DECL_ATTRIBUTES (edge->callee->decl))
1022 /* Multiversioned functions needs the dispatcher to
1023 be produced locally even for extern functions. */
1024 || edge->callee->function_version ()))
1025 enqueue_node (edge->callee);
1026 if (opt_for_fn (cnode->decl, optimize)
1027 && opt_for_fn (cnode->decl, flag_devirtualize))
1028 {
1029 cgraph_edge *next;
1030
1031 for (edge = cnode->indirect_calls; edge; edge = next)
1032 {
1033 next = edge->next_callee;
1034 if (edge->indirect_info->polymorphic)
1035 walk_polymorphic_call_targets (&reachable_call_targets,
1036 edge);
1037 }
1038 }
1039
1040 /* If decl is a clone of an abstract function,
1041 mark that abstract function so that we don't release its body.
1042 The DECL_INITIAL() of that abstract function declaration
1043 will be later needed to output debug info. */
1044 if (DECL_ABSTRACT_ORIGIN (decl))
1045 {
1046 cgraph_node *origin_node
1047 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1048 origin_node->used_as_abstract_origin = true;
1049 }
1050 }
1051 else
1052 {
1053 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1054 if (vnode && vnode->definition && !vnode->analyzed)
1055 vnode->analyze ();
1056 }
1057
1058 if (node->same_comdat_group)
1059 {
1060 symtab_node *next;
1061 for (next = node->same_comdat_group;
1062 next != node;
1063 next = next->same_comdat_group)
1064 if (!next->comdat_local_p ())
1065 enqueue_node (next);
1066 }
1067 for (i = 0; node->iterate_reference (i, ref); i++)
1068 if (ref->referred->definition
1069 && (!DECL_EXTERNAL (ref->referred->decl)
1070 || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1071 && optimize)
1072 || (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1073 && opt_for_fn (ref->referred->decl, optimize))
1074 || node->alias
1075 || ref->referred->alias)))
1076 enqueue_node (ref->referred);
1077 symtab->process_new_functions ();
1078 }
1079 }
1080 update_type_inheritance_graph ();
1081
1082 /* Collect entry points to the unit. */
1083 if (symtab->dump_file)
1084 {
1085 fprintf (symtab->dump_file, "\n\nInitial ");
1086 symtab_node::dump_table (symtab->dump_file);
1087 }
1088
1089 if (symtab->dump_file)
1090 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1091
1092 for (node = symtab->first_symbol ();
1093 node != first_handled
1094 && node != first_handled_var; node = next)
1095 {
1096 next = node->next;
1097 if (!node->aux && !node->referred_to_p ())
1098 {
1099 if (symtab->dump_file)
1100 fprintf (symtab->dump_file, " %s", node->name ());
1101 node->remove ();
1102 continue;
1103 }
1104 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1105 {
1106 tree decl = node->decl;
1107
1108 if (cnode->definition && !gimple_has_body_p (decl)
1109 && !cnode->alias
1110 && !cnode->thunk.thunk_p)
1111 cnode->reset ();
1112
1113 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1114 || cnode->alias
1115 || gimple_has_body_p (decl));
1116 gcc_assert (cnode->analyzed == cnode->definition);
1117 }
1118 node->aux = NULL;
1119 }
1120 for (;node; node = node->next)
1121 node->aux = NULL;
1122 first_analyzed = symtab->first_function ();
1123 first_analyzed_var = symtab->first_variable ();
1124 if (symtab->dump_file)
1125 {
1126 fprintf (symtab->dump_file, "\n\nReclaimed ");
1127 symtab_node::dump_table (symtab->dump_file);
1128 }
1129 bitmap_obstack_release (NULL);
1130 ggc_collect ();
1131 /* Initialize assembler name hash, in particular we want to trigger C++
1132 mangling and same body alias creation before we free DECL_ARGUMENTS
1133 used by it. */
1134 if (!seen_error ())
1135 symtab->symtab_initialize_asm_name_hash ();
1136
1137 input_location = saved_loc;
1138 }
1139
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  /* I is never incremented: every code path in the loop body removes
     entry I with unordered_remove, which slides the last pair into
     slot I, so each pair is still visited exactly once.  */
  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  /* Non-weakref alias to a symbol nowhere in this unit: diagnose
	     and drop the alias flag so the symbol is treated normally.  */
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      /* Matching kinds (function->function, variable->variable) become
	 real callgraph/varpool aliases; a kind mismatch is diagnosed in
	 the final else below.  */
      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  /* The alias replaces any previously parsed body of P->decl.  */
	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  error ("%q+D alias in between function and variable is not supported",
		 p->decl);
	  warning (0, "%q+D aliased declaration",
		   target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
1218
1219
/* Figure out what functions we want to assemble.  Sets NODE->process on
   every function that must be output, including other members of a COMDAT
   group once one member of the group is selected.  */

static void
mark_functions_to_output (void)
{
  cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No function may be marked for output before we start.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* PROCESS can only be set early via the same_comdat_group
	 propagation performed below for another group member.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Emitting one member of a COMDAT group drags out the whole
	     group, except thunks, aliases and comdat-local members.  */
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Can't verify this node yet: it may still get marked later
	     through another member of its group; re-check at the end.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Second pass: any COMDAT member left unmarked must not own a body.  */
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1320
1321 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1322 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1323
1324 Set current_function_decl and cfun to newly constructed empty function body.
1325 return basic block in the function body. */
1326
1327 basic_block
1328 init_lowered_empty_function (tree decl, bool in_ssa, gcov_type count)
1329 {
1330 basic_block bb;
1331 edge e;
1332
1333 current_function_decl = decl;
1334 allocate_struct_function (decl, false);
1335 gimple_register_cfg_hooks ();
1336 init_empty_tree_cfg ();
1337
1338 if (in_ssa)
1339 {
1340 init_tree_ssa (cfun);
1341 init_ssa_operands (cfun);
1342 cfun->gimple_df->in_ssa_p = true;
1343 cfun->curr_properties |= PROP_ssa;
1344 }
1345
1346 DECL_INITIAL (decl) = make_node (BLOCK);
1347
1348 DECL_SAVED_TREE (decl) = error_mark_node;
1349 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1350 | PROP_cfg | PROP_loops);
1351
1352 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1353 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1354 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1355
1356 /* Create BB for body of the function and connect it properly. */
1357 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1358 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
1359 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1360 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
1361 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1362 bb->count = count;
1363 bb->frequency = BB_FREQ_MAX;
1364 e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1365 e->count = count;
1366 e->probability = REG_BR_PROB_BASE;
1367 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1368 e->count = count;
1369 e->probability = REG_BR_PROB_BASE;
1370 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1371
1372 return bb;
1373 }
1374
1375 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1376 offset indicated by VIRTUAL_OFFSET, if that is
1377 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1378 zero for a result adjusting thunk. */
1379
1380 static tree
1381 thunk_adjust (gimple_stmt_iterator * bsi,
1382 tree ptr, bool this_adjusting,
1383 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1384 {
1385 gassign *stmt;
1386 tree ret;
1387
1388 if (this_adjusting
1389 && fixed_offset != 0)
1390 {
1391 stmt = gimple_build_assign
1392 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1393 ptr,
1394 fixed_offset));
1395 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1396 }
1397
1398 /* If there's a virtual offset, look up that value in the vtable and
1399 adjust the pointer again. */
1400 if (virtual_offset)
1401 {
1402 tree vtabletmp;
1403 tree vtabletmp2;
1404 tree vtabletmp3;
1405
1406 if (!vtable_entry_type)
1407 {
1408 tree vfunc_type = make_node (FUNCTION_TYPE);
1409 TREE_TYPE (vfunc_type) = integer_type_node;
1410 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1411 layout_type (vfunc_type);
1412
1413 vtable_entry_type = build_pointer_type (vfunc_type);
1414 }
1415
1416 vtabletmp =
1417 create_tmp_reg (build_pointer_type
1418 (build_pointer_type (vtable_entry_type)), "vptr");
1419
1420 /* The vptr is always at offset zero in the object. */
1421 stmt = gimple_build_assign (vtabletmp,
1422 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1423 ptr));
1424 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1425
1426 /* Form the vtable address. */
1427 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1428 "vtableaddr");
1429 stmt = gimple_build_assign (vtabletmp2,
1430 build_simple_mem_ref (vtabletmp));
1431 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1432
1433 /* Find the entry with the vcall offset. */
1434 stmt = gimple_build_assign (vtabletmp2,
1435 fold_build_pointer_plus_loc (input_location,
1436 vtabletmp2,
1437 virtual_offset));
1438 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1439
1440 /* Get the offset itself. */
1441 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1442 "vcalloffset");
1443 stmt = gimple_build_assign (vtabletmp3,
1444 build_simple_mem_ref (vtabletmp2));
1445 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1446
1447 /* Adjust the `this' pointer. */
1448 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1449 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1450 GSI_CONTINUE_LINKING);
1451 }
1452
1453 if (!this_adjusting
1454 && fixed_offset != 0)
1455 /* Adjust the pointer by the constant. */
1456 {
1457 tree ptrtmp;
1458
1459 if (TREE_CODE (ptr) == VAR_DECL)
1460 ptrtmp = ptr;
1461 else
1462 {
1463 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1464 stmt = gimple_build_assign (ptrtmp, ptr);
1465 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1466 }
1467 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1468 ptrtmp, fixed_offset);
1469 }
1470
1471 /* Emit the statement and gimplify the adjustment expression. */
1472 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1473 stmt = gimple_build_assign (ret, ptr);
1474 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1475
1476 return ret;
1477 }
1478
1479 /* Expand thunk NODE to gimple if possible.
1480 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1481 no assembler is produced.
1482 When OUTPUT_ASM_THUNK is true, also produce assembler for
1483 thunks that are not lowered. */
1484
1485 bool
1486 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1487 {
1488 bool this_adjusting = thunk.this_adjusting;
1489 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1490 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1491 tree virtual_offset = NULL;
1492 tree alias = callees->callee->decl;
1493 tree thunk_fndecl = decl;
1494 tree a;
1495
1496
1497 if (!force_gimple_thunk && this_adjusting
1498 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1499 virtual_value, alias))
1500 {
1501 const char *fnname;
1502 tree fn_block;
1503 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1504
1505 if (!output_asm_thunks)
1506 {
1507 analyzed = true;
1508 return false;
1509 }
1510
1511 if (in_lto_p)
1512 get_untransformed_body ();
1513 a = DECL_ARGUMENTS (thunk_fndecl);
1514
1515 current_function_decl = thunk_fndecl;
1516
1517 /* Ensure thunks are emitted in their correct sections. */
1518 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1519
1520 DECL_RESULT (thunk_fndecl)
1521 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1522 RESULT_DECL, 0, restype);
1523 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1524 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1525
1526 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1527 create one. */
1528 fn_block = make_node (BLOCK);
1529 BLOCK_VARS (fn_block) = a;
1530 DECL_INITIAL (thunk_fndecl) = fn_block;
1531 init_function_start (thunk_fndecl);
1532 cfun->is_thunk = 1;
1533 insn_locations_init ();
1534 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1535 prologue_location = curr_insn_location ();
1536 assemble_start_function (thunk_fndecl, fnname);
1537
1538 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1539 fixed_offset, virtual_value, alias);
1540
1541 assemble_end_function (thunk_fndecl, fnname);
1542 insn_locations_finalize ();
1543 init_insn_lengths ();
1544 free_after_compilation (cfun);
1545 set_cfun (NULL);
1546 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1547 thunk.thunk_p = false;
1548 analyzed = false;
1549 }
1550 else
1551 {
1552 tree restype;
1553 basic_block bb, then_bb, else_bb, return_bb;
1554 gimple_stmt_iterator bsi;
1555 int nargs = 0;
1556 tree arg;
1557 int i;
1558 tree resdecl;
1559 tree restmp = NULL;
1560
1561 gcall *call;
1562 greturn *ret;
1563
1564 if (in_lto_p)
1565 get_untransformed_body ();
1566 a = DECL_ARGUMENTS (thunk_fndecl);
1567
1568 current_function_decl = thunk_fndecl;
1569
1570 /* Ensure thunks are emitted in their correct sections. */
1571 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1572
1573 DECL_IGNORED_P (thunk_fndecl) = 1;
1574 bitmap_obstack_initialize (NULL);
1575
1576 if (thunk.virtual_offset_p)
1577 virtual_offset = size_int (virtual_value);
1578
1579 /* Build the return declaration for the function. */
1580 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1581 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1582 {
1583 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1584 DECL_ARTIFICIAL (resdecl) = 1;
1585 DECL_IGNORED_P (resdecl) = 1;
1586 DECL_RESULT (thunk_fndecl) = resdecl;
1587 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1588 }
1589 else
1590 resdecl = DECL_RESULT (thunk_fndecl);
1591
1592 bb = then_bb = else_bb = return_bb
1593 = init_lowered_empty_function (thunk_fndecl, true, count);
1594
1595 bsi = gsi_start_bb (bb);
1596
1597 /* Build call to the function being thunked. */
1598 if (!VOID_TYPE_P (restype))
1599 {
1600 if (DECL_BY_REFERENCE (resdecl))
1601 {
1602 restmp = gimple_fold_indirect_ref (resdecl);
1603 if (!restmp)
1604 restmp = build2 (MEM_REF,
1605 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
1606 resdecl,
1607 build_int_cst (TREE_TYPE
1608 (DECL_RESULT (alias)), 0));
1609 }
1610 else if (!is_gimple_reg_type (restype))
1611 {
1612 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1613 {
1614 restmp = resdecl;
1615
1616 if (TREE_CODE (restmp) == VAR_DECL)
1617 add_local_decl (cfun, restmp);
1618 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1619 }
1620 else
1621 restmp = create_tmp_var (restype, "retval");
1622 }
1623 else
1624 restmp = create_tmp_reg (restype, "retval");
1625 }
1626
1627 for (arg = a; arg; arg = DECL_CHAIN (arg))
1628 nargs++;
1629 auto_vec<tree> vargs (nargs);
1630 i = 0;
1631 arg = a;
1632 if (this_adjusting)
1633 {
1634 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1635 virtual_offset));
1636 arg = DECL_CHAIN (a);
1637 i = 1;
1638 }
1639
1640 if (nargs)
1641 for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1642 {
1643 tree tmp = arg;
1644 if (!is_gimple_val (arg))
1645 {
1646 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1647 (TREE_TYPE (arg)), "arg");
1648 gimple stmt = gimple_build_assign (tmp, arg);
1649 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1650 }
1651 vargs.quick_push (tmp);
1652 }
1653 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1654 callees->call_stmt = call;
1655 gimple_call_set_from_thunk (call, true);
1656 gimple_call_set_with_bounds (call, instrumentation_clone);
1657 if (restmp)
1658 {
1659 gimple_call_set_lhs (call, restmp);
1660 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1661 TREE_TYPE (TREE_TYPE (alias))));
1662 }
1663 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1664 if (!(gimple_call_flags (call) & ECF_NORETURN))
1665 {
1666 if (restmp && !this_adjusting
1667 && (fixed_offset || virtual_offset))
1668 {
1669 tree true_label = NULL_TREE;
1670
1671 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1672 {
1673 gimple stmt;
1674 edge e;
1675 /* If the return type is a pointer, we need to
1676 protect against NULL. We know there will be an
1677 adjustment, because that's why we're emitting a
1678 thunk. */
1679 then_bb = create_basic_block (NULL, (void *) 0, bb);
1680 then_bb->count = count - count / 16;
1681 then_bb->frequency = BB_FREQ_MAX - BB_FREQ_MAX / 16;
1682 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1683 return_bb->count = count;
1684 return_bb->frequency = BB_FREQ_MAX;
1685 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1686 then_bb->count = count / 16;
1687 then_bb->frequency = BB_FREQ_MAX / 16;
1688 add_bb_to_loop (then_bb, bb->loop_father);
1689 add_bb_to_loop (return_bb, bb->loop_father);
1690 add_bb_to_loop (else_bb, bb->loop_father);
1691 remove_edge (single_succ_edge (bb));
1692 true_label = gimple_block_label (then_bb);
1693 stmt = gimple_build_cond (NE_EXPR, restmp,
1694 build_zero_cst (TREE_TYPE (restmp)),
1695 NULL_TREE, NULL_TREE);
1696 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1697 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1698 e->probability = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 16;
1699 e->count = count - count / 16;
1700 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1701 e->probability = REG_BR_PROB_BASE / 16;
1702 e->count = count / 16;
1703 e = make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1704 e->probability = REG_BR_PROB_BASE;
1705 e->count = count;
1706 e = make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1707 e->probability = REG_BR_PROB_BASE;
1708 e->count = count - count / 16;
1709 e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1710 e->probability = REG_BR_PROB_BASE;
1711 e->count = count / 16;
1712 bsi = gsi_last_bb (then_bb);
1713 }
1714
1715 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1716 fixed_offset, virtual_offset);
1717 if (true_label)
1718 {
1719 gimple stmt;
1720 bsi = gsi_last_bb (else_bb);
1721 stmt = gimple_build_assign (restmp,
1722 build_zero_cst (TREE_TYPE (restmp)));
1723 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1724 bsi = gsi_last_bb (return_bb);
1725 }
1726 }
1727 else
1728 gimple_call_set_tail (call, true);
1729
1730 /* Build return value. */
1731 if (!DECL_BY_REFERENCE (resdecl))
1732 ret = gimple_build_return (restmp);
1733 else
1734 ret = gimple_build_return (resdecl);
1735
1736 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1737 }
1738 else
1739 {
1740 gimple_call_set_tail (call, true);
1741 remove_edge (single_succ_edge (bb));
1742 }
1743
1744 cfun->gimple_df->in_ssa_p = true;
1745 profile_status_for_fn (cfun)
1746 = count ? PROFILE_READ : PROFILE_GUESSED;
1747 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
1748 TREE_ASM_WRITTEN (thunk_fndecl) = false;
1749 delete_unreachable_blocks ();
1750 update_ssa (TODO_update_ssa);
1751 #ifdef ENABLE_CHECKING
1752 verify_flow_info ();
1753 #endif
1754 free_dominance_info (CDI_DOMINATORS);
1755
1756 /* Since we want to emit the thunk, we explicitly mark its name as
1757 referenced. */
1758 thunk.thunk_p = false;
1759 lowered = true;
1760 bitmap_obstack_release (NULL);
1761 }
1762 current_function_decl = NULL;
1763 set_cfun (NULL);
1764 return true;
1765 }
1766
1767 /* Assemble thunks and aliases associated to node. */
1768
1769 void
1770 cgraph_node::assemble_thunks_and_aliases (void)
1771 {
1772 cgraph_edge *e;
1773 ipa_ref *ref;
1774
1775 for (e = callers; e;)
1776 if (e->caller->thunk.thunk_p
1777 && !e->caller->thunk.add_pointer_bounds_args)
1778 {
1779 cgraph_node *thunk = e->caller;
1780
1781 e = e->next_caller;
1782 thunk->expand_thunk (true, false);
1783 thunk->assemble_thunks_and_aliases ();
1784 }
1785 else
1786 e = e->next_caller;
1787
1788 FOR_EACH_ALIAS (this, ref)
1789 {
1790 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1791 bool saved_written = TREE_ASM_WRITTEN (decl);
1792
1793 /* Force assemble_alias to really output the alias this time instead
1794 of buffering it in same alias pairs. */
1795 TREE_ASM_WRITTEN (decl) = 1;
1796 do_assemble_alias (alias->decl,
1797 DECL_ASSEMBLER_NAME (decl));
1798 alias->assemble_thunks_and_aliases ();
1799 TREE_ASM_WRITTEN (decl) = saved_written;
1800 }
1801 }
1802
/* Expand function specified by node: run the remaining gimple passes and
   RTL generation, emit the assembly, then release the function body.  */

void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!global.inlined_to);

  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply queued IPA transform summaries before running local passes.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Report the exact size when it fits in unsigned int; otherwise
	     fall back to reporting the threshold that was exceeded.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
		     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
		     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  set_cfun (NULL);
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before function body to get more
     forward and lest backwarding jumps.  This however would need solving problem
     with comdats.  See PR48668.  Also aliases must come after function itself to
     make one pass assemblers, like one on AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be move before function IFF they are not in comdat
     groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
1914
1915 /* Node comparer that is responsible for the order that corresponds
1916 to time when a function was launched for the first time. */
1917
1918 static int
1919 node_cmp (const void *pa, const void *pb)
1920 {
1921 const cgraph_node *a = *(const cgraph_node * const *) pa;
1922 const cgraph_node *b = *(const cgraph_node * const *) pb;
1923
1924 /* Functions with time profile must be before these without profile. */
1925 if (!a->tp_first_run || !b->tp_first_run)
1926 return a->tp_first_run - b->tp_first_run;
1927
1928 return a->tp_first_run != b->tp_first_run
1929 ? b->tp_first_run - a->tp_first_run
1930 : b->order - a->order;
1931 }
1932
1933 /* Expand all functions that must be output.
1934
1935 Attempt to topologically sort the nodes so function is output when
1936 all called functions are already assembled to allow data to be
1937 propagated across the callgraph. Use a stack to get smaller distance
1938 between a function and its callees (later we may choose to use a more
1939 sophisticated algorithm for function reordering; we will likely want
1940 to use subsections to make the output functions appear in top-down
1941 order). */
1942
1943 static void
1944 expand_all_functions (void)
1945 {
1946 cgraph_node *node;
1947 cgraph_node **order = XCNEWVEC (cgraph_node *,
1948 symtab->cgraph_count);
1949 unsigned int expanded_func_count = 0, profiled_func_count = 0;
1950 int order_pos, new_order_pos = 0;
1951 int i;
1952
1953 order_pos = ipa_reverse_postorder (order);
1954 gcc_assert (order_pos == symtab->cgraph_count);
1955
1956 /* Garbage collector may remove inline clones we eliminate during
1957 optimization. So we must be sure to not reference them. */
1958 for (i = 0; i < order_pos; i++)
1959 if (order[i]->process)
1960 order[new_order_pos++] = order[i];
1961
1962 if (flag_profile_reorder_functions)
1963 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
1964
1965 for (i = new_order_pos - 1; i >= 0; i--)
1966 {
1967 node = order[i];
1968
1969 if (node->process)
1970 {
1971 expanded_func_count++;
1972 if(node->tp_first_run)
1973 profiled_func_count++;
1974
1975 if (symtab->dump_file)
1976 fprintf (symtab->dump_file,
1977 "Time profile order in expand_all_functions:%s:%d\n",
1978 node->asm_name (), node->tp_first_run);
1979 node->process = 0;
1980 node->expand ();
1981 }
1982 }
1983
1984 if (dump_file)
1985 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
1986 main_input_filename, profiled_func_count, expanded_func_count);
1987
1988 if (symtab->dump_file && flag_profile_reorder_functions)
1989 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
1990 profiled_func_count, expanded_func_count);
1991
1992 symtab->process_new_functions ();
1993 free_gimplify_stack ();
1994
1995 free (order);
1996 }
1997
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not occupied by any symbol.  */
  ORDER_FUNCTION,	/* Slot holds a cgraph_node (u.f).  */
  ORDER_VAR,		/* Slot holds a varpool_node (u.v).  */
  ORDER_ASM		/* Slot holds a toplevel asm_node (u.a).  */
};
2007
2008 struct cgraph_order_sort
2009 {
2010 enum cgraph_order_sort_kind kind;
2011 union
2012 {
2013 cgraph_node *f;
2014 varpool_node *v;
2015 asm_node *a;
2016 } u;
2017 };
2018
2019 /* Output all functions, variables, and asm statements in the order
2020 according to their order fields, which is the order in which they
2021 appeared in the file. This implements -fno-toplevel-reorder. In
2022 this mode we may output functions and variables which don't really
2023 need to be output.
2024 When NO_REORDER is true only do this for symbols marked no reorder. */
2025
2026 static void
2027 output_in_order (bool no_reorder)
2028 {
2029 int max;
2030 cgraph_order_sort *nodes;
2031 int i;
2032 cgraph_node *pf;
2033 varpool_node *pv;
2034 asm_node *pa;
2035 max = symtab->order;
2036 nodes = XCNEWVEC (cgraph_order_sort, max);
2037
2038 FOR_EACH_DEFINED_FUNCTION (pf)
2039 {
2040 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2041 {
2042 if (no_reorder && !pf->no_reorder)
2043 continue;
2044 i = pf->order;
2045 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2046 nodes[i].kind = ORDER_FUNCTION;
2047 nodes[i].u.f = pf;
2048 }
2049 }
2050
2051 FOR_EACH_DEFINED_VARIABLE (pv)
2052 if (!DECL_EXTERNAL (pv->decl))
2053 {
2054 if (no_reorder && !pv->no_reorder)
2055 continue;
2056 i = pv->order;
2057 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2058 nodes[i].kind = ORDER_VAR;
2059 nodes[i].u.v = pv;
2060 }
2061
2062 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2063 {
2064 i = pa->order;
2065 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2066 nodes[i].kind = ORDER_ASM;
2067 nodes[i].u.a = pa;
2068 }
2069
2070 /* In toplevel reorder mode we output all statics; mark them as needed. */
2071
2072 for (i = 0; i < max; ++i)
2073 if (nodes[i].kind == ORDER_VAR)
2074 nodes[i].u.v->finalize_named_section_flags ();
2075
2076 for (i = 0; i < max; ++i)
2077 {
2078 switch (nodes[i].kind)
2079 {
2080 case ORDER_FUNCTION:
2081 nodes[i].u.f->process = 0;
2082 nodes[i].u.f->expand ();
2083 break;
2084
2085 case ORDER_VAR:
2086 nodes[i].u.v->assemble_decl ();
2087 break;
2088
2089 case ORDER_ASM:
2090 assemble_asm (nodes[i].u.a->asm_str);
2091 break;
2092
2093 case ORDER_UNDEFINED:
2094 break;
2095
2096 default:
2097 gcc_unreachable ();
2098 }
2099 }
2100
2101 symtab->clear_asm_symbols ();
2102
2103 free (nodes);
2104 }
2105
/* Run the inter-procedural (IPA) pass pipeline: the small IPA passes,
   summary computation, LTO/offload summary streaming, and finally the
   regular IPA passes.  Entered with no current function; ensures the
   symbol table reaches at least IPA_SSA state.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  /* IPA passes work on the whole program, not a single function.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      if (seen_error ())
	/* NOTE(review): this early return skips bitmap_obstack_release
	   and the PLUGIN_ALL_IPA_PASSES_END callback below; compilation
	   stops on error anyway, but confirm this imbalance is
	   intentional.  */
	return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal iterate.  */
  symtab->remove_unreachable_nodes (symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      /* Compute per-function summaries consumed by the regular IPA
	 passes (and streamed below when doing LTO).  */
      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_start ();

  if (!in_lto_p)
    {
      /* Stream offload summaries first, then host LTO summaries; the
	 section name prefix selects the object sections they land in.  */
      if (g->have_offload)
	{
	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = true;
	  ipa_write_summaries ();
	  lto_stream_offload_p = false;
	}
      if (flag_lto)
	{
	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = false;
	  ipa_write_summaries ();
	}
    }

  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_end ();

  /* Skip the regular IPA passes when only streaming summaries
     (slim-LTO compile step) or at LTRANS time (presumably they already
     ran during WPA -- confirm).  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2178
2179
2180 /* Return string alias is alias of. */
2181
2182 static tree
2183 get_alias_symbol (tree decl)
2184 {
2185 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2186 return get_identifier (TREE_STRING_POINTER
2187 (TREE_VALUE (TREE_VALUE (alias))));
2188 }
2189
2190
/* Weakrefs may be associated to external decls and thus not output
   at expansion time.  Emit all necessary aliases.  */

void
symbol_table::output_weakrefs (void)
{
  symtab_node *node;
  cgraph_node *cnode;
  /* Emit a weakref alias only if nothing has been assembled for it yet;
     for functions with an instrumented version, also require that the
     instrumented decl has not been written.  */
  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& !TREE_ASM_WRITTEN (node->decl)
	&& (!(cnode = dyn_cast <cgraph_node *> (node))
	    || !cnode->instrumented_version
	    || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
	&& node->weakref)
      {
	tree target;

	/* Weakrefs are special by not requiring target definition in current
	   compilation unit.  It is thus bit hard to work out what we want to
	   alias.
	   When alias target is defined, we need to fetch it from symtab reference,
	   otherwise it is pointed to by alias_target.  */
	if (node->alias_target)
	  target = (DECL_P (node->alias_target)
		    ? DECL_ASSEMBLER_NAME (node->alias_target)
		    : node->alias_target);
	else if (node->analyzed)
	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
	else
	  {
	    /* NOTE(review): this branch is believed unreachable (hence
	       the gcc_unreachable); the assignment below is dead in
	       release builds -- confirm before removing.  */
	    gcc_unreachable ();
	    target = get_alias_symbol (node->decl);
	  }
	do_assemble_alias (node->decl, target);
      }
}
2228
/* Perform simple optimizations based on callgraph.
   Top-level driver: runs the IPA pipeline, then expands and assembles
   every function and variable that must be output.  Leaves the symbol
   table in FINISHED state (unless it bails out early on errors or a
   slim-LTO compile step).  */

void
symbol_table::compile (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  state = IPA;

  /* Offloading requires LTO infrastructure.  */
  if (!in_lto_p && g->have_offload)
    flag_generate_offload = 1;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_generate_lto || flag_generate_offload)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  global_info_ready = true;
  if (dump_file)
    {
      fprintf (dump_file, "Optimized ");
      symtab_node:: dump_table (dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	   = (node->alias_target ? node->alias_target
	      : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
      }
#endif

  state = EXPANSION;

  /* With -fno-toplevel-reorder emit everything in source order;
     otherwise expand functions freely and emit variables at the end.  */
  if (!flag_toplevel_reorder)
    output_in_order (false);
  else
    {
      /* Output first asm statements and anything ordered. The process
	 flag is cleared for these nodes, so we skip them later.  */
      output_in_order (true);
      expand_all_functions ();
      output_variables ();
    }

  process_new_functions ();
  state = FINISHED;
  output_weakrefs ();

  if (dump_file)
    {
      fprintf (dump_file, "\nFinal ");
      symtab_node::dump_table (dump_file);
    }
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    node->debug ();
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2367
2368
/* Analyze the whole compilation unit once it is parsed completely.
   Front-end entry point: lowers all finalized functions to GIMPLE,
   resolves aliases, and hands the unit to the pass manager via
   compile ().  */

void
symbol_table::finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.
     NOTE(review): handle_alias_pairs and analyze_functions each run
     twice; presumably the first analysis can introduce new aliases and
     thunks that need a second round -- confirm before simplifying.  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  analyze_functions ();

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2414
2415 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2416 within the same process. For use by toplev::finalize. */
2417
2418 void
2419 cgraphunit_c_finalize (void)
2420 {
2421 gcc_assert (cgraph_new_nodes.length () == 0);
2422 cgraph_new_nodes.truncate (0);
2423
2424 vtable_entry_type = NULL;
2425 queued_nodes = &symtab_terminator;
2426
2427 first_analyzed = NULL;
2428 first_analyzed_var = NULL;
2429 }
2430
/* Creates a wrapper from cgraph_node to TARGET node.  Thunk is used for this
   kind of wrapper method.  The node's existing body is discarded and
   replaced by a GIMPLE thunk that forwards to TARGET; the declaration,
   parameter list and DECL_RESULT are preserved.  */

void
cgraph_node::create_wrapper (cgraph_node *target)
{
  /* Preserve DECL_RESULT so we get right by reference flag.  */
  tree decl_result = DECL_RESULT (decl);

  /* Remove the function's body but keep arguments to be reused
     for thunk.  */
  release_body (true);
  reset ();

  /* Rebuild a fresh, empty struct function for the decl.  */
  DECL_RESULT (decl) = decl_result;
  DECL_INITIAL (decl) = NULL;
  allocate_struct_function (decl, false);
  set_cfun (NULL);

  /* Turn alias into thunk and expand it into GIMPLE representation.  */
  definition = true;
  thunk.thunk_p = true;
  /* No 'this' pointer adjustment: arguments are forwarded unchanged.  */
  thunk.this_adjusting = false;
  create_edge (target, NULL, count, CGRAPH_FREQ_BASE);

  tree arguments = DECL_ARGUMENTS (decl);

  /* Clear TREE_ADDRESSABLE on every parameter.  NOTE(review):
     presumably safe because the original body that might have taken
     their address was released above -- confirm.  */
  while (arguments)
    {
      TREE_ADDRESSABLE (arguments) = false;
      arguments = TREE_CHAIN (arguments);
    }

  expand_thunk (false, true);

  /* Inline summary set-up.  */
  analyze ();
  inline_analyze_function (this);
}
2470
2471 #include "gt-cgraphunit.h"